Diffstat (limited to 'src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget')
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore |   7
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md |  78
-rwxr-xr-x  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py | 177
-rwxr-xr-x  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py | 143
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip | bin 0 -> 679055 bytes
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip | bin 0 -> 516022 bytes
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip | bin 0 -> 662837 bytes
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip | bin 0 -> 621912 bytes
-rwxr-xr-x  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh |  21
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py | 286
-rwxr-xr-x  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py | 448
-rwxr-xr-x  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh |  21
-rwxr-xr-x  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py | 167
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt |   3
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py | 178
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec |  21
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props |  18
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets |  19
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py |   0
-rw-r--r--  src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py |  98
20 files changed, 1685 insertions(+), 0 deletions(-)
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore
new file mode 100644
index 000000000..56919a155
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore
@@ -0,0 +1,7 @@
+dl-*
+out-*
+*.nupkg
+*.tgz
+*.key
+*.pyc
+__pycache__
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md
new file mode 100644
index 000000000..87b176930
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md
@@ -0,0 +1,78 @@
+# Package assembly
+
+This set of scripts collects CI artifacts from a local directory or S3 and
+assembles them, in a staging directory, into the package structure defined
+by a packaging class.
+For the NugetPackage class the NuGet tool is then run (from within docker) on
+the staging directory to create a proper NuGet package (with all the metadata),
+while the StaticPackage class creates a tarball.
+
+The finalized NuGet package may then be uploaded manually to NuGet.org.
+
+## Requirements
+
+ * Requires Python 3
+ * Requires Docker
+ * (if --s3) Requires private S3 access keys for the librdkafka-ci-packages bucket.
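+ * Requires the Python dependencies listed in requirements.txt; a suggested
+   way to install them (assuming pip3 is available):
+
+    $ pip3 install -r requirements.txt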
+
+
+
+## Usage
+
+1. Trigger CI builds by creating and pushing a new release (candidate) tag
+ in the librdkafka repo. Make sure the tag is created on the correct branch.
+
+ $ git tag v0.11.0-RC3
+ $ git push origin v0.11.0-RC3
+
+2. Wait for CI builds to finish; monitor the builds here:
+
+ * https://travis-ci.org/edenhill/librdkafka
+ * https://ci.appveyor.com/project/edenhill/librdkafka
+
+Or if using SemaphoreCI, just have the packaging job depend on prior build jobs
+in the same pipeline.
+
+3. On a Linux host, run the release.py script to assemble the NuGet package
+
+ $ cd packaging/nuget
+ # Specify the tag
+ $ ./release.py v0.11.0-RC3
+ # Optionally, if the tag was moved and an exact sha is also required:
+ # $ ./release.py --sha <the-full-git-sha> v0.11.0-RC3
+
+4. If all artifacts were available, the NuGet package will be built
+   and reside in the current directory as librdkafka.redist.<v-less-tag>.nupkg
+
+5. Test the package manually
+
+6. Upload the package to NuGet
+
+ * https://www.nuget.org/packages/manage/upload
+
+7. If you trust this process you can have release.py upload the package
+ automatically to NuGet after building it:
+
+ $ ./release.py --retries 100 --upload your-nuget-api.key v0.11.0-RC3
+
+
+
+## Other uses
+
+### Create static library bundles
+
+To create a bundle (tarball) of librdkafka self-contained static library
+builds, use the following command:
+
+ $ ./release.py --class StaticPackage v1.1.0
+
+
+### Clean up S3 bucket
+
+To clean up old non-release/non-RC builds from the S3 bucket, first check with:
+
+ $ AWS_PROFILE=.. ./cleanup-s3.py --age 360
+
+Verify that the listed objects should really be deleted, then delete:
+
+ $ AWS_PROFILE=.. ./cleanup-s3.py --age 360 --delete
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py
new file mode 100755
index 000000000..c58e0c9c7
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+#
+#
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+# <token>-[<value>]__ (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+# p - project (e.g., "confluent-kafka-python")
+# bld - builder (e.g., "travis")
+# plat - platform ("osx", "linux", ..)
+# arch - arch ("x64", ..)
+# tag - git tag
+# sha - git sha
+# bid - builder's build-id
+# bldtype - Release, Debug (appveyor)
+#
+# Example:
+# p-confluent-kafka-python__bld-travis__plat-linux__tag-__sha-112130ce297656ea1c39e7c94c99286f95133a24__bid-271588764__/confluent_kafka-0.11.0-cp35-cp35m-manylinux1_x86_64.whl
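+#
+# A sketch of how collect_single() tokenizes such a folder name, using the
+# regex found below (illustrative):
+#   >>> re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)__',
+#                  'p-common__plat-linux__arch-x64__bldtype-Release__')
+#   [('p', 'common'), ('plat', 'linux'), ('arch', 'x64'), ('bldtype', 'Release')]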
+
+
+import re
+import os
+import boto3
+
+import packaging
+
+s3_bucket = 'librdkafka-ci-packages'
+dry_run = False
+
+
+class Artifact (object):
+ def __init__(self, arts, path, info=None):
+ self.path = path
+ # Remove unexpanded AppVeyor $(..) tokens from filename
+ self.fname = re.sub(r'\$\([^\)]+\)', '', os.path.basename(path))
+ slpath = os.path.join(os.path.dirname(path), self.fname)
+ if os.path.isfile(slpath):
+ # Already points to local file in correct location
+ self.lpath = slpath
+ else:
+ # Prepare download location in dlpath
+ self.lpath = os.path.join(arts.dlpath, slpath)
+
+ if info is None:
+ self.info = dict()
+ else:
+ # Assign the map and convert all keys to lower case
+ self.info = {k.lower(): v for k, v in info.items()}
+ # Rename values, e.g., 'plat':'linux' to 'plat':'debian'
+ for k, v in self.info.items():
+ rdict = packaging.rename_vals.get(k, None)
+ if rdict is not None:
+ self.info[k] = rdict.get(v, v)
+
+ # Score value for sorting
+ self.score = 0
+
+ # AppVeyor symbol builds are of less value
+ if self.fname.find('.symbols.') != -1:
+ self.score -= 10
+
+ self.arts = arts
+ arts.artifacts.append(self)
+
+ def __repr__(self):
+ return self.path
+
+ def __lt__(self, other):
+ return self.score < other.score
+
+ def download(self):
+ """ Download artifact from S3 and store in local directory .lpath.
+ If the artifact is already downloaded nothing is done. """
+ if os.path.isfile(self.lpath) and os.path.getsize(self.lpath) > 0:
+ return
+ print('Downloading %s -> %s' % (self.path, self.lpath))
+ if dry_run:
+ return
+ ldir = os.path.dirname(self.lpath)
+ if not os.path.isdir(ldir):
+ os.makedirs(ldir, 0o755)
+ self.arts.s3_bucket.download_file(self.path, self.lpath)
+
+
+class Artifacts (object):
+ def __init__(self, match, dlpath):
+ super(Artifacts, self).__init__()
+ self.match = match
+ self.artifacts = list()
+ # Download directory (make sure it ends with a path separator)
+ if not dlpath.endswith(os.path.sep):
+ dlpath = os.path.join(dlpath, '')
+ self.dlpath = dlpath
+ if not os.path.isdir(self.dlpath):
+ if not dry_run:
+ os.makedirs(self.dlpath, 0o755)
+
+ def collect_single(self, path, req_tag=True):
+ """ Collect single artifact, be it in S3 or locally.
+ :param: path string: S3 or local (relative) path
+ :param: req_tag bool: Require tag to match.
+ """
+
+ print('? %s' % path)
+
+ # For local files, strip download path.
+ # Also ignore any parent directories.
+ if path.startswith(self.dlpath):
+ folder = os.path.basename(os.path.dirname(path[len(self.dlpath):]))
+ else:
+ folder = os.path.basename(os.path.dirname(path))
+
+ # The folder contains the tokens needed to perform
+ # matching of project, gitref, etc.
+ rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)__', folder)
+ if rinfo is None or len(rinfo) == 0:
+ print('Incorrect folder/file name format for %s' % folder)
+ return None
+
+ info = dict(rinfo)
+
+ # Ignore AppVeyor Debug builds
+ if info.get('bldtype', '').lower() == 'debug':
+ print('Ignoring debug artifact %s' % folder)
+ return None
+
+ tag = info.get('tag', None)
+ if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+            # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+            # with an empty value when the tag is not set; it leaves the
+            # token in the string, so translate that to no tag.
+ del info['tag']
+
+ # Match tag or sha to gitref
+ unmatched = list()
+ for m, v in self.match.items():
+ if m not in info or info[m] != v:
+ unmatched.append(m)
+
+ # Make sure all matches were satisfied, unless this is a
+ # common artifact.
+ if info.get('p', '') != 'common' and len(unmatched) > 0:
+ print(info)
+ print('%s: %s did not match %s' %
+ (info.get('p', None), folder, unmatched))
+ return None
+
+ return Artifact(self, path, info)
+
+ def collect_s3(self):
+ """ Collect and download build-artifacts from S3 based on
+ git reference """
+ print(
+ 'Collecting artifacts matching %s from S3 bucket %s' %
+ (self.match, s3_bucket))
+ self.s3 = boto3.resource('s3')
+ self.s3_bucket = self.s3.Bucket(s3_bucket)
+ self.s3_client = boto3.client('s3')
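+        # Note: list_objects returns at most 1000 keys per call and this
+        # loop does not paginate; packaging.py's collect_s3() shows the
+        # paginated list_objects_v2 variant.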
+ for item in self.s3_client.list_objects(
+ Bucket=s3_bucket, Prefix='librdkafka/').get('Contents'):
+ self.collect_single(item.get('Key'))
+
+ for a in self.artifacts:
+ a.download()
+
+ def collect_local(self, path, req_tag=True):
+ """ Collect artifacts from a local directory possibly previously
+ collected from s3 """
+ for f in [os.path.join(dp, f) for dp, dn,
+ filenames in os.walk(path) for f in filenames]:
+ if not os.path.isfile(f):
+ continue
+ self.collect_single(f, req_tag)
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py
new file mode 100755
index 000000000..2093af0c1
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+#
+# Clean up test builds from librdkafka's S3 bucket.
+# This also covers python builds.
+
+import re
+from datetime import datetime, timezone
+import boto3
+import argparse
+
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory, or collecting already downloaded artifacts from
+# local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+# <token>-[<value>]__ (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+# p - project (e.g., "confluent-kafka-python")
+# bld - builder (e.g., "travis")
+# plat - platform ("osx", "linux", ..)
+# arch - arch ("x64", ..)
+# tag - git tag
+# sha - git sha
+# bid - builder's build-id
+# bldtype - Release, Debug (appveyor)
+# lnk - std, static
+#
+# Example:
+# librdkafka/p-librdkafka__bld-travis__plat-linux__arch-x64__tag-v0.0.62__sha-d051b2c19eb0c118991cd8bc5cf86d8e5e446cde__bid-1562.1/librdkafka.tar.gz
+
+
+s3_bucket = 'librdkafka-ci-packages'
+
+
+def may_delete(path):
+ """ Returns true if S3 object path is eligible for deletion, e.g.
+ has a non-release/rc tag. """
+
+ # The path contains the tokens needed to perform
+ # matching of project, gitref, etc.
+ rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)(?:__|$)', path)
+ if rinfo is None or len(rinfo) == 0:
+ print(f"Incorrect folder/file name format for {path}")
+ return False
+
+ info = dict(rinfo)
+
+ tag = info.get('tag', None)
+ if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+        # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+        # with an empty value when the tag is not set; it leaves the
+        # token in the string, so translate that to no tag.
+ del info['tag']
+ tag = None
+
+ if tag is None:
+ return True
+
+ if re.match(r'^v?\d+\.\d+\.\d+(-?RC\d+)?$', tag,
+ flags=re.IGNORECASE) is None:
+ return True
+
+ return False
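+
+# Illustrative expectations for may_delete() (based on the tag regex above):
+#   'p-librdkafka__tag-v1.9.2__bid-1'     -> False (release tag: keep)
+#   'p-librdkafka__tag-v1.9.2-RC1__bid-1' -> False (release candidate: keep)
+#   'p-librdkafka__bid-1234__'            -> True  (no tag: eligible)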
+
+
+def collect_s3(s3, min_age_days=60):
+ """ Collect artifacts from S3 """
+ now = datetime.now(timezone.utc)
+ eligible = []
+ totcnt = 0
+ # note: list_objects will return at most 1000 objects per call,
+ # use continuation token to read full list.
+ cont_token = None
+ more = True
+ while more:
+ if cont_token is not None:
+ res = s3.list_objects_v2(Bucket=s3_bucket,
+ ContinuationToken=cont_token)
+ else:
+ res = s3.list_objects_v2(Bucket=s3_bucket)
+
+ if res.get('IsTruncated') is True:
+ cont_token = res.get('NextContinuationToken')
+ else:
+ more = False
+
+ for item in res.get('Contents'):
+ totcnt += 1
+ age = (now - item.get('LastModified')).days
+ path = item.get('Key')
+ if age >= min_age_days and may_delete(path):
+ eligible.append(path)
+
+ return (eligible, totcnt)
+
+
+def chunk_list(lst, cnt):
+ """ Split list into lists of cnt """
+ for i in range(0, len(lst), cnt):
+ yield lst[i:i + cnt]
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--delete",
+ help="WARNING! Don't just check, actually delete "
+ "S3 objects.",
+ action="store_true")
+ parser.add_argument("--age", help="Minimum object age in days.",
+ type=int, default=360)
+
+ args = parser.parse_args()
+ dry_run = args.delete is not True
+ min_age_days = args.age
+
+ if dry_run:
+ op = "Eligible for deletion"
+ else:
+ op = "Deleting"
+
+ s3 = boto3.client('s3')
+
+ # Collect eligible artifacts
+ eligible, totcnt = collect_s3(s3, min_age_days=min_age_days)
+ print(f"{len(eligible)}/{totcnt} eligible artifacts to delete")
+
+    # Delete in chunks of 1000 (the maximum the S3 API allows)
+ for chunk in chunk_list(eligible, 1000):
+ print(op + ":\n" + '\n'.join(chunk))
+ if dry_run:
+ continue
+
+ res = s3.delete_objects(Bucket=s3_bucket,
+ Delete={
+ 'Objects': [{'Key': x} for x in chunk],
+ 'Quiet': True
+ })
+ errors = res.get('Errors', [])
+ if len(errors) > 0:
+ raise Exception(f"Delete failed: {errors}")
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip
new file mode 100644
index 000000000..9bc5e9fbc
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip
Binary files differ
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip
new file mode 100644
index 000000000..152938138
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip
Binary files differ
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip
new file mode 100644
index 000000000..3609c0385
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip
Binary files differ
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip
new file mode 100644
index 000000000..b99e5ae5b
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip
Binary files differ
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh
new file mode 100755
index 000000000..032371231
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+#
+# Front-end for nuget that runs nuget in a docker image.
+
+set -ex
+
+if [[ -f /.dockerenv ]]; then
+ echo "Inside docker"
+
+    pushd "$(dirname "$0")"
+
+    nuget "$@"
+
+    popd
+
+else
+    echo "Running docker image"
+    docker run -v "$(pwd):/io" mono:latest "/io/$0" "$@"
+fi
+
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py
new file mode 100644
index 000000000..aea05ade0
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python3
+#
+# Create NuGet package
+#
+
+import os
+import tempfile
+import shutil
+import subprocess
+from packaging import Package, Mapping
+
+
+class NugetPackage (Package):
+    """ All platforms, archs, et al., are bundled into one set of
+        NuGet output packages: "main", redist and symbols """
+
+    # See .semaphore/semaphore.yml for where these are built.
+ mappings = [
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafka.h',
+ 'build/native/include/librdkafka/rdkafka.h'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafkacpp.h',
+ 'build/native/include/librdkafka/rdkafkacpp.h'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafka_mock.h',
+ 'build/native/include/librdkafka/rdkafka_mock.h'),
+
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/README.md',
+ 'README.md'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/CONFIGURATION.md',
+ 'CONFIGURATION.md'),
+ Mapping({'arch': 'x64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/LICENSES.txt',
+ 'LICENSES.txt'),
+
+ # OSX x64
+ Mapping({'arch': 'x64',
+ 'plat': 'osx'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.dylib',
+ 'runtimes/osx-x64/native/librdkafka.dylib'),
+ # OSX arm64
+ Mapping({'arch': 'arm64',
+ 'plat': 'osx'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.1.dylib',
+ 'runtimes/osx-arm64/native/librdkafka.dylib'),
+
+ # Linux glibc centos6 x64 with GSSAPI
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/librdkafka.so'),
+ # Linux glibc centos6 x64 without GSSAPI (no external deps)
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/centos6-librdkafka.so'),
+ # Linux glibc centos7 x64 with GSSAPI
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/centos7-librdkafka.so'),
+ # Linux glibc centos7 arm64 without GSSAPI (no external deps)
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-arm64/native/librdkafka.so'),
+
+ # Linux musl alpine x64 without GSSAPI (no external deps)
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/alpine-librdkafka.so'),
+
+ # Common Win runtime
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'vcruntime140.dll',
+ 'runtimes/win-x64/native/vcruntime140.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'msvcp140.dll', 'runtimes/win-x64/native/msvcp140.dll'),
+
+ # matches x64 librdkafka.redist.zip
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/librdkafka.dll',
+ 'runtimes/win-x64/native/librdkafka.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/librdkafkacpp.dll',
+ 'runtimes/win-x64/native/librdkafkacpp.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/libcrypto-3-x64.dll',
+ 'runtimes/win-x64/native/libcrypto-3-x64.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/libssl-3-x64.dll',
+ 'runtimes/win-x64/native/libssl-3-x64.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/zlib1.dll',
+ 'runtimes/win-x64/native/zlib1.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/zstd.dll',
+ 'runtimes/win-x64/native/zstd.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/libcurl.dll',
+ 'runtimes/win-x64/native/libcurl.dll'),
+ # matches x64 librdkafka.redist.zip, lib files
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/x64/Release/librdkafka.lib',
+ 'build/native/lib/win/x64/win-x64-Release/v142/librdkafka.lib' # noqa: E501
+ ),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/x64/Release/librdkafkacpp.lib',
+ 'build/native/lib/win/x64/win-x64-Release/v142/librdkafkacpp.lib' # noqa: E501
+ ),
+
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'vcruntime140.dll',
+ 'runtimes/win-x86/native/vcruntime140.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'msvcp140.dll', 'runtimes/win-x86/native/msvcp140.dll'),
+
+ # matches Win32 librdkafka.redist.zip
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/librdkafka.dll',
+ 'runtimes/win-x86/native/librdkafka.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/librdkafkacpp.dll',
+ 'runtimes/win-x86/native/librdkafkacpp.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/libcrypto-3.dll',
+ 'runtimes/win-x86/native/libcrypto-3.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/libssl-3.dll',
+ 'runtimes/win-x86/native/libssl-3.dll'),
+
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/zlib1.dll',
+ 'runtimes/win-x86/native/zlib1.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/zstd.dll',
+ 'runtimes/win-x86/native/zstd.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/libcurl.dll',
+ 'runtimes/win-x86/native/libcurl.dll'),
+
+ # matches Win32 librdkafka.redist.zip, lib files
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/Win32/Release/librdkafka.lib',
+ 'build/native/lib/win/x86/win-x86-Release/v142/librdkafka.lib' # noqa: E501
+ ),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/Win32/Release/librdkafkacpp.lib',
+ 'build/native/lib/win/x86/win-x86-Release/v142/librdkafkacpp.lib' # noqa: E501
+ )
+ ]
+
+ def __init__(self, version, arts):
+ if version.startswith('v'):
+ version = version[1:] # Strip v prefix
+ super(NugetPackage, self).__init__(version, arts)
+
+ def cleanup(self):
+ if os.path.isdir(self.stpath):
+ shutil.rmtree(self.stpath)
+
+ def build(self, buildtype):
+ """ Build single NuGet package for all its artifacts. """
+
+ # NuGet removes the prefixing v from the version.
+ vless_version = self.kv['version']
+ if vless_version[0] == 'v':
+ vless_version = vless_version[1:]
+
+ self.stpath = tempfile.mkdtemp(prefix="out-", suffix="-%s" % buildtype,
+ dir=".")
+
+ self.render('librdkafka.redist.nuspec')
+ self.copy_template('librdkafka.redist.targets',
+ destpath=os.path.join('build', 'native'))
+ self.copy_template('librdkafka.redist.props',
+ destpath='build')
+
+ # Generate template tokens for artifacts
+ for a in self.arts.artifacts:
+ if 'bldtype' not in a.info:
+ a.info['bldtype'] = 'release'
+
+ a.info['variant'] = '%s-%s-%s' % (a.info.get('plat'),
+ a.info.get('arch'),
+ a.info.get('bldtype'))
+ if 'toolset' not in a.info:
+ a.info['toolset'] = 'v142'
+
+ # Apply mappings and extract files
+ self.apply_mappings()
+
+ print('Tree extracted to %s' % self.stpath)
+
+ # After creating a bare-bone nupkg layout containing the artifacts
+ # and some spec and props files, call the 'nuget' utility to
+ # make a proper nupkg of it (with all the metadata files).
+ subprocess.check_call("./nuget.sh pack %s -BasePath '%s' -NonInteractive" % # noqa: E501
+ (os.path.join(self.stpath,
+ 'librdkafka.redist.nuspec'),
+ self.stpath), shell=True)
+
+ return 'librdkafka.redist.%s.nupkg' % vless_version
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py
new file mode 100755
index 000000000..c4dab806d
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py
@@ -0,0 +1,448 @@
+#!/usr/bin/env python3
+#
+# Packaging script.
+# Assembles packages using CI artifacts.
+#
+
+import sys
+import re
+import os
+import shutil
+from fnmatch import fnmatch
+from string import Template
+from zfile import zfile
+import boto3
+import magic
+
+if sys.version_info[0] < 3:
+ from urllib import unquote as _unquote
+else:
+ from urllib.parse import unquote as _unquote
+
+
+def unquote(path):
+ # Removes URL escapes, and normalizes the path by removing ./.
+ path = _unquote(path)
+ if path[:2] == './':
+ return path[2:]
+ return path
+
+
+# Rename token values
+rename_vals = {'plat': {'windows': 'win'},
+ 'arch': {'x86_64': 'x64',
+ 'amd64': 'x64',
+ 'i386': 'x86',
+ 'win32': 'x86'}}
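+# e.g., an artifact tokenized as "arch-x86_64" or "plat-windows" is
+# normalized to arch=x64 / plat=win before matching.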
+
+# Filemagic arch mapping.
+# key is (plat, arch, file_extension), value is a compiled filemagic regex.
+# This is used to verify that an artifact has the expected file type.
+magic_patterns = {
+ ('win', 'x64', '.dll'): re.compile('PE32.*DLL.* x86-64, for MS Windows'),
+ ('win', 'x86', '.dll'):
+ re.compile('PE32.*DLL.* Intel 80386, for MS Windows'),
+ ('win', 'x64', '.lib'): re.compile('current ar archive'),
+ ('win', 'x86', '.lib'): re.compile('current ar archive'),
+ ('linux', 'x64', '.so'): re.compile('ELF 64.* x86-64'),
+ ('linux', 'arm64', '.so'): re.compile('ELF 64.* ARM aarch64'),
+ ('osx', 'x64', '.dylib'): re.compile('Mach-O 64.* x86_64'),
+ ('osx', 'arm64', '.dylib'): re.compile('Mach-O 64.*arm64')}
+
+magic = magic.Magic()
+
+
+def magic_mismatch(path, a):
+    """ Verify that the filemagic for \\p path matches what is expected
+    for artifact \\p a.
+    Returns True if the magic file info does NOT match.
+    Returns False if no matching is needed or the magic matches. """
+ k = (a.info.get('plat', None), a.info.get('arch', None),
+ os.path.splitext(path)[1])
+ pattern = magic_patterns.get(k, None)
+ if pattern is None:
+ return False
+
+ minfo = magic.id_filename(path)
+ if not pattern.match(minfo):
+ print(
+ f"Warning: {path} magic \"{minfo}\" "
+ f"does not match expected {pattern} for key {k}")
+ return True
+
+ return False
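+
+# A sketch of the check magic_mismatch() performs (the magic string is
+# illustrative; actual libmagic output may vary):
+#   k = ('win', 'x64', '.dll')
+#   magic_patterns[k].match('PE32+ executable (DLL) x86-64, for MS Windows')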
+
+
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory, or collecting already downloaded artifacts from
+# local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+# <token>-[<value>]__ (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+# p - project (e.g., "confluent-kafka-python")
+# bld - builder (e.g., "travis")
+# plat - platform ("osx", "linux", ..)
+# dist - distro or runtime ("centos6", "mingw", "msvcr", "alpine", ..).
+# arch - arch ("x64", ..)
+# tag - git tag
+# sha - git sha
+# bid - builder's build-id
+# bldtype - Release, Debug (appveyor)
+# lnk - Linkage ("std", "static", "all" (both std and static))
+# extra - Extra build options, typically "gssapi" (for cyrus-sasl linking).
+#
+# Example:
+# librdkafka/p-librdkafka__bld-travis__plat-linux__arch-x64__tag-v0.0.62__sha-d051b2c19eb0c118991cd8bc5cf86d8e5e446cde__bid-1562.1/librdkafka.tar.gz
+
+
+class MissingArtifactError(Exception):
+ pass
+
+
+s3_bucket = 'librdkafka-ci-packages'
+dry_run = False
+
+
+class Artifact (object):
+ def __init__(self, arts, path, info=None):
+ self.path = path
+ # Remove unexpanded AppVeyor $(..) tokens from filename
+ self.fname = re.sub(r'\$\([^\)]+\)', '', os.path.basename(path))
+ slpath = os.path.join(os.path.dirname(path), self.fname)
+ if os.path.isfile(slpath):
+ # Already points to local file in correct location
+ self.lpath = slpath
+ else:
+ # Prepare download location in dlpath
+ self.lpath = os.path.join(arts.dlpath, slpath)
+
+ if info is None:
+ self.info = dict()
+ else:
+ # Assign the map and convert all keys to lower case
+ self.info = {k.lower(): v for k, v in info.items()}
+ # Rename values, e.g., 'plat':'windows' to 'plat':'win'
+ for k, v in self.info.items():
+ rdict = rename_vals.get(k, None)
+ if rdict is not None:
+ self.info[k] = rdict.get(v, v)
+
+ # Score value for sorting
+ self.score = 0
+
+ # AppVeyor symbol builds are of less value
+ if self.fname.find('.symbols.') != -1:
+ self.score -= 10
+
+ self.arts = arts
+ arts.artifacts.append(self)
+
+ def __repr__(self):
+ return self.path
+
+ def __lt__(self, other):
+ return self.score < other.score
+
+ def download(self):
+ """ Download artifact from S3 and store in local directory .lpath.
+ If the artifact is already downloaded nothing is done. """
+ if os.path.isfile(self.lpath) and os.path.getsize(self.lpath) > 0:
+ return
+ print('Downloading %s' % self.path)
+ if dry_run:
+ return
+ ldir = os.path.dirname(self.lpath)
+ if not os.path.isdir(ldir):
+ os.makedirs(ldir, 0o755)
+ self.arts.s3_bucket.download_file(self.path, self.lpath)
+
+
+class Artifacts (object):
+ def __init__(self, match, dlpath):
+ super(Artifacts, self).__init__()
+ self.match = match
+ self.artifacts = list()
+ # Download directory (make sure it ends with a path separator)
+ if not dlpath.endswith(os.path.sep):
+ dlpath = os.path.join(dlpath, '')
+ self.dlpath = dlpath
+ if not os.path.isdir(self.dlpath):
+ if not dry_run:
+ os.makedirs(self.dlpath, 0o755)
+
+ def collect_single(self, path, req_tag=True):
+ """ Collect single artifact, be it in S3 or locally.
+ :param: path string: S3 or local (relative) path
+ :param: req_tag bool: Require tag to match.
+ """
+
+ # For local files, strip download path.
+ # Also ignore any parent directories.
+ if path.startswith(self.dlpath):
+ folder = os.path.basename(os.path.dirname(path[len(self.dlpath):]))
+ else:
+ folder = os.path.basename(os.path.dirname(path))
+
+ # The folder contains the tokens needed to perform
+ # matching of project, gitref, etc.
+ rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)(?:__|$)', folder)
+ if rinfo is None or len(rinfo) == 0:
+ print('Incorrect folder/file name format for %s' % folder)
+ return None
+
+ info = dict(rinfo)
+
+ # Ignore AppVeyor Debug builds
+ if info.get('bldtype', '').lower() == 'debug':
+ print('Ignoring debug artifact %s' % folder)
+ return None
+
+ tag = info.get('tag', None)
+ if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+            # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+            # with an empty value when the tag is not set; it leaves the
+            # token in the string, so translate that to no tag.
+ del info['tag']
+
+ # Perform matching
+ unmatched = list()
+ for m, v in self.match.items():
+ if m not in info or info[m] != v:
+ unmatched.append(f"{m} = {v}")
+
+ # Make sure all matches were satisfied, unless this is a
+ # common artifact.
+ if info.get('p', '') != 'common' and len(unmatched) > 0:
+ return None
+
+ return Artifact(self, path, info)
+
+ def collect_s3(self):
+ """ Collect and download build-artifacts from S3 based on
+ git reference """
+ print(
+ 'Collecting artifacts matching %s from S3 bucket %s' %
+ (self.match, s3_bucket))
+ self.s3 = boto3.resource('s3')
+ self.s3_bucket = self.s3.Bucket(s3_bucket)
+ self.s3_client = boto3.client('s3')
+
+ # note: list_objects will return at most 1000 objects per call,
+ # use continuation token to read full list.
+ cont_token = None
+ more = True
+ while more:
+ if cont_token is not None:
+ res = self.s3_client.list_objects_v2(
+ Bucket=s3_bucket,
+ Prefix='librdkafka/',
+ ContinuationToken=cont_token)
+ else:
+ res = self.s3_client.list_objects_v2(Bucket=s3_bucket,
+ Prefix='librdkafka/')
+
+ if res.get('IsTruncated') is True:
+ cont_token = res.get('NextContinuationToken')
+ else:
+ more = False
+
+ for item in res.get('Contents'):
+ self.collect_single(item.get('Key'))
+
+ for a in self.artifacts:
+ a.download()
+
+ def collect_local(self, path, req_tag=True):
+ """ Collect artifacts from a local directory possibly previously
+ collected from s3 """
+ for f in [os.path.join(dp, f) for dp, dn,
+ filenames in os.walk(path) for f in filenames]:
+ if not os.path.isfile(f):
+ continue
+ self.collect_single(f, req_tag)
+
+
+class Mapping (object):
+ """ Maps/matches a file in an input release artifact to
+ the output location of the package, based on attributes and paths. """
+
+ def __init__(self, attributes, artifact_fname_glob, path_in_artifact,
+ output_pkg_path=None, artifact_fname_excludes=[]):
+ """
+ @param attributes A dict of artifact attributes that must match.
+ If an attribute name (dict key) is prefixed
+ with "!" (e.g., "!plat") then the attribute
+ must not match.
+ @param artifact_fname_glob Match artifacts with this filename glob.
+ @param path_in_artifact On match, extract this file in the artifact,..
+ @param output_pkg_path ..and write it to this location in the package.
+ Defaults to path_in_artifact.
+ @param artifact_fname_excludes Exclude artifacts matching these
+ filenames.
+
+        A list of Mapping objects is assigned to the Package subclass's
+        'mappings' attribute; Package.apply_mappings() performs all mappings.
+ """
+ super(Mapping, self).__init__()
+ self.attributes = attributes
+ self.fname_glob = artifact_fname_glob
+ self.input_path = path_in_artifact
+ if output_pkg_path is None:
+ self.output_path = self.input_path
+ else:
+ self.output_path = output_pkg_path
+ self.name = self.output_path
+ self.fname_excludes = artifact_fname_excludes
+
+ def __str__(self):
+ return self.name
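+
+# Example Mapping (mirroring nugetpackage.py): extract rdkafka.h from the
+# linux x64 std-linkage librdkafka.tgz artifact into the package's include
+# directory:
+#   Mapping({'arch': 'x64', 'plat': 'linux', 'lnk': 'std'},
+#           'librdkafka.tgz',
+#           './usr/local/include/librdkafka/rdkafka.h',
+#           'build/native/include/librdkafka/rdkafka.h')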
+
+
+class Package (object):
+ """ Generic Package class
+ A Package is a working container for one or more output
+ packages for a specific package type (e.g., nuget) """
+
+ def __init__(self, version, arts):
+ super(Package, self).__init__()
+ self.version = version
+ self.arts = arts
+ # These may be overwritten by specific sub-classes:
+ self.artifacts = arts.artifacts
+ # Staging path, filled in later.
+ self.stpath = None
+ self.kv = {'version': version}
+ self.files = dict()
+
+ def add_file(self, file):
+ self.files[file] = True
+
+ def build(self):
+        """ Build package output(s), return a list of paths
+            to built packages """
+ raise NotImplementedError
+
+ def cleanup(self):
+ """ Optional cleanup routine for removing temporary files, etc. """
+ pass
+
+ def render(self, fname, destpath='.'):
+ """ Render template in file fname and save to destpath/fname,
+ where destpath is relative to stpath """
+
+ outf = os.path.join(self.stpath, destpath, fname)
+
+ if not os.path.isdir(os.path.dirname(outf)):
+ os.makedirs(os.path.dirname(outf), 0o0755)
+
+ with open(os.path.join('templates', fname), 'r') as tf:
+ tmpl = Template(tf.read())
+ with open(outf, 'w') as of:
+ of.write(tmpl.substitute(self.kv))
+
+ self.add_file(outf)
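+
+    # Example: templates/librdkafka.redist.nuspec contains the
+    # string.Template token "${version}", which render() substitutes
+    # from self.kv['version'].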
+
+ def copy_template(self, fname, target_fname=None, destpath='.'):
+ """ Copy template file to destpath/fname
+ where destpath is relative to stpath """
+
+ if target_fname is None:
+ target_fname = fname
+ outf = os.path.join(self.stpath, destpath, target_fname)
+
+ if not os.path.isdir(os.path.dirname(outf)):
+ os.makedirs(os.path.dirname(outf), 0o0755)
+
+ shutil.copy(os.path.join('templates', fname), outf)
+
+ self.add_file(outf)
+
+ def apply_mappings(self):
+ """ Applies a list of Mapping to match and extract files from
+ matching artifacts. If any of the listed Mappings can not be
+ fulfilled an exception is raised. """
+
+ assert self.mappings
+ assert len(self.mappings) > 0
+
+ for m in self.mappings:
+
+ artifact = None
+ for a in self.arts.artifacts:
+ found = True
+
+ for attr in m.attributes:
+ if attr[0] == '!':
+ # Require attribute NOT to match
+ origattr = attr
+ attr = attr[1:]
+
+                        if attr in a.info and \
+                           a.info[attr] == m.attributes[origattr]:
+ found = False
+ break
+ else:
+ # Require attribute to match
+ if attr not in a.info or \
+ a.info[attr] != m.attributes[attr]:
+ found = False
+ break
+
+ if not fnmatch(a.fname, m.fname_glob):
+ found = False
+
+ for exclude in m.fname_excludes:
+ if exclude in a.fname:
+ found = False
+ break
+
+ if found:
+ artifact = a
+ break
+
+ if artifact is None:
+ raise MissingArtifactError(
+ '%s: unable to find artifact with tags %s matching "%s"' %
+ (m, str(m.attributes), m.fname_glob))
+
+ output_path = os.path.join(self.stpath, m.output_path)
+
+ try:
+ zfile.ZFile.extract(artifact.lpath, m.input_path, output_path)
+# except KeyError:
+# continue
+ except Exception as e:
+ raise Exception(
+ '%s: file not found in archive %s: %s. Files in archive are:\n%s' % # noqa: E501
+ (m, artifact.lpath, e, '\n'.join(zfile.ZFile(
+ artifact.lpath).getnames())))
+
+ # Check that the file type matches.
+            if magic_mismatch(output_path, artifact):
+ os.unlink(output_path)
+ continue
+
+ # All mappings found and extracted.
+
+ def verify(self, path):
+ """ Verify package content based on the previously defined mappings """
+
+ missing = list()
+ with zfile.ZFile(path, 'r') as zf:
+ print('Verifying %s:' % path)
+
+ # Zipfiles may url-encode filenames, unquote them before matching.
+ pkgd = [unquote(x) for x in zf.getnames()]
+ missing = [x for x in self.mappings if x.output_path not in pkgd]
+
+ if len(missing) > 0:
+ print(
+ 'Missing files in package %s:\n%s' %
+ (path, '\n'.join([str(x) for x in missing])))
+ print('Actual: %s' % '\n'.join(pkgd))
+ return False
+
+ print('OK - %d expected files found' % len(self.mappings))
+ return True
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh
new file mode 100755
index 000000000..598dd4cd7
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Upload NuGet package to NuGet.org using provided NuGet API key
+#
+
+set -e
+
+key=$1
+pkg=$2
+
+if [[ -z $pkg ]]; then
+ echo "Usage: $0 <nuget.org-api-key> <nuget-package>"
+ exit 1
+fi
+
+set -u
+
+docker run -t -v "$PWD/$pkg:/$pkg" mcr.microsoft.com/dotnet/sdk:3.1 \
+       dotnet nuget push "/$pkg" -n \
+       -k "$key" --source https://api.nuget.org/v3/index.json
+
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py
new file mode 100755
index 000000000..f230a580c
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+#
+#
+# NuGet release packaging tool.
+# Creates a NuGet package from CI artifacts on S3.
+#
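+# Example invocations (from README.md):
+#   $ ./release.py v0.11.0-RC3
+#   $ ./release.py --retries 100 --upload your-nuget-api.key v0.11.0-RC3
+#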
+
+
+import os
+import sys
+import argparse
+import time
+import packaging
+import nugetpackage
+import staticpackage
+
+
+dry_run = False
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--s3",
+ help="Collect artifacts from S3 bucket",
+ action="store_true")
+ parser.add_argument("--dry-run",
+ help="Locate artifacts but don't actually "
+ "download or do anything",
+ action="store_true")
+ parser.add_argument(
+ "--directory",
+ help="Download directory (default: dl-<tag>)",
+ default=None)
+ parser.add_argument(
+ "--no-cleanup",
+ help="Don't clean up temporary folders",
+ action="store_true")
+ parser.add_argument(
+ "--sha",
+ help="Also match on this git sha1",
+ default=None)
+ parser.add_argument(
+ "--ignore-tag",
+ help="Ignore the artifacts' tag attribute (for devel use only)",
+ action="store_true",
+ default=False)
+ parser.add_argument(
+ "--nuget-version",
+ help="The nuget package version (defaults to same as tag)",
+ default=None)
+    parser.add_argument("--upload", help="Upload package to NuGet after "
+                        "building, using the provided NuGet API key "
+                        "(either a file containing the key or the key itself)",
+ default=None,
+ type=str)
+ parser.add_argument(
+ "--class",
+ help="Packaging class (either NugetPackage or StaticPackage)",
+ default="NugetPackage",
+ dest="pkgclass")
+ parser.add_argument(
+ "--retries",
+ help="Number of retries to collect artifacts",
+ default=0,
+ type=int)
+ parser.add_argument("tag", help="Git tag to collect")
+
+ args = parser.parse_args()
+ dry_run = args.dry_run
+ retries = args.retries
+ if not args.directory:
+ args.directory = 'dl-%s' % args.tag
+
+ match = {}
+ if not args.ignore_tag:
+ match['tag'] = args.tag
+
+ if args.sha is not None:
+ match['sha'] = args.sha
+
+ if args.pkgclass == "NugetPackage":
+ pkgclass = nugetpackage.NugetPackage
+ elif args.pkgclass == "StaticPackage":
+ pkgclass = staticpackage.StaticPackage
+ else:
+ raise ValueError(f'Unknown packaging class {args.pkgclass}: '
+ 'should be one of NugetPackage or StaticPackage')
+
+ try:
+ match.update(getattr(pkgclass, 'match'))
+ except BaseException:
+ pass
+
+ arts = packaging.Artifacts(match, args.directory)
+
+ # Collect common local artifacts, such as support files.
+ arts.collect_local('common', req_tag=False)
+
+ while True:
+ if args.s3:
+ arts.collect_s3()
+
+ arts.collect_local(arts.dlpath)
+
+ if len(arts.artifacts) == 0:
+ raise ValueError('No artifacts found for %s' % match)
+
+ print('Collected artifacts (%s):' % (arts.dlpath))
+ for a in arts.artifacts:
+ print(' %s' % a.lpath)
+ print('')
+
+ if args.nuget_version is not None:
+ package_version = args.nuget_version
+ else:
+ package_version = args.tag
+
+ print('')
+
+ if dry_run:
+ sys.exit(0)
+
+ print('Building packages:')
+
+ try:
+ p = pkgclass(package_version, arts)
+ pkgfile = p.build(buildtype='release')
+ break
+ except packaging.MissingArtifactError as e:
+ if retries <= 0 or not args.s3:
+ if not args.no_cleanup:
+ p.cleanup()
+ raise e
+
+ p.cleanup()
+ retries -= 1
+ print(e)
+ print('Retrying in 30 seconds')
+ time.sleep(30)
+
+ if not args.no_cleanup:
+ p.cleanup()
+ else:
+ print(' --no-cleanup: leaving %s' % p.stpath)
+
+ print('')
+
+ if not p.verify(pkgfile):
+ print('Package failed verification.')
+ sys.exit(1)
+
+ print('Created package: %s' % pkgfile)
+
+ if args.upload is not None:
+ if os.path.isfile(args.upload):
+ with open(args.upload, 'r') as f:
+ nuget_key = f.read().replace('\n', '')
+ else:
+ nuget_key = args.upload
+
+ print('Uploading %s to NuGet' % pkgfile)
+ r = os.system("./push-to-nuget.sh '%s' %s" % (nuget_key, pkgfile))
+ assert int(r) == 0, \
+ f"NuGet upload failed with exit code {r}, see previous errors"
+ print('%s successfully uploaded to NuGet' % pkgfile)
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt
new file mode 100644
index 000000000..0fa2fd19c
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt
@@ -0,0 +1,3 @@
+boto3==1.18.45
+rpmfile==1.0.8
+filemagic==1.6
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py
new file mode 100644
index 000000000..38567bb60
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+#
+# Create self-contained static-library tar-ball package
+#
+
+import os
+import tempfile
+import shutil
+import subprocess
+from packaging import Package, Mapping
+
+
+class StaticPackage (Package):
+ """ Create a tar-ball with self-contained static libraries.
+ These are later imported into confluent-kafka-go. """
+
+ # Make sure gssapi (cyrus-sasl) is not linked, since that is a
+ # dynamic linkage, by specifying negative match '!extra': 'gssapi'.
+    # Except on OSX, where cyrus-sasl is always available, and
+    # Windows, where it is never linked.
+ #
+ # Match statically linked artifacts (which are included in 'all' builds)
+ mappings = [
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafka.h',
+ 'rdkafka.h'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/LICENSES.txt',
+ 'LICENSES.txt'),
+
+ # glibc linux static lib and pkg-config file
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_glibc_linux_amd64.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_glibc_linux_amd64.pc'),
+
+ # glibc linux arm64 static lib and pkg-config file
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_glibc_linux_arm64.a'),
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_glibc_linux_arm64.pc'),
+
+ # musl linux static lib and pkg-config file
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_musl_linux_amd64.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_musl_linux_amd64.pc'),
+
+ # musl linux arm64 static lib and pkg-config file
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_musl_linux_arm64.a'),
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_musl_linux_arm64.pc'),
+
+ # osx x64 static lib and pkg-config file
+ Mapping({'arch': 'x64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_darwin_amd64.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_darwin_amd64.pc'),
+
+ # osx arm64 static lib and pkg-config file
+ Mapping({'arch': 'arm64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_darwin_arm64.a'),
+ Mapping({'arch': 'arm64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_darwin_arm64.pc'),
+
+ # win static lib and pkg-config file (mingw)
+ Mapping({'arch': 'x64',
+ 'plat': 'win',
+ 'dist': 'mingw',
+ 'lnk': 'static'},
+ 'librdkafka.tgz',
+ './lib/librdkafka-static.a', 'librdkafka_windows.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win',
+ 'dist': 'mingw',
+ 'lnk': 'static'},
+ 'librdkafka.tgz',
+ './lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_windows.pc'),
+ ]
+
+ def __init__(self, version, arts):
+ super(StaticPackage, self).__init__(version, arts)
+
+ def cleanup(self):
+ if os.path.isdir(self.stpath):
+ shutil.rmtree(self.stpath)
+
+ def build(self, buildtype):
+ """ Build single package for all artifacts. """
+
+ self.stpath = tempfile.mkdtemp(prefix="out-", dir=".")
+
+ self.apply_mappings()
+
+ print('Tree extracted to %s' % self.stpath)
+
+ # After creating a bare-bone layout, create a tarball.
+ outname = "librdkafka-static-bundle-%s.tgz" % self.version
+ print('Writing to %s in %s' % (outname, self.stpath))
+ subprocess.check_call("(cd %s && tar cvzf ../%s .)" %
+ (self.stpath, outname),
+ shell=True)
+
+ return outname
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec
new file mode 100644
index 000000000..dbfd7b1aa
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package xmlns="http://schemas.microsoft.com/packaging/2011/10/nuspec.xsd">
+ <metadata>
+ <id>librdkafka.redist</id>
+ <version>${version}</version>
+ <title>librdkafka - redistributable</title>
+ <authors>Magnus Edenhill, edenhill</authors>
+ <owners>Confluent Inc.</owners>
+ <requireLicenseAcceptance>false</requireLicenseAcceptance>
+ <licenseUrl>https://github.com/confluentinc/librdkafka/blob/master/LICENSES.txt</licenseUrl>
+ <projectUrl>https://github.com/confluentinc/librdkafka</projectUrl>
+ <description>The Apache Kafka C/C++ client library - redistributable</description>
+ <summary>The Apache Kafka C/C++ client library</summary>
+ <releaseNotes>Release of librdkafka</releaseNotes>
+ <copyright>Copyright 2012-2023</copyright>
+ <tags>native apache kafka librdkafka C C++ nativepackage</tags>
+ </metadata>
+ <files>
+ <file src="**" />
+ </files>
+</package>
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props
new file mode 100644
index 000000000..c1615c61c
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ <Content Include="$(MSBuildThisFileDirectory)..\runtimes\win-x86\native\*">
+ <Link>librdkafka\x86\%(Filename)%(Extension)</Link>
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\runtimes\win-x64\native\*">
+ <Link>librdkafka\x64\%(Filename)%(Extension)</Link>
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ </ItemGroup>
+ <ItemDefinitionGroup>
+ <ClCompile>
+ <AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ </ClCompile>
+ </ItemDefinitionGroup>
+</Project>
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets
new file mode 100644
index 000000000..d174cda11
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets
@@ -0,0 +1,19 @@
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemDefinitionGroup>
+ <Link>
+ <AdditionalDependencies Condition="'$(Platform)' == 'x64'">$(MSBuildThisFileDirectory)lib\win\x64\win-x64-Release\v142\librdkafka.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalDependencies Condition="'$(Platform)' != 'x64'">$(MSBuildThisFileDirectory)lib\win\x86\win-x86-Release\v142\librdkafka.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories Condition="'$(Platform)' == 'x64'">$(MSBuildThisFileDirectory)lib\win\x64\win-x64-Release\v142;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <AdditionalLibraryDirectories Condition="'$(Platform)' != 'x64'">$(MSBuildThisFileDirectory)lib\win\x86\win-x86-Release\v142;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ </Link>
+ <ClCompile>
+ <AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ </ClCompile>
+ </ItemDefinitionGroup>
+ <ItemGroup Condition="'$(Platform)' == 'x64'">
+ <ReferenceCopyLocalPaths Include="$(MSBuildThisFileDirectory)..\..\runtimes\win-x64\native\*.dll" />
+ </ItemGroup>
+ <ItemGroup Condition="'$(Platform)' != 'x64'">
+ <ReferenceCopyLocalPaths Include="$(MSBuildThisFileDirectory)..\..\runtimes\win-x86\native\*.dll" />
+ </ItemGroup>
+</Project>
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py
diff --git a/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py
new file mode 100644
index 000000000..51f2df25f
--- /dev/null
+++ b/src/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+
+import os
+import tarfile
+import zipfile
+import rpmfile
+
+
+class ZFile (object):
+ def __init__(self, path, mode='r', ext=None):
+ super(ZFile, self).__init__()
+
+ if ext is not None:
+ _ext = ext
+ else:
+ _ext = os.path.splitext(path)[-1]
+ if _ext.startswith('.'):
+ _ext = _ext[1:]
+
+ if zipfile.is_zipfile(path) or _ext == 'zip':
+ self.f = zipfile.ZipFile(path, mode)
+ elif tarfile.is_tarfile(path) or _ext in ('tar', 'tgz', 'gz'):
+ self.f = tarfile.open(path, mode)
+ elif _ext == 'rpm':
+ self.f = rpmfile.open(path, mode + 'b')
+ else:
+ raise ValueError('Unsupported file extension: %s' % path)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ if callable(getattr(self.f, 'close', None)):
+ self.f.close()
+
+ def getnames(self):
+ if isinstance(self.f, zipfile.ZipFile):
+ return self.f.namelist()
+ elif isinstance(self.f, tarfile.TarFile):
+ return self.f.getnames()
+ elif isinstance(self.f, rpmfile.RPMFile):
+ return [x.name for x in self.f.getmembers()]
+ else:
+ raise NotImplementedError
+
+ def headers(self):
+ if isinstance(self.f, rpmfile.RPMFile):
+ return self.f.headers
+ else:
+ return dict()
+
+ def extract_to(self, member, path):
+        """ Extract compressed file's \\p member to \\p path.
+            If \\p path is a directory the member's basename will be used as
+            the filename, otherwise path is considered the full file path name. """
+
+ if not os.path.isdir(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+
+ if os.path.isdir(path):
+ path = os.path.join(path, os.path.basename(member))
+
+ with open(path, 'wb') as of:
+ if isinstance(self.f, zipfile.ZipFile):
+ zf = self.f.open(member)
+ else:
+ zf = self.f.extractfile(member)
+
+ while True:
+ b = zf.read(1024 * 100)
+ if b:
+ of.write(b)
+ else:
+ break
+
+ zf.close()
+
+ @classmethod
+ def extract(cls, zpath, member, outpath):
+ """
+ Extract file member (full internal path) to output from
+ archive zpath.
+ """
+
+ with ZFile(zpath) as zf:
+ zf.extract_to(member, outpath)
+
+ @classmethod
+ def compress(cls, zpath, paths, stripcnt=0, ext=None):
+ """
+ Create new compressed file \\p zpath containing files in \\p paths
+ """
+
+ with ZFile(zpath, 'w', ext=ext) as zf:
+ for p in paths:
+ outp = os.path.sep.join(p.split(os.path.sep)[stripcnt:])
+ print('zip %s to %s (stripcnt %d)' % (p, outp, stripcnt))
+ zf.f.write(p, outp)
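+
+
+# Example usage (a sketch; paths are hypothetical):
+#   ZFile.extract('librdkafka.tgz',
+#                 './usr/local/lib/librdkafka.so.1',
+#                 'out/librdkafka.so')
+#   with ZFile('librdkafka.redist.zip') as zf:
+#       print(zf.getnames())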