diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-11 08:28:00 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-11 08:28:00 +0000 |
commit | 3565071f226432336a54d0193d729fa4508a3394 (patch) | |
tree | 4cde13f078f84c0a7785d234fd52edce7c90546a /debian/bin | |
parent | Adding upstream version 6.6.15. (diff) | |
download | linux-3565071f226432336a54d0193d729fa4508a3394.tar.xz linux-3565071f226432336a54d0193d729fa4508a3394.zip |
Adding debian version 6.6.15-2.debian/6.6.15-2
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'debian/bin')
-rwxr-xr-x | debian/bin/buildcheck.py | 111 | ||||
-rwxr-xr-x | debian/bin/check-patches.sh | 28 | ||||
l--------- | debian/bin/debian_linux | 1 | ||||
-rwxr-xr-x | debian/bin/fix-shebang | 12 | ||||
-rwxr-xr-x | debian/bin/gencontrol.py | 719 | ||||
-rwxr-xr-x | debian/bin/genorig.py | 169 | ||||
-rwxr-xr-x | debian/bin/genpatch-lockdown | 109 | ||||
-rwxr-xr-x | debian/bin/genpatch-rt | 160 | ||||
-rwxr-xr-x | debian/bin/getconfig.py | 25 | ||||
-rwxr-xr-x | debian/bin/git-tag-gpg-wrapper | 42 | ||||
-rwxr-xr-x | debian/bin/kconfig.py | 39 | ||||
-rwxr-xr-x | debian/bin/no-depmod | 18 | ||||
-rwxr-xr-x | debian/bin/stable-update | 135 | ||||
-rwxr-xr-x | debian/bin/stable-update.sh | 2 | ||||
-rwxr-xr-x | debian/bin/test-patches | 140 | ||||
-rwxr-xr-x | debian/bin/update-bug-taint-list | 24 |
16 files changed, 1734 insertions, 0 deletions
diff --git a/debian/bin/buildcheck.py b/debian/bin/buildcheck.py new file mode 100755 index 0000000000..92dba9347c --- /dev/null +++ b/debian/bin/buildcheck.py @@ -0,0 +1,111 @@ +#!/usr/bin/python3 + +import sys +import glob +import os + +from debian_linux.config import ConfigCoreDump +from debian_linux.debian import Changelog, VersionLinux + + +class CheckImage(object): + def __init__(self, config, dir, arch, featureset, flavour): + self.dir = dir + self.arch, self.featureset, self.flavour = arch, featureset, flavour + + self.changelog = Changelog(version=VersionLinux)[0] + + self.config_entry_base = config.merge('base', arch, featureset, + flavour) + self.config_entry_build = config.merge('build', arch, featureset, + flavour) + self.config_entry_image = config.merge('image', arch, featureset, + flavour) + + def __call__(self, out): + image = self.config_entry_build.get('image-file') + uncompressed_image = self.config_entry_build \ + .get('uncompressed-image-file') + + if not image: + # TODO: Bail out + return 0 + + image = os.path.join(self.dir, image) + if uncompressed_image: + uncompressed_image = os.path.join(self.dir, uncompressed_image) + + fail = 0 + + fail |= self.check_size(out, image, uncompressed_image) + + return fail + + def check_size(self, out, image, uncompressed_image): + value = self.config_entry_image.get('check-size') + + if not value: + return 0 + + dtb_size = 0 + if self.config_entry_image.get('check-size-with-dtb'): + for dtb in glob.glob( + os.path.join(self.dir, 'arch', + self.config_entry_base['kernel-arch'], + 'boot/dts/*.dtb')): + dtb_size = max(dtb_size, os.stat(dtb).st_size) + + size = os.stat(image).st_size + dtb_size + + # 1% overhead is desirable in order to cope with growth + # through the lifetime of a stable release. Warn if this is + # not the case. + usage = (float(size)/value) * 100.0 + out.write('Image size %d/%d, using %.2f%%. ' % (size, value, usage)) + if size > value: + out.write('Too large. 
Refusing to continue.\n') + return 1 + elif usage >= 99.0: + out.write('Under 1%% space in %s. ' % self.changelog.distribution) + else: + out.write('Image fits. ') + out.write('Continuing.\n') + + # Also check the uncompressed image + if uncompressed_image and \ + self.config_entry_image.get('check-uncompressed-size'): + value = self.config_entry_image.get('check-uncompressed-size') + size = os.stat(uncompressed_image).st_size + usage = (float(size)/value) * 100.0 + out.write('Uncompressed Image size %d/%d, using %.2f%%. ' % + (size, value, usage)) + if size > value: + out.write('Too large. Refusing to continue.\n') + return 1 + elif usage >= 99.0: + out.write('Uncompressed Image Under 1%% space in %s. ' % + self.changelog.distribution) + else: + out.write('Uncompressed Image fits. ') + out.write('Continuing.\n') + + return 0 + + +class Main(object): + def __init__(self, dir, arch, featureset, flavour): + self.args = dir, arch, featureset, flavour + + self.config = ConfigCoreDump(open("debian/config.defines.dump", "rb")) + + def __call__(self): + fail = 0 + + for c in (CheckImage, ): + fail |= c(self.config, *self.args)(sys.stdout) + + return fail + + +if __name__ == '__main__': + sys.exit(Main(*sys.argv[1:])()) diff --git a/debian/bin/check-patches.sh b/debian/bin/check-patches.sh new file mode 100755 index 0000000000..54bb731e9a --- /dev/null +++ b/debian/bin/check-patches.sh @@ -0,0 +1,28 @@ +#!/bin/sh -e + +TMPDIR=$(mktemp -d) +trap "rm -rf $TMPDIR" EXIT +for patchdir in debian/patches*; do + sed '/^#/d; /^[[:space:]]*$/d; /^X /d; s/^+ //; s,^,'"$patchdir"'/,' "$patchdir"/series +done | sort -u > $TMPDIR/used +find debian/patches* ! -path '*/series' -type f -name "*.diff" -o -name "*.patch" -printf "%p\n" | sort > $TMPDIR/avail +echo "Used patches" +echo "==============" +cat $TMPDIR/used +echo +echo "Unused patches" +echo "==============" +grep -F -v -f $TMPDIR/used $TMPDIR/avail || test $? 
= 1 +echo +echo "Patches without required headers" +echo "================================" +xargs grep -E -l '^(Subject|Description):' < $TMPDIR/used | xargs grep -E -l '^(From|Author|Origin):' > $TMPDIR/goodheaders || test $? = 1 +grep -F -v -f $TMPDIR/goodheaders $TMPDIR/used || test $? = 1 +echo +echo "Patches without Origin or Forwarded header" +echo "==========================================" +xargs grep -E -L '^(Origin:|Forwarded: (no\b|not-needed|http))' < $TMPDIR/used || test $? = 1 +echo +echo "Patches to be forwarded" +echo "=======================" +xargs grep -E -l '^Forwarded: no\b' < $TMPDIR/used || test $? = 1 diff --git a/debian/bin/debian_linux b/debian/bin/debian_linux new file mode 120000 index 0000000000..01f3e04dc7 --- /dev/null +++ b/debian/bin/debian_linux @@ -0,0 +1 @@ +../lib/python/debian_linux/
\ No newline at end of file diff --git a/debian/bin/fix-shebang b/debian/bin/fix-shebang new file mode 100755 index 0000000000..edf551fa3a --- /dev/null +++ b/debian/bin/fix-shebang @@ -0,0 +1,12 @@ +#!/usr/bin/perl -pi + +# Change "#!/usr/bin/env perl" to "#!/usr/bin/perl" (policy ยง10.4). +# Other uses of /usr/bin/env should probably be converted as well, but +# policy doesn't specify what to do. +if ($. == 1 && m|^\#!\s*/usr/bin/env\s+(.+)|) { + if ($1 eq "perl") { + $_ = "#!/usr/bin/perl\n"; + } else { + print STDERR "W: Found #!/usr/bin/env $1 and don't know what to substitute\n"; + } +} diff --git a/debian/bin/gencontrol.py b/debian/bin/gencontrol.py new file mode 100755 index 0000000000..f7485df372 --- /dev/null +++ b/debian/bin/gencontrol.py @@ -0,0 +1,719 @@ +#!/usr/bin/python3 + +import sys +import json +import locale +import os +import os.path +import pathlib +import subprocess +import re +import tempfile +from typing import Any + +from debian_linux import config +from debian_linux.debian import \ + PackageRelationEntry, PackageRelationGroup, \ + VersionLinux, BinaryPackage, TestsControl +from debian_linux.gencontrol import Gencontrol as Base, PackagesBundle, \ + iter_featuresets, iter_flavours +from debian_linux.utils import Templates + +locale.setlocale(locale.LC_CTYPE, "C.UTF-8") + + +class Gencontrol(Base): + disable_installer: bool + disable_signed: bool + + tests_control_headers: TestsControl | None + + config_schema = { + 'build': { + 'signed-code': config.SchemaItemBoolean(), + 'vdso': config.SchemaItemBoolean(), + }, + 'description': { + 'parts': config.SchemaItemList(), + }, + 'image': { + 'configs': config.SchemaItemList(), + 'check-size': config.SchemaItemInteger(), + 'check-size-with-dtb': config.SchemaItemBoolean(), + 'check-uncompressed-size': config.SchemaItemInteger(), + 'depends': config.SchemaItemList(','), + 'provides': config.SchemaItemList(','), + 'suggests': config.SchemaItemList(','), + 'recommends': config.SchemaItemList(','), + 
'conflicts': config.SchemaItemList(','), + 'breaks': config.SchemaItemList(','), + }, + 'packages': { + 'docs': config.SchemaItemBoolean(), + 'installer': config.SchemaItemBoolean(), + 'libc-dev': config.SchemaItemBoolean(), + 'meta': config.SchemaItemBoolean(), + 'tools-unversioned': config.SchemaItemBoolean(), + 'tools-versioned': config.SchemaItemBoolean(), + 'source': config.SchemaItemBoolean(), + } + } + + env_flags = [ + ('DEBIAN_KERNEL_DISABLE_INSTALLER', 'disable_installer', 'installer modules'), + ('DEBIAN_KERNEL_DISABLE_SIGNED', 'disable_signed', 'signed code'), + ] + + def __init__(self, config_dirs=["debian/config", "debian/config.local"], + template_dirs=["debian/templates"]) -> None: + super(Gencontrol, self).__init__( + config.ConfigCoreHierarchy(self.config_schema, config_dirs), + Templates(template_dirs), + VersionLinux) + self.process_changelog() + self.config_dirs = config_dirs + + for env, attr, desc in self.env_flags: + setattr(self, attr, False) + if os.getenv(env): + if self.changelog[0].distribution == 'UNRELEASED': + import warnings + warnings.warn(f'Disable {desc} on request ({env} set)') + setattr(self, attr, True) + else: + raise RuntimeError( + f'Unable to disable {desc} in release build ({env} set)') + + def _setup_makeflags(self, names, makeflags, data) -> None: + for src, dst, optional in names: + if src in data or not optional: + makeflags[dst] = data[src] + + def do_main_setup(self, vars, makeflags) -> None: + super(Gencontrol, self).do_main_setup(vars, makeflags) + makeflags.update({ + 'VERSION': self.version.linux_version, + 'UPSTREAMVERSION': self.version.linux_upstream, + 'ABINAME': self.abiname, + 'SOURCEVERSION': self.version.complete, + }) + makeflags['SOURCE_BASENAME'] = vars['source_basename'] + makeflags['SOURCE_SUFFIX'] = vars['source_suffix'] + + # Prepare to generate debian/tests/control + self.tests_control = self.templates.get_tests_control('main.tests-control', vars) + self.tests_control_image = None + 
self.tests_control_headers = None + + def do_main_makefile(self, makeflags) -> None: + for featureset in iter_featuresets(self.config): + makeflags_featureset = makeflags.copy() + makeflags_featureset['FEATURESET'] = featureset + + self.bundle.makefile.add_rules(f'source_{featureset}', + 'source', makeflags_featureset) + self.bundle.makefile.add_deps('source', [f'source_{featureset}']) + + makeflags = makeflags.copy() + makeflags['ALL_FEATURESETS'] = ' '.join(iter_featuresets(self.config)) + super().do_main_makefile(makeflags) + + def do_main_packages(self, vars, makeflags) -> None: + self.bundle.add('main', (), makeflags, vars) + + # Only build the metapackages if their names won't exactly match + # the packages they depend on + do_meta = self.config.merge('packages').get('meta', True) \ + and vars['source_suffix'] != '-' + vars['version'] + + if self.config.merge('packages').get('docs', True): + self.bundle.add('docs', (), makeflags, vars) + if do_meta: + self.bundle.add('docs.meta', (), makeflags, vars) + if self.config.merge('packages').get('source', True): + self.bundle.add('sourcebin', (), makeflags, vars) + if do_meta: + self.bundle.add('sourcebin.meta', (), makeflags, vars) + + if self.config.merge('packages').get('libc-dev', True): + libcdev_kernelarches = set() + libcdev_multiarches = set() + for arch in iter(self.config['base', ]['arches']): + libcdev_kernelarch = self.config['base', arch]['kernel-arch'] + libcdev_multiarch = subprocess.check_output( + ['dpkg-architecture', '-f', '-a', arch, + '-q', 'DEB_HOST_MULTIARCH'], + stderr=subprocess.DEVNULL, + encoding='utf-8').strip() + libcdev_kernelarches.add(libcdev_kernelarch) + libcdev_multiarches.add(f'{libcdev_multiarch}:{libcdev_kernelarch}') + + libcdev_makeflags = makeflags.copy() + libcdev_makeflags['ALL_LIBCDEV_KERNELARCHES'] = ' '.join(sorted(libcdev_kernelarches)) + libcdev_makeflags['ALL_LIBCDEV_MULTIARCHES'] = ' '.join(sorted(libcdev_multiarches)) + + self.bundle.add('libc-dev', (), 
libcdev_makeflags, vars) + + def do_indep_featureset_setup(self, vars, makeflags, featureset) -> None: + makeflags['LOCALVERSION'] = vars['localversion'] + kernel_arches = set() + for arch in iter(self.config['base', ]['arches']): + if self.config.get_merge('base', arch, featureset, None, + 'flavours'): + kernel_arches.add(self.config['base', arch]['kernel-arch']) + makeflags['ALL_KERNEL_ARCHES'] = ' '.join(sorted(list(kernel_arches))) + + vars['featureset_desc'] = '' + if featureset != 'none': + desc = self.config[('description', None, featureset)] + desc_parts = desc['parts'] + vars['featureset_desc'] = (' with the %s featureset' % + desc['part-short-%s' % desc_parts[0]]) + + def do_indep_featureset_packages(self, featureset, vars, makeflags) -> None: + self.bundle.add('headers.featureset', (featureset, ), makeflags, vars) + + arch_makeflags = ( + ('kernel-arch', 'KERNEL_ARCH', False), + ) + + def do_arch_setup(self, vars, makeflags, arch) -> None: + config_base = self.config.merge('base', arch) + + self._setup_makeflags(self.arch_makeflags, makeflags, config_base) + + try: + gnu_type = subprocess.check_output( + ['dpkg-architecture', '-f', '-a', arch, + '-q', 'DEB_HOST_GNU_TYPE'], + stderr=subprocess.DEVNULL, + encoding='utf-8') + except subprocess.CalledProcessError: + # This sometimes happens for the newest ports :-/ + print('W: Unable to get GNU type for %s' % arch, file=sys.stderr) + else: + vars['gnu-type-package'] = gnu_type.strip().replace('_', '-') + + def do_arch_packages(self, arch, vars, makeflags) -> None: + if not self.disable_signed: + build_signed = self.config.merge('build', arch) \ + .get('signed-code', False) + else: + build_signed = False + + if build_signed: + # Make sure variables remain + vars['signedtemplate_binaryversion'] = '@signedtemplate_binaryversion@' + vars['signedtemplate_sourceversion'] = '@signedtemplate_sourceversion@' + + self.bundle.add('signed-template', (arch,), makeflags, vars, arch=arch) + + bundle_signed = 
self.bundles[f'signed-{arch}'] = \ + PackagesBundle(f'signed-{arch}', self.templates) + bundle_signed.packages['source'] = \ + self.templates.get_source_control('signed.source.control', vars)[0] + + with bundle_signed.open('source/lintian-overrides', 'w') as f: + f.write(self.substitute( + self.templates.get('signed.source.lintian-overrides'), vars)) + + with bundle_signed.open('changelog.head', 'w') as f: + dist = self.changelog[0].distribution + urgency = self.changelog[0].urgency + f.write(f'''\ +linux-signed-{vars['arch']} (@signedtemplate_sourceversion@) {dist}; urgency={urgency} + + * Sign kernel from {self.changelog[0].source} @signedtemplate_binaryversion@ +''') + + if self.config['base', arch].get('featuresets') and \ + self.config.merge('packages').get('source', True): + self.bundle.add('config', (arch, ), makeflags, vars) + + if self.config.merge('packages').get('tools-unversioned', True): + self.bundle.add('tools-unversioned', (arch, ), makeflags, vars) + + if self.config.merge('packages').get('tools-versioned', True): + self.bundle.add('tools-versioned', (arch, ), makeflags, vars) + + def do_featureset_setup(self, vars, makeflags, arch, featureset) -> None: + vars['localversion_headers'] = vars['localversion'] + makeflags['LOCALVERSION_HEADERS'] = vars['localversion_headers'] + + self.default_flavour = self.config.merge('base', arch, featureset) \ + .get('default-flavour') + if self.default_flavour is not None: + if featureset != 'none': + raise RuntimeError("default-flavour set for %s %s," + " but must only be set for featureset none" + % (arch, featureset)) + if self.default_flavour \ + not in iter_flavours(self.config, arch, featureset): + raise RuntimeError("default-flavour %s for %s %s does not exist" + % (self.default_flavour, arch, featureset)) + + self.quick_flavour = self.config.merge('base', arch, featureset) \ + .get('quick-flavour') + + flavour_makeflags_base = ( + ('compiler', 'COMPILER', False), + ('compiler-filename', 'COMPILER', True), 
+ ('kernel-arch', 'KERNEL_ARCH', False), + ('cflags', 'KCFLAGS', True), + ('kernel-deb-arch', 'KERNEL_DEB_ARCH', True), + ('kernel-gnu-type', 'KERNEL_GNU_TYPE', True), + ('compat-deb-arch', 'COMPAT_DEB_ARCH', True), + ('compat-gnu-type', 'COMPAT_GNU_TYPE', True), + ) + + flavour_makeflags_build = ( + ('image-file', 'IMAGE_FILE', True), + ) + + flavour_makeflags_image = ( + ('install-stem', 'IMAGE_INSTALL_STEM', True), + ) + + flavour_makeflags_other = ( + ('localversion', 'LOCALVERSION', False), + ('localversion-image', 'LOCALVERSION_IMAGE', True), + ) + + def do_flavour_setup(self, vars, makeflags, arch, featureset, flavour) -> None: + config_base = self.config.merge('base', arch, featureset, flavour) + config_build = self.config.merge('build', arch, featureset, flavour) + config_description = self.config.merge('description', arch, featureset, + flavour) + config_image = self.config.merge('image', arch, featureset, flavour) + + vars['flavour'] = vars['localversion'][1:] + vars['class'] = config_description['hardware'] + vars['longclass'] = (config_description.get('hardware-long') + or vars['class']) + + vars['localversion-image'] = vars['localversion'] + override_localversion = config_image.get('override-localversion', None) + if override_localversion is not None: + vars['localversion-image'] = (vars['localversion_headers'] + '-' + + override_localversion) + vars['image-stem'] = config_image.get('install-stem') + + self._setup_makeflags(self.flavour_makeflags_base, makeflags, + config_base) + self._setup_makeflags(self.flavour_makeflags_build, makeflags, + config_build) + self._setup_makeflags(self.flavour_makeflags_image, makeflags, + config_image) + self._setup_makeflags(self.flavour_makeflags_other, makeflags, vars) + + def do_flavour_packages(self, arch, featureset, + flavour, vars, makeflags) -> None: + ruleid = (arch, featureset, flavour) + + packages_headers = ( + self.bundle.add('headers', ruleid, makeflags, vars, arch=arch) + ) + assert 
len(packages_headers) == 1 + + do_meta = self.config.merge('packages').get('meta', True) + config_entry_base = self.config.merge('base', arch, featureset, + flavour) + config_entry_build = self.config.merge('build', arch, featureset, + flavour) + config_entry_description = self.config.merge('description', arch, + featureset, flavour) + config_entry_packages = self.config.merge('packages', arch, featureset, + flavour) + + def config_entry_image(key, *args, **kwargs) -> Any: + return self.config.get_merge( + 'image', arch, featureset, flavour, key, *args, **kwargs) + + compiler = config_entry_base.get('compiler', 'gcc') + + relation_compiler = PackageRelationEntry(compiler) + + relation_compiler_header = PackageRelationGroup([relation_compiler]) + + # Generate compiler build-depends for native: + # gcc-13 [arm64] <!cross !pkg.linux.nokernel> + self.bundle.packages['source']['Build-Depends-Arch'].merge([ + PackageRelationEntry( + relation_compiler, + arches={arch}, + restrictions='<!cross !pkg.linux.nokernel>', + ) + ]) + + # Generate compiler build-depends for cross: + # gcc-13-aarch64-linux-gnu [arm64] <cross !pkg.linux.nokernel> + self.bundle.packages['source']['Build-Depends-Arch'].merge([ + PackageRelationEntry( + relation_compiler, + name=f'{relation_compiler.name}-{vars["gnu-type-package"]}', + arches={arch}, + restrictions='<cross !pkg.linux.nokernel>', + ) + ]) + + # Generate compiler build-depends for kernel: + # gcc-13-hppa64-linux-gnu [hppa] <!pkg.linux.nokernel> + if gnutype := config_entry_base.get('kernel-gnu-type'): + self.bundle.packages['source']['Build-Depends-Arch'].merge([ + PackageRelationEntry( + relation_compiler, + name=f'{relation_compiler.name}-{gnutype}', + arches={arch}, + restrictions='<!pkg.linux.nokernel>', + ) + ]) + + # Generate compiler build-depends for compat: + # gcc-arm-linux-gnueabihf [arm64] <!pkg.linux.nokernel> + # XXX: Linux uses various definitions for this, all ending with "gcc", not $CC + if gnutype := 
config_entry_base.get('compat-gnu-type'): + self.bundle.packages['source']['Build-Depends-Arch'].merge([ + PackageRelationEntry( + f'gcc-{gnutype}', + arches={arch}, + restrictions='<!pkg.linux.nokernel>', + ) + ]) + + packages_own = [] + + if not self.disable_signed: + build_signed = config_entry_build.get('signed-code') + else: + build_signed = False + + if build_signed: + bundle_signed = self.bundles[f'signed-{arch}'] + else: + bundle_signed = self.bundle + + vars.setdefault('desc', None) + + packages_image = [] + + if build_signed: + packages_image.extend( + bundle_signed.add('signed.image', ruleid, makeflags, vars, arch=arch)) + packages_image.extend( + self.bundle.add('image-unsigned', ruleid, makeflags, vars, arch=arch)) + + else: + packages_image.extend(bundle_signed.add('image', ruleid, makeflags, vars, arch=arch)) + + for field in ('Depends', 'Provides', 'Suggests', 'Recommends', + 'Conflicts', 'Breaks'): + for i in config_entry_image(field.lower(), ()): + for package_image in packages_image: + package_image.setdefault(field).merge( + PackageRelationGroup(i, arches={arch}) + ) + + for field in ('Depends', 'Suggests', 'Recommends'): + for i in config_entry_image(field.lower(), ()): + group = PackageRelationGroup(i, arches={arch}) + for entry in group: + if entry.operator is not None: + entry.operator = -entry.operator + for package_image in packages_image: + package_image.setdefault('Breaks').append(PackageRelationGroup([entry])) + + desc_parts = self.config.get_merge('description', arch, featureset, + flavour, 'parts') + if desc_parts: + # XXX: Workaround, we need to support multiple entries of the same + # name + parts = list(set(desc_parts)) + parts.sort() + for package_image in packages_image: + desc = package_image['Description'] + for part in parts: + desc.append(config_entry_description['part-long-' + part]) + desc.append_short(config_entry_description + .get('part-short-' + part, '')) + + 
packages_headers[0]['Depends'].merge(relation_compiler_header) + packages_own.extend(packages_image) + packages_own.extend(packages_headers) + + # The image meta-packages will depend on signed linux-image + # packages where applicable, so should be built from the + # signed source packages The header meta-packages will also be + # built along with the signed packages, to create a dependency + # relationship that ensures src:linux and src:linux-signed-* + # transition to testing together. + if do_meta: + packages_meta = ( + bundle_signed.add('image.meta', ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_meta) == 1 + packages_meta += ( + bundle_signed.add(build_signed and 'signed.headers.meta' or 'headers.meta', + ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_meta) == 2 + + if flavour == self.default_flavour \ + and not self.vars['source_suffix']: + packages_meta[0].setdefault('Provides') \ + .append('linux-image-generic') + packages_meta[1].setdefault('Provides') \ + .append('linux-headers-generic') + + packages_own.extend(packages_meta) + + if config_entry_build.get('vdso', False): + makeflags['VDSO'] = True + + packages_own.extend( + self.bundle.add('image-dbg', ruleid, makeflags, vars, arch=arch) + ) + if do_meta: + packages_own.extend( + self.bundle.add('image-dbg.meta', ruleid, makeflags, vars, arch=arch) + ) + + # In a quick build, only build the quick flavour (if any). 
+ if flavour != self.quick_flavour: + for package in packages_own: + package['Build-Profiles'][0].neg.add('pkg.linux.quick') + + tests_control = self.templates.get_tests_control('image.tests-control', vars)[0] + tests_control['Depends'].merge( + PackageRelationGroup(package_image['Package'], + arches={arch})) + if self.tests_control_image: + for i in tests_control['Depends']: + self.tests_control_image['Depends'].merge(i) + else: + self.tests_control_image = tests_control + self.tests_control.append(tests_control) + + if flavour == (self.quick_flavour or self.default_flavour): + if not self.tests_control_headers: + self.tests_control_headers = \ + self.templates.get_tests_control('headers.tests-control', vars)[0] + self.tests_control.append(self.tests_control_headers) + assert self.tests_control_headers is not None + self.tests_control_headers['Architecture'].add(arch) + self.tests_control_headers['Depends'].merge( + PackageRelationGroup(packages_headers[0]['Package'], + arches={arch})) + + def get_config(*entry_name) -> Any: + entry_real = ('image',) + entry_name + entry = self.config.get(entry_real, None) + if entry is None: + return None + return entry.get('configs', None) + + def check_config_default(fail, f) -> list[str]: + for d in self.config_dirs[::-1]: + f1 = d + '/' + f + if os.path.exists(f1): + return [f1] + if fail: + raise RuntimeError("%s unavailable" % f) + return [] + + def check_config_files(files) -> list[str]: + ret = [] + for f in files: + for d in self.config_dirs[::-1]: + f1 = d + '/' + f + if os.path.exists(f1): + ret.append(f1) + break + else: + raise RuntimeError("%s unavailable" % f) + return ret + + def check_config(default, fail, *entry_name) -> list[str]: + configs = get_config(*entry_name) + if configs is None: + return check_config_default(fail, default) + return check_config_files(configs) + + kconfig = check_config('config', True) + # XXX: We have no way to override kernelarch-X configs + kconfig.extend(check_config_default(False, 
+ "kernelarch-%s/config" % config_entry_base['kernel-arch'])) + kconfig.extend(check_config("%s/config" % arch, True, arch)) + kconfig.extend(check_config("%s/config.%s" % (arch, flavour), False, + arch, None, flavour)) + kconfig.extend(check_config("featureset-%s/config" % featureset, False, + None, featureset)) + kconfig.extend(check_config("%s/%s/config" % (arch, featureset), False, + arch, featureset)) + kconfig.extend(check_config("%s/%s/config.%s" % + (arch, featureset, flavour), False, + arch, featureset, flavour)) + makeflags['KCONFIG'] = ' '.join(kconfig) + makeflags['KCONFIG_OPTIONS'] = '' + # Add "salt" to fix #872263 + makeflags['KCONFIG_OPTIONS'] += \ + ' -o "BUILD_SALT=\\"%(abiname)s%(localversion)s\\""' % vars + + merged_config = ('debian/build/config.%s_%s_%s' % + (arch, featureset, flavour)) + self.bundle.makefile.add_cmds(merged_config, + ["$(MAKE) -f debian/rules.real %s %s" % + (merged_config, makeflags)]) + + if not self.disable_installer and config_entry_packages.get('installer'): + with tempfile.TemporaryDirectory(prefix='linux-gencontrol') as config_dir: + base_path = pathlib.Path('debian/installer').absolute() + config_path = pathlib.Path(config_dir) + (config_path / 'modules').symlink_to(base_path / 'modules') + (config_path / 'package-list').symlink_to(base_path / 'package-list') + + with (config_path / 'kernel-versions').open('w') as versions: + versions.write(f'{arch} - {vars["flavour"]} - - -\n') + + # Add udebs using kernel-wedge + kw_env = os.environ.copy() + kw_env['KW_DEFCONFIG_DIR'] = config_dir + kw_env['KW_CONFIG_DIR'] = config_dir + kw_proc = subprocess.Popen( + ['kernel-wedge', 'gen-control', vars['abiname']], + stdout=subprocess.PIPE, + text=True, + env=kw_env) + udeb_packages_base = BinaryPackage.read_rfc822(kw_proc.stdout) + kw_proc.wait() + if kw_proc.returncode != 0: + raise RuntimeError('kernel-wedge exited with code %d' % + kw_proc.returncode) + + udeb_packages = [] + for package_base in udeb_packages_base: + package = 
package_base.copy() + # kernel-wedge currently chokes on Build-Profiles so add it now + package['Build-Profiles'] = ( + '<!noudeb !pkg.linux.nokernel !pkg.linux.quick>') + package.meta['rules-target'] = 'installer' + udeb_packages.append(package) + + makeflags_local = makeflags.copy() + makeflags_local['IMAGE_PACKAGE_NAME'] = udeb_packages[0]['Package'] + + bundle_signed.add_packages( + udeb_packages, + (arch, featureset, flavour), + makeflags_local, arch=arch, + ) + + if build_signed: + udeb_packages = [] + # XXX This is a hack to exclude the udebs from + # the package list while still being able to + # convince debhelper and kernel-wedge to go + # part way to building them. + for package_base in udeb_packages_base: + package = package_base.copy() + # kernel-wedge currently chokes on Build-Profiles so add it now + package['Build-Profiles'] = ( + '<pkg.linux.udeb-unsigned-test-build !noudeb' + ' !pkg.linux.nokernel !pkg.linux.quick>') + package.meta['rules-target'] = 'installer-test' + udeb_packages.append(package) + + self.bundle.add_packages( + udeb_packages, + (arch, featureset, flavour), + makeflags, arch=arch, check_packages=False, + ) + + def process_changelog(self) -> None: + version = self.version = self.changelog[0].version + + if self.changelog[0].distribution == 'UNRELEASED': + self.abiname = f'{version.linux_upstream}+unreleased' + elif self.changelog[0].distribution == 'experimental': + self.abiname = f'{version.linux_upstream}' + elif version.linux_revision_backports: + self.abiname = f'{version.linux_upstream_full}+bpo' + else: + self.abiname = f'{version.linux_upstream_full}' + + self.vars = { + 'upstreamversion': self.version.linux_upstream, + 'version': self.version.linux_version, + 'version_complete': self.version.complete, + 'source_basename': re.sub(r'-[\d.]+$', '', + self.changelog[0].source), + 'source_upstream': self.version.upstream, + 'source_package': self.changelog[0].source, + 'abiname': self.abiname, + } + self.vars['source_suffix'] = 
\ + self.changelog[0].source[len(self.vars['source_basename']):] + self.config['version', ] = {'source': self.version.complete, + 'upstream': self.version.linux_upstream, + 'abiname_base': self.abiname, + 'abiname': self.abiname} + + distribution = self.changelog[0].distribution + if distribution in ('unstable', ): + if version.linux_revision_experimental or \ + version.linux_revision_backports or \ + version.linux_revision_other: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + if distribution in ('experimental', ): + if not version.linux_revision_experimental: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + if distribution.endswith('-security') or distribution.endswith('-lts'): + if version.linux_revision_backports or \ + version.linux_revision_other: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + if distribution.endswith('-backports'): + if not version.linux_revision_backports: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + + def write(self) -> None: + self.write_config() + super().write() + self.write_tests_control() + self.write_signed() + + def write_config(self) -> None: + f = open("debian/config.defines.dump", 'wb') + self.config.dump(f) + f.close() + + def write_signed(self) -> None: + for bundle in self.bundles.values(): + pkg_sign_entries = {} + + for p in bundle.packages.values(): + if pkg_sign_pkg := p.meta.get('sign-package'): + pkg_sign_entries[pkg_sign_pkg] = { + 'trusted_certs': [], + 'files': [ + { + 'sig_type': e.split(':', 1)[-1], + 'file': e.split(':', 1)[0], + } + for e in p.meta['sign-files'].split() + ], + } + + if pkg_sign_entries: + with bundle.path('files.json').open('w') as f: + json.dump({'packages': pkg_sign_entries}, f, indent=2) + + def write_tests_control(self) -> None: + self.bundle.write_rfc822(open("debian/tests/control", 'w'), + 
self.tests_control) + + +if __name__ == '__main__': + Gencontrol()() diff --git a/debian/bin/genorig.py b/debian/bin/genorig.py new file mode 100755 index 0000000000..9bf43a34e2 --- /dev/null +++ b/debian/bin/genorig.py @@ -0,0 +1,169 @@ +#!/usr/bin/python3 + +import sys +from debian import deb822 +import glob +import os +import os.path +import shutil +import subprocess +import time +import warnings + +from debian_linux.debian import Changelog, VersionLinux + + +class Main(object): + def __init__(self, input_repo, override_version): + self.log = sys.stdout.write + + self.input_repo = input_repo + + changelog = Changelog(version=VersionLinux)[0] + source = changelog.source + version = changelog.version + + if override_version: + version = VersionLinux('%s-0' % override_version) + + self.version_dfsg = version.linux_dfsg + if self.version_dfsg is None: + self.version_dfsg = '0' + + self.log('Using source name %s, version %s, dfsg %s\n' % + (source, version.upstream, self.version_dfsg)) + + self.orig = '%s-%s' % (source, version.upstream) + self.orig_tar = '%s_%s.orig.tar.xz' % (source, version.upstream) + self.tag = 'v' + version.linux_upstream_full + + def __call__(self): + import tempfile + temp_dir = tempfile.mkdtemp(prefix='genorig', dir='debian') + old_umask = os.umask(0o022) + try: + # When given a remote repo, we need a local copy. + if not self.input_repo.startswith('/') and ':' in self.input_repo: + temp_repo = os.path.join(temp_dir, 'git') + subprocess.run( + ['git', 'clone', '--bare', '--depth=1', '-b', self.tag, + self.input_repo, temp_repo], + check=True) + self.input_repo = temp_repo + + self.dir = os.path.join(temp_dir, 'export') + os.mkdir(self.dir) + self.upstream_export(self.input_repo) + + # exclude_files() will change dir mtimes. Capture the + # original release time so we can apply it to the final + # tarball. 
+ orig_date = time.strftime( + "%a, %d %b %Y %H:%M:%S +0000", + time.gmtime( + os.stat(os.path.join(self.dir, self.orig, 'Makefile')) + .st_mtime)) + + self.exclude_files() + os.umask(old_umask) + self.tar(orig_date) + finally: + os.umask(old_umask) + shutil.rmtree(temp_dir) + + def upstream_export(self, input_repo): + self.log("Exporting %s from %s\n" % (self.tag, input_repo)) + + gpg_wrapper = os.path.join(os.getcwd(), + "debian/bin/git-tag-gpg-wrapper") + verify_proc = subprocess.Popen(['git', + '-c', 'gpg.program=%s' % gpg_wrapper, + 'tag', '-v', self.tag], + cwd=input_repo) + if verify_proc.wait(): + raise RuntimeError("GPG tag verification failed") + + archive_proc = subprocess.Popen(['git', 'archive', '--format=tar', + '--prefix=%s/' % self.orig, self.tag], + cwd=input_repo, + stdout=subprocess.PIPE) + extract_proc = subprocess.Popen(['tar', '-xaf', '-'], cwd=self.dir, + stdin=archive_proc.stdout) + + ret1 = archive_proc.wait() + ret2 = extract_proc.wait() + if ret1 or ret2: + raise RuntimeError("Can't create archive") + + def exclude_files(self): + self.log("Excluding file patterns specified in debian/copyright\n") + with open("debian/copyright") as f: + header = deb822.Deb822(f) + patterns = header.get("Files-Excluded", '').strip().split() + for pattern in patterns: + matched = False + for name in glob.glob(os.path.join(self.dir, self.orig, pattern)): + try: + shutil.rmtree(name) + except NotADirectoryError: + os.unlink(name) + matched = True + if not matched: + warnings.warn("Exclusion pattern '%s' did not match anything" + % pattern, + RuntimeWarning) + + def tar(self, orig_date): + out = os.path.join("../orig", self.orig_tar) + try: + os.mkdir("../orig") + except OSError: + pass + try: + os.stat(out) + raise RuntimeError("Destination already exists") + except OSError: + pass + self.log("Generate tarball %s\n" % out) + + env = os.environ.copy() + env.update({ + 'LC_ALL': 'C', + }) + cmd = [ + 'tar', + '-C', self.dir, + '--sort=name', + 
#!/usr/bin/python3

# Regenerate the lockdown patch series under debian/patches from the
# commits in a git revision range, injecting an Origin header into each
# patch and rewriting debian/patches/series accordingly.

import io
import os
import os.path
import re
import subprocess
import sys


def main(repo, range='torvalds/master..dhowells/efi-lock-down'):
    """Export REPO's commits in *range* as the lockdown patch series.

    repo: path to a git repository containing the revision range.
    range: git revision range to export.  (The parameter name shadows
    the builtin; kept unchanged for backward compatibility.)
    """
    patch_dir = 'debian/patches'
    lockdown_patch_dir = 'features/all/lockdown'
    series_name = 'series'

    # Only replace patches in this subdirectory and starting with a digit
    # - the others are presumably Debian-specific for now
    lockdown_patch_name_re = re.compile(
        r'^' + re.escape(lockdown_patch_dir) + r'/\d')
    series_before = []
    series_after = []

    old_series = set()
    new_series = set()

    # Split the existing series into the parts before, within and after
    # the lockdown patches.
    try:
        with open(os.path.join(patch_dir, series_name), 'r') as series_fh:
            for line in series_fh:
                name = line.strip()
                if lockdown_patch_name_re.match(name):
                    old_series.add(name)
                elif len(old_series) == 0:
                    series_before.append(line)
                else:
                    series_after.append(line)
    except FileNotFoundError:
        pass

    with open(os.path.join(patch_dir, series_name), 'w') as series_fh:
        for line in series_before:
            series_fh.write(line)

        # Add directory prefix to all filenames.
        # Add Origin to all patch headers.
        def add_patch(name, source_patch, origin):
            name = os.path.join(lockdown_patch_dir, name)
            path = os.path.join(patch_dir, name)
            try:
                os.unlink(path)
            except FileNotFoundError:
                pass
            with open(path, 'w') as patch:
                in_header = True
                for line in source_patch:
                    # First line that cannot be part of the RFC822-ish
                    # header ends it; inject Origin just before.
                    if in_header and re.match(r'^(\n|[^\w\s]|Index:)', line):
                        patch.write('Origin: %s\n' % origin)
                        if line != '\n':
                            patch.write('\n')
                        in_header = False
                    patch.write(line)
            series_fh.write(name)
            series_fh.write('\n')
            new_series.add(name)

        # XXX No signature to verify

        env = os.environ.copy()
        env['GIT_DIR'] = os.path.join(repo, '.git')
        args = ['git', 'format-patch', '--subject-prefix=', range]
        format_proc = subprocess.Popen(args,
                                       cwd=os.path.join(patch_dir,
                                                        lockdown_patch_dir),
                                       env=env, stdout=subprocess.PIPE)
        with io.open(format_proc.stdout.fileno(), encoding='utf-8') as pipe:
            for line in pipe:
                name = line.strip('\n')
                with open(os.path.join(patch_dir, lockdown_patch_dir, name)) \
                        as source_patch:
                    patch_from = source_patch.readline()
                    match = re.match(r'From ([0-9a-f]{40}) ', patch_from)
                    assert match
                    origin = ('https://git.kernel.org/pub/scm/linux/kernel/'
                              'git/dhowells/linux-fs.git/commit?id=%s' %
                              match.group(1))
                    add_patch(name, source_patch, origin)

        for line in series_after:
            series_fh.write(line)

    # Report the changes relative to the previous series.
    for name in new_series:
        if name in old_series:
            old_series.remove(name)
        else:
            print('Added patch', os.path.join(patch_dir, name))

    for name in old_series:
        print('Obsoleted patch', os.path.join(patch_dir, name))


if __name__ == '__main__':
    if not (2 <= len(sys.argv) <= 3):
        sys.stderr.write('''\
Usage: %s REPO [REVISION-RANGE]
REPO is a git repo containing the REVISION-RANGE. The default range is
torvalds/master..dhowells/efi-lock-down.
''' % sys.argv[0])
        sys.exit(2)
    main(*sys.argv[1:])
'format-patch', + 'v%s..v%s-rebase' % (up_ver, version)] + format_proc = subprocess.Popen(args, + cwd=patch_dir, + env=env, stdout=subprocess.PIPE) + with io.open(format_proc.stdout.fileno(), encoding='utf-8') \ + as pipe: + for line in pipe: + name = line.strip('\n') + with open(os.path.join(patch_dir, name)) as source_patch: + patch_from = source_patch.readline() + match = re.match(r'From ([0-9a-f]{40}) ', patch_from) + assert match + origin = ('https://git.kernel.org/cgit/linux/kernel/' + 'git/rt/linux-stable-rt.git/commit?id=%s' % + match.group(1)) + add_patch(name, source_patch, origin) + series_fh.write(line) + + else: + # Get version and upstream version + if version is None: + match = re.search(r'(?:^|/)patches-(.+)\.tar\.[gx]z$', source) + assert match, 'no version specified or found in filename' + version = match.group(1) + match = re.match(r'^(\d+\.\d+)(?:\.\d+|-rc\d+)?-rt\d+$', version) + assert match, 'could not parse version string' + up_ver = match.group(1) + + if verify_signature: + # Expect an accompanying signature, and validate it + source_sig = re.sub(r'.[gx]z$', '.sign', source) + unxz_proc = subprocess.Popen(['xzcat', source], + stdout=subprocess.PIPE) + verify_output = subprocess.check_output( + ['gpgv', '--status-fd', '1', + '--keyring', 'debian/upstream/rt-signing-key.pgp', + '--ignore-time-conflict', source_sig, '-'], + stdin=unxz_proc.stdout, + text=True) + if unxz_proc.wait() or \ + not re.search(r'^\[GNUPG:\]\s+VALIDSIG\s', + verify_output, re.MULTILINE): + sys.stderr.write(verify_output) + raise RuntimeError("GPG signature verification failed") + + temp_dir = tempfile.mkdtemp(prefix='rt-genpatch', dir='debian') + try: + # Unpack tarball + subprocess.check_call(['tar', '-C', temp_dir, '-xaf', source]) + source_dir = os.path.join(temp_dir, 'patches') + assert os.path.isdir(source_dir), \ + 'tarball does not contain patches directory' + + # Copy patch series + origin = ('https://www.kernel.org/pub/linux/kernel/projects/' + 
#!/usr/bin/python3

"""Look up a single value in the dumped packaging configuration.

Usage: getconfig.py [SECTION-COMPONENT]... KEY

Reads debian/config.defines.dump (written by gencontrol.py) and prints
the value stored under the given section tuple and key.  Exits with
status 1 if the section or key does not exist.
"""

import sys

from debian_linux.config import ConfigCoreDump


def main(argv):
    """Print the requested config value; exit 1 when it is absent."""
    # Empty section components on the command line select None,
    # i.e. the unnamed part of the section tuple.
    section = tuple(s or None for s in argv[1:-1])
    key = argv[-1]
    # NOTE(review): the dump handle is passed straight to ConfigCoreDump
    # and, as before, left for interpreter exit to close — confirm
    # whether ConfigCoreDump reads it eagerly before tightening this.
    config = ConfigCoreDump(fp=open("debian/config.defines.dump", "rb"))
    try:
        value = config[section][key]
    except KeyError:
        sys.exit(1)

    if isinstance(value, str):
        # Don't iterate over it
        print(value)
    else:
        # In case it's a sequence, try printing each item
        try:
            for item in value:
                print(item)
        except TypeError:
            # Otherwise use the default format
            print(value)


if __name__ == '__main__':
    # Guarding the entry point keeps the module importable/testable;
    # CLI behaviour is unchanged.
    main(sys.argv)
def merge(output, configs, overrides):
    """Merge kernel config fragments and write the combined config.

    output: path of the config file to write.
    configs: iterable of input config file paths, read in order (later
    files take effect after earlier ones via KconfigFile.read).
    overrides: mapping of option name to value applied last.
    """
    kconfig = KconfigFile()
    for config in configs:
        # Close each input promptly instead of leaking the handle.
        with open(config) as f:
            kconfig.read(f)
    for key, value in overrides.items():
        kconfig.set(key, value)
    # Likewise ensure the output is flushed and closed deterministically.
    with open(output, "w") as f:
        f.write(str(kconfig))


def opt_callback_dict(option, opt, value, parser):
    """optparse callback collecting repeated KEY=VALUE options.

    Parses *value* as KEY=VALUE (no whitespace inside either part) and
    stores the pair into the dict held in the option's destination.
    Raises optparse.OptionValueError for malformed values.
    """
    match = re.match(r'^\s*(\S+)=(\S+)\s*$', value)
    if not match:
        raise optparse.OptionValueError('not key=value')
    dest = option.dest
    data = getattr(parser.values, dest)
    data[match.group(1)] = match.group(2)
def base_version(ver):
    """Return the MAJOR.MINOR prefix of a kernel version string."""
    # Versions are assumed to be >= 3.0, so exactly two components
    # form the base.
    m = re.match(r'^(\d+\.\d+)', ver)
    assert m
    return m.group(1)


def add_update(ver, inc):
    """Shift *ver*'s stable-update number by *inc*.

    '6.6.15' + 1 -> '6.6.16'; a result of 0 collapses back to the
    bare base version ('6.6.1' - 1 -> '6.6').
    """
    base = base_version(ver)
    # No third component means update number 0.
    current = 0 if base == ver else int(ver[len(base) + 1:])
    bumped = current + inc
    return base if bumped == 0 else '{}.{}'.format(base, bumped)


def next_update(ver):
    """Return the stable update immediately following *ver*."""
    return add_update(ver, 1)
def main(repo, new_ver):
    """Update debian/changelog for the stable release *new_ver*.

    repo: git repository (work tree or git dir) used to list upstream
    commits between the current and new stable versions.
    new_ver: new upstream stable version, without a leading 'v'.
    """
    # Point git at the repository; accept a work tree or a bare dir.
    git_dir = os.path.join(repo, '.git')
    if os.path.exists(git_dir):
        os.environ['GIT_DIR'] = git_dir
    else:
        os.environ['GIT_DIR'] = repo

    changelog = Changelog(version=VersionLinux)
    entry = changelog[0]
    cur_pkg_ver = entry.version
    cur_ver = cur_pkg_ver.linux_upstream_full

    # A stable update never crosses a base (MAJOR.MINOR) series.
    if base_version(new_ver) != base_version(cur_ver):
        print('{} is not on the same stable series as {}'
              .format(new_ver, cur_ver),
              file=sys.stderr)
        sys.exit(2)

    new_pkg_ver = new_ver + '-1'
    if cur_pkg_ver.linux_revision_experimental:
        new_pkg_ver += '~exp1'

    # Three possible cases:
    # 1. The current version has been released so we need to add a new
    #    version to the changelog.
    # 2. The current version has not been released so we're changing its
    #    version string.
    #    (a) There are no stable updates included in the current version,
    #        so we need to insert an introductory line, the URL(s) and
    #        git log(s) and a blank line at the top.
    #    (b) One or more stable updates are already included in the
    #        current version, so we need to insert the URL(s) and git
    #        log(s) after them.

    changelog_intro = 'New upstream stable update:'

    # Case 1: open a fresh UNRELEASED entry first.
    if entry.distribution != 'UNRELEASED':
        subprocess.check_call(['dch', '-v', new_pkg_ver, '-D', 'UNRELEASED',
                               changelog_intro])

    with open('debian/changelog', 'r') as old_log, \
            open('debian/changelog.new', 'w') as new_log:
        inserted = False
        intro_line = ' * {}\n'.format(changelog_intro)

        for lineno, line in enumerate(old_log, 1):
            # Case 2: rewrite the version on the heading line.
            if entry.distribution == 'UNRELEASED' and lineno == 1:
                print('{} ({}) UNRELEASED; urgency={}'
                      .format(entry.source, new_pkg_ver, entry.urgency),
                      file=new_log)
                continue

            if not inserted:
                # Case 2(a): intro line, logs and a blank line at top.
                if lineno == 3 and line != intro_line:
                    new_log.write(intro_line)
                    print_stable_log(new_log, cur_ver, new_ver)
                    new_log.write('\n')
                    inserted = True
                # Case 1 or 2(b): append logs after the existing ones.
                elif lineno > 3 and line == '\n':
                    print_stable_log(new_log, cur_ver, new_ver)
                    inserted = True

            # We must have inserted before the end of the first entry.
            assert not (line.startswith(' -- ') and not inserted)

            new_log.write(line)

    os.rename('debian/changelog.new', 'debian/changelog')
+kernelabi="$(uname -r)" +ff="${kernelabi#+([^-])-@(trunk|?(rc)+([0-9])|0.@(bpo|deb+([0-9])).+([0-9]))-}" +if [ "x$ff" != "x$kernelabi" ]; then + flavour="${ff#@(openvz|rt|vserver|xen)-}" + if [ "x$flavour" != "x$ff" ]; then + featureset="${ff%-$flavour}" + else + featureset=none + fi +else + flavour= + featureset=none +fi + +dbginfo= +fuzz=0 +jobs=$(nproc) + +eval "set -- $(getopt -n "$0" -o "f:gj:s:" -l "fuzz:" -- "$@")" +while true; do + case "$1" in + -f) flavour="$2"; shift 2 ;; + -g) dbginfo=y; shift 1 ;; + -j) jobs="$2"; shift 2 ;; + -s) featureset="$2"; shift 2 ;; + --fuzz) fuzz="$2"; shift 2;; + --) shift 1; break ;; + esac +done + +if [ $# -lt 1 ]; then + echo >&2 "Usage: $0 [<options>] <patch>..." + cat >&2 <<EOF +Options: + -f <flavour> specify the 'flavour' of kernel to build, e.g. 686-pae + -g enable debug info + -j <jobs> specify number of compiler jobs to run in parallel + (default: number of available processors) + -s <featureset> specify an optional featureset to apply, e.g. 
rt + --fuzz <num> set the maximum patch fuzz factor (default: 0) +EOF + exit 2 +fi + +if [ -z "$flavour" ]; then + echo >&2 "You must specify a flavour to build with the -f option" + exit 2 +fi + +profiles=nodoc,noudeb,pkg.linux.nosource,pkg.linux.mintools +if [ -z "$dbginfo" ]; then + profiles="$profiles,pkg.linux.nokerneldbg,pkg.linux.nokerneldbginfo" +fi + +# Check build-dependencies early if possible +if [ -f debian/control ]; then + dpkg-checkbuilddeps -P"$profiles" +fi + +# Append 'a~test' to Debian version; this should be less than any official +# successor and easily recognisable +version="$(dpkg-parsechangelog | sed 's/^Version: //; t; d')" +if [ "${version%a~test}" = "$version" ]; then + version="$version"a~test + dch -v "$version" --distribution UNRELEASED "Testing patches $*" +fi + +# Ignore user's .quiltrc +alias quilt='quilt --quiltrc -' + +# Try to clean up any previous test patches +if [ "$featureset" = none ]; then + patchdir=debian/patches + while patch="$(quilt top 2>/dev/null)" && \ + [ "${patch#test/}" != "$patch" ]; do + quilt pop -f + done + while patch="$(quilt next 2>/dev/null)" && \ + [ "${patch#test/}" != "$patch" ]; do + quilt delete -r "$patch" + done +else + patchdir=debian/patches-${featureset} + sed -i '/^test\//d' $patchdir/series +fi + +# Prepare a new directory for the patches +rm -rf $patchdir/test/ +mkdir $patchdir/test + +# Prepare a new directory for the config; override ABI name, featuresets, flavours +rm -rf debian/config.local +mkdir debian/config.local debian/config.local/"$arch" debian/config.local/"$arch"/"$featureset" +cat >debian/config.local/defines <<EOF +EOF +cat >debian/config.local/"$arch"/defines <<EOF +[base] +featuresets: $featureset +EOF +cat >debian/config.local/"$arch"/"$featureset"/defines <<EOF +[base] +flavours: $flavour +EOF +if [ "$featureset" = none ]; then + # default-flavour must refer to a flavour that's enabled + cat >>debian/config.local/"$arch"/"$featureset"/defines <<EOF +default-flavour: 
#!/bin/sh -eu

# Regenerate the taint-flag checks in the image bug script from the
# kernel's own documentation of taint flags.

bugscript=debian/templates/image.bug/include-1tainted

scratch="$(mktemp)"
trap 'rm -f "$scratch"' EXIT

# Everything above the existing flag checks is kept verbatim.
sed -rne '/^ +_check /q; p' \
	< "$bugscript" >"$scratch"

# Rebuild the flag checks from the table in tainted-kernels.rst.  We
# could alternatively extract them from sysctl/kernel.rst or in the C
# sources, but this is easy to find and parse and is likely to have
# the most useful descriptions.
sed -rne '/^Bit +Log +Number +Reason/,/^$/ {
 s/^ *([0-9]+) +.\/(.) +[0-9]+ +(.*)/ _check \1 \2 '\''\3'\''/p
 }' \
	< Documentation/admin-guide/tainted-kernels.rst >>"$scratch"

# Everything below the flag checks is kept verbatim.
sed -rne '/^ +echo "\*\* Tainted:/,$p' \
	< "$bugscript" >>"$scratch"

# Update the bug script in-place.
cp "$scratch" "$bugscript"