| field     | value                                              | date                      |
|-----------|----------------------------------------------------|---------------------------|
| author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 18:49:59 +0000 |
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 18:49:59 +0000 |
| commit    | 01997497f915e8f79871f3f2acb55ac465051d24           |                           |
| tree      | 1ce1afd7246e1014199e15cbf854bf7924458e5d /debian/bin |                         |
| parent    | Adding upstream version 6.1.76.                    |                           |
| download  | linux-01997497f915e8f79871f3f2acb55ac465051d24.tar.xz, linux-01997497f915e8f79871f3f2acb55ac465051d24.zip | |
Adding debian version 6.1.76-1 (debian/6.1.76-1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'debian/bin')
| mode       | file                             | lines |
|------------|----------------------------------|-------|
| -rwxr-xr-x | debian/bin/abiupdate.py          |   232 |
| -rwxr-xr-x | debian/bin/buildcheck.py         |   285 |
| -rwxr-xr-x | debian/bin/check-patches.sh      |    28 |
| l--------- | debian/bin/debian_linux          |     1 |
| -rwxr-xr-x | debian/bin/fix-shebang           |    12 |
| -rwxr-xr-x | debian/bin/gencontrol.py         |   664 |
| -rwxr-xr-x | debian/bin/gencontrol_signed.py  |   351 |
| -rwxr-xr-x | debian/bin/genorig.py            |   169 |
| -rwxr-xr-x | debian/bin/genpatch-lockdown     |   109 |
| -rwxr-xr-x | debian/bin/genpatch-rt           |   160 |
| -rwxr-xr-x | debian/bin/getconfig.py          |    25 |
| -rwxr-xr-x | debian/bin/git-tag-gpg-wrapper   |    42 |
| -rwxr-xr-x | debian/bin/kconfig.py            |    39 |
| -rwxr-xr-x | debian/bin/no-depmod             |    18 |
| -rwxr-xr-x | debian/bin/stable-update         |   135 |
| -rwxr-xr-x | debian/bin/stable-update.sh      |     2 |
| -rwxr-xr-x | debian/bin/test-patches          |   142 |
| -rwxr-xr-x | debian/bin/update-bug-taint-list |    24 |
18 files changed, 2438 insertions, 0 deletions
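The largest addition, abiupdate.py, refreshes the ABI reference files under debian/abi/ by downloading the previously built linux-headers packages and extracting their Module.symvers. A minimal invocation sketch, inferred from the option and positional-argument parsing visible in the script below; the concrete arch/featureset/flavour values and the mirror URL are illustrative assumptions, not taken from this diff:

    # Update the ABI references for every configured architecture
    debian/bin/abiupdate.py

    # Limit the update to a single arch, featureset and flavour (example values)
    debian/bin/abiupdate.py amd64 none amd64

    # Fetch the already-built packages from a different mirror (-u / --url-base)
    debian/bin/abiupdate.py --url-base https://deb.debian.org/debian/ amd64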
diff --git a/debian/bin/abiupdate.py b/debian/bin/abiupdate.py new file mode 100755 index 000000000..dbb6f77ba --- /dev/null +++ b/debian/bin/abiupdate.py @@ -0,0 +1,232 @@ +#!/usr/bin/python3 + +import sys +import optparse +import os +import shutil +import tempfile + +from urllib.request import urlopen +from urllib.error import HTTPError + +from debian_linux.abi import Symbols +from debian_linux.config import ConfigCoreDump +from debian_linux.debian import Changelog, VersionLinux + +default_url_base = "https://deb.debian.org/debian/" +default_url_base_incoming = "https://incoming.debian.org/debian-buildd/" +default_url_base_ports = "https://deb.debian.org/debian-ports/" +default_url_base_ports_incoming = "https://incoming.ports.debian.org/" +default_url_base_security = "https://deb.debian.org/debian-security/" + + +class url_debian_flat(object): + def __init__(self, base): + self.base = base + + def __call__(self, source, filename, arch): + return self.base + filename + + +class url_debian_pool(object): + def __init__(self, base): + self.base = base + + def __call__(self, source, filename, arch): + return (self.base + "pool/main/" + source[0] + "/" + source + "/" + + filename) + + +class url_debian_ports_pool(url_debian_pool): + def __call__(self, source, filename, arch): + if arch == 'all': + return url_debian_pool.__call__(self, source, filename, arch) + return (self.base + "pool-" + arch + "/main/" + source[0] + "/" + + source + "/" + filename) + + +class url_debian_security_pool(url_debian_pool): + def __call__(self, source, filename, arch): + return (self.base + "pool/updates/main/" + source[0] + "/" + source + + "/" + filename) + + +class Main(object): + dir = None + + def __init__(self, arch=None, featureset=None, flavour=None): + self.log = sys.stdout.write + + self.override_arch = arch + self.override_featureset = featureset + self.override_flavour = flavour + + changelog = Changelog(version=VersionLinux) + while changelog[0].distribution == 'UNRELEASED': + changelog.pop(0) + changelog = changelog[0] + + self.source = changelog.source + self.version = changelog.version.linux_version + self.version_source = changelog.version.complete + + if changelog.distribution.endswith('-security'): + self.urls = [url_base_security] + else: + self.urls = [url_base, url_base_ports, + url_base_incoming, url_base_ports_incoming] + + self.config = ConfigCoreDump(fp=open("debian/config.defines.dump", + "rb")) + + self.version_abi = self.config['version', ]['abiname'] + + def __call__(self): + self.dir = tempfile.mkdtemp(prefix='abiupdate') + try: + self.log("Retrieve config\n") + + try: + config = self.get_config() + except HTTPError as e: + self.log("Failed to retrieve %s: %s\n" % (e.filename, e)) + sys.exit(1) + + if self.override_arch: + arches = [self.override_arch] + else: + arches = config[('base',)]['arches'] + for arch in arches: + self.update_arch(config, arch) + finally: + shutil.rmtree(self.dir) + + def extract_package(self, filename, base): + base_out = self.dir + "/" + base + os.mkdir(base_out) + os.system("dpkg-deb --extract %s %s" % (filename, base_out)) + return base_out + + def get_abi(self, arch, prefix): + try: + version_abi = (self.config[('version',)]['abiname_base'] + '-' + + self.config['abi', arch]['abiname']) + except KeyError: + version_abi = self.version_abi + filename = ("linux-headers-%s-%s_%s_%s.deb" % + (version_abi, prefix, self.version_source, arch)) + f = self.retrieve_package(filename, arch) + d = self.extract_package(f, "linux-headers-%s_%s" % (prefix, arch)) + f1 
= d + ("/usr/src/linux-headers-%s-%s/Module.symvers" % + (version_abi, prefix)) + s = Symbols(open(f1)) + shutil.rmtree(d) + return version_abi, s + + def get_config(self): + # XXX We used to fetch the previous version of linux-support here, + # but until we authenticate downloads we should not do that as + # pickle.load allows running arbitrary code. + return self.config + + def retrieve_package(self, filename, arch): + for i, url in enumerate(self.urls): + u = url(self.source, filename, arch) + filename_out = self.dir + "/" + filename + + try: + f_in = urlopen(u) + except HTTPError: + if i == len(self.urls) - 1: + # No more URLs to try + raise + else: + continue + + f_out = open(filename_out, 'wb') + while 1: + r = f_in.read() + if not r: + break + f_out.write(r) + return filename_out + + def save_abi(self, version_abi, symbols, arch, featureset, flavour): + dir = "debian/abi/%s" % version_abi + if not os.path.exists(dir): + os.makedirs(dir) + out = "%s/%s_%s_%s" % (dir, arch, featureset, flavour) + symbols.write(open(out, 'w')) + + def update_arch(self, config, arch): + if self.override_featureset: + featuresets = [self.override_featureset] + else: + featuresets = config[('base', arch)]['featuresets'] + for featureset in featuresets: + self.update_featureset(config, arch, featureset) + + def update_featureset(self, config, arch, featureset): + config_base = config.merge('base', arch, featureset) + + if not config_base.get('enabled', True): + return + + if self.override_flavour: + flavours = [self.override_flavour] + else: + flavours = config_base['flavours'] + for flavour in flavours: + self.update_flavour(config, arch, featureset, flavour) + + def update_flavour(self, config, arch, featureset, flavour): + self.log("Updating ABI for arch %s, featureset %s, flavour %s: " % + (arch, featureset, flavour)) + try: + if featureset == 'none': + localversion = flavour + else: + localversion = featureset + '-' + flavour + + version_abi, abi = self.get_abi(arch, localversion) + self.save_abi(version_abi, abi, arch, featureset, flavour) + self.log("Ok.\n") + except HTTPError as e: + self.log("Failed to retrieve %s: %s\n" % (e.filename, e)) + except Exception: + self.log("FAILED!\n") + import traceback + traceback.print_exc(None, sys.stdout) + + +if __name__ == '__main__': + options = optparse.OptionParser() + options.add_option("-u", "--url-base", dest="url_base", + default=default_url_base) + options.add_option("--url-base-incoming", dest="url_base_incoming", + default=default_url_base_incoming) + options.add_option("--url-base-ports", dest="url_base_ports", + default=default_url_base_ports) + options.add_option("--url-base-ports-incoming", + dest="url_base_ports_incoming", + default=default_url_base_ports_incoming) + options.add_option("--url-base-security", dest="url_base_security", + default=default_url_base_security) + + opts, args = options.parse_args() + + kw = {} + if len(args) >= 1: + kw['arch'] = args[0] + if len(args) >= 2: + kw['featureset'] = args[1] + if len(args) >= 3: + kw['flavour'] = args[2] + + url_base = url_debian_pool(opts.url_base) + url_base_incoming = url_debian_pool(opts.url_base_incoming) + url_base_ports = url_debian_ports_pool(opts.url_base_ports) + url_base_ports_incoming = url_debian_flat(opts.url_base_ports_incoming) + url_base_security = url_debian_security_pool(opts.url_base_security) + + Main(**kw)() diff --git a/debian/bin/buildcheck.py b/debian/bin/buildcheck.py new file mode 100755 index 000000000..3f7ce25a9 --- /dev/null +++ b/debian/bin/buildcheck.py @@ -0,0 
+1,285 @@ +#!/usr/bin/python3 + +import sys +import glob +import os +import re + +from debian_linux.abi import Symbols +from debian_linux.config import ConfigCoreDump +from debian_linux.debian import Changelog, VersionLinux + + +class CheckAbi(object): + class SymbolInfo(object): + def __init__(self, symbol, symbol_ref=None): + self.symbol = symbol + self.symbol_ref = symbol_ref or symbol + + @property + def module(self): + return self.symbol.module + + @property + def name(self): + return self.symbol.name + + def write(self, out, ignored): + info = [] + if ignored: + info.append("ignored") + for name in ('module', 'version', 'export'): + data = getattr(self.symbol, name) + data_ref = getattr(self.symbol_ref, name) + if data != data_ref: + info.append("%s: %s -> %s" % (name, data_ref, data)) + else: + info.append("%s: %s" % (name, data)) + out.write("%-48s %s\n" % (self.symbol.name, ", ".join(info))) + + def __init__(self, config, dir, arch, featureset, flavour): + self.config = config + self.arch, self.featureset, self.flavour = arch, featureset, flavour + + self.filename_new = "%s/Module.symvers" % dir + + try: + version_abi = (self.config[('version',)]['abiname_base'] + '-' + + self.config['abi', arch]['abiname']) + except KeyError: + version_abi = self.config[('version',)]['abiname'] + self.filename_ref = ("debian/abi/%s/%s_%s_%s" % + (version_abi, arch, featureset, flavour)) + + def __call__(self, out): + ret = 0 + + new = Symbols(open(self.filename_new)) + unversioned = [name for name in new + if new[name].version == '0x00000000'] + if unversioned: + out.write("ABI is not completely versioned! " + "Refusing to continue.\n") + out.write("\nUnversioned symbols:\n") + for name in sorted(unversioned): + self.SymbolInfo(new[name]).write(out, False) + ret = 1 + + try: + ref = Symbols(open(self.filename_ref)) + except IOError: + out.write("Can't read ABI reference. ABI not checked!\n") + return ret + + symbols, add, change, remove = self._cmp(ref, new) + + ignore = self._ignore(symbols) + + add_effective = add - ignore + change_effective = change - ignore + remove_effective = remove - ignore + + if change_effective or remove_effective: + out.write("ABI has changed! Refusing to continue.\n") + ret = 1 + elif change or remove: + out.write("ABI has changed but all changes have been ignored. " + "Continuing.\n") + elif add_effective: + out.write("New symbols have been added. Continuing.\n") + elif add: + out.write("New symbols have been added but have been ignored. 
" + "Continuing.\n") + else: + out.write("No ABI changes.\n") + + if add: + out.write("\nAdded symbols:\n") + for name in sorted(add): + symbols[name].write(out, name in ignore) + + if change: + out.write("\nChanged symbols:\n") + for name in sorted(change): + symbols[name].write(out, name in ignore) + + if remove: + out.write("\nRemoved symbols:\n") + for name in sorted(remove): + symbols[name].write(out, name in ignore) + + return ret + + def _cmp(self, ref, new): + ref_names = set(ref.keys()) + new_names = set(new.keys()) + + add = set() + change = set() + remove = set() + + symbols = {} + + for name in new_names - ref_names: + add.add(name) + symbols[name] = self.SymbolInfo(new[name]) + + for name in ref_names.intersection(new_names): + s_ref = ref[name] + s_new = new[name] + + if s_ref != s_new: + change.add(name) + symbols[name] = self.SymbolInfo(s_new, s_ref) + + for name in ref_names - new_names: + remove.add(name) + symbols[name] = self.SymbolInfo(ref[name]) + + return symbols, add, change, remove + + def _ignore_pattern(self, pattern): + ret = [] + for i in re.split(r'(\*\*?)', pattern): + if i == '*': + ret.append(r'[^/]*') + elif i == '**': + ret.append(r'.*') + elif i: + ret.append(re.escape(i)) + return re.compile('^' + ''.join(ret) + '$') + + def _ignore(self, symbols): + # TODO: let config merge this lists + configs = [] + configs.append(self.config.get(('abi', self.arch, self.featureset, + self.flavour), {})) + configs.append(self.config.get(('abi', self.arch, None, self.flavour), + {})) + configs.append(self.config.get(('abi', self.arch, self.featureset), + {})) + configs.append(self.config.get(('abi', self.arch), {})) + configs.append(self.config.get(('abi', None, self.featureset), {})) + configs.append(self.config.get(('abi',), {})) + + ignores = set() + for config in configs: + ignores.update(config.get('ignore-changes', [])) + + filtered = set() + for ignore in ignores: + type = 'name' + if ':' in ignore: + type, ignore = ignore.split(':') + if type in ('name', 'module'): + p = self._ignore_pattern(ignore) + for symbol in symbols.values(): + if p.match(getattr(symbol, type)): + filtered.add(symbol.name) + else: + raise NotImplementedError + + return filtered + + +class CheckImage(object): + def __init__(self, config, dir, arch, featureset, flavour): + self.dir = dir + self.arch, self.featureset, self.flavour = arch, featureset, flavour + + self.changelog = Changelog(version=VersionLinux)[0] + + self.config_entry_base = config.merge('base', arch, featureset, + flavour) + self.config_entry_build = config.merge('build', arch, featureset, + flavour) + self.config_entry_image = config.merge('image', arch, featureset, + flavour) + + def __call__(self, out): + image = self.config_entry_build.get('image-file') + uncompressed_image = self.config_entry_build \ + .get('uncompressed-image-file') + + if not image: + # TODO: Bail out + return 0 + + image = os.path.join(self.dir, image) + if uncompressed_image: + uncompressed_image = os.path.join(self.dir, uncompressed_image) + + fail = 0 + + fail |= self.check_size(out, image, uncompressed_image) + + return fail + + def check_size(self, out, image, uncompressed_image): + value = self.config_entry_image.get('check-size') + + if not value: + return 0 + + dtb_size = 0 + if self.config_entry_image.get('check-size-with-dtb'): + for dtb in glob.glob( + os.path.join(self.dir, 'arch', + self.config_entry_base['kernel-arch'], + 'boot/dts/*.dtb')): + dtb_size = max(dtb_size, os.stat(dtb).st_size) + + size = os.stat(image).st_size + dtb_size 
+ + # 1% overhead is desirable in order to cope with growth + # through the lifetime of a stable release. Warn if this is + # not the case. + usage = (float(size)/value) * 100.0 + out.write('Image size %d/%d, using %.2f%%. ' % (size, value, usage)) + if size > value: + out.write('Too large. Refusing to continue.\n') + return 1 + elif usage >= 99.0: + out.write('Under 1%% space in %s. ' % self.changelog.distribution) + else: + out.write('Image fits. ') + out.write('Continuing.\n') + + # Also check the uncompressed image + if uncompressed_image and \ + self.config_entry_image.get('check-uncompressed-size'): + value = self.config_entry_image.get('check-uncompressed-size') + size = os.stat(uncompressed_image).st_size + usage = (float(size)/value) * 100.0 + out.write('Uncompressed Image size %d/%d, using %.2f%%. ' % + (size, value, usage)) + if size > value: + out.write('Too large. Refusing to continue.\n') + return 1 + elif usage >= 99.0: + out.write('Uncompressed Image Under 1%% space in %s. ' % + self.changelog.distribution) + else: + out.write('Uncompressed Image fits. ') + out.write('Continuing.\n') + + return 0 + + +class Main(object): + def __init__(self, dir, arch, featureset, flavour): + self.args = dir, arch, featureset, flavour + + self.config = ConfigCoreDump(open("debian/config.defines.dump", "rb")) + + def __call__(self): + fail = 0 + + for c in CheckAbi, CheckImage: + fail |= c(self.config, *self.args)(sys.stdout) + + return fail + + +if __name__ == '__main__': + sys.exit(Main(*sys.argv[1:])()) diff --git a/debian/bin/check-patches.sh b/debian/bin/check-patches.sh new file mode 100755 index 000000000..54bb731e9 --- /dev/null +++ b/debian/bin/check-patches.sh @@ -0,0 +1,28 @@ +#!/bin/sh -e + +TMPDIR=$(mktemp -d) +trap "rm -rf $TMPDIR" EXIT +for patchdir in debian/patches*; do + sed '/^#/d; /^[[:space:]]*$/d; /^X /d; s/^+ //; s,^,'"$patchdir"'/,' "$patchdir"/series +done | sort -u > $TMPDIR/used +find debian/patches* ! -path '*/series' -type f -name "*.diff" -o -name "*.patch" -printf "%p\n" | sort > $TMPDIR/avail +echo "Used patches" +echo "==============" +cat $TMPDIR/used +echo +echo "Unused patches" +echo "==============" +grep -F -v -f $TMPDIR/used $TMPDIR/avail || test $? = 1 +echo +echo "Patches without required headers" +echo "================================" +xargs grep -E -l '^(Subject|Description):' < $TMPDIR/used | xargs grep -E -l '^(From|Author|Origin):' > $TMPDIR/goodheaders || test $? = 1 +grep -F -v -f $TMPDIR/goodheaders $TMPDIR/used || test $? = 1 +echo +echo "Patches without Origin or Forwarded header" +echo "==========================================" +xargs grep -E -L '^(Origin:|Forwarded: (no\b|not-needed|http))' < $TMPDIR/used || test $? = 1 +echo +echo "Patches to be forwarded" +echo "=======================" +xargs grep -E -l '^Forwarded: no\b' < $TMPDIR/used || test $? = 1 diff --git a/debian/bin/debian_linux b/debian/bin/debian_linux new file mode 120000 index 000000000..01f3e04dc --- /dev/null +++ b/debian/bin/debian_linux @@ -0,0 +1 @@ +../lib/python/debian_linux/
\ No newline at end of file diff --git a/debian/bin/fix-shebang b/debian/bin/fix-shebang new file mode 100755 index 000000000..edf551fa3 --- /dev/null +++ b/debian/bin/fix-shebang @@ -0,0 +1,12 @@ +#!/usr/bin/perl -pi + +# Change "#!/usr/bin/env perl" to "#!/usr/bin/perl" (policy ยง10.4). +# Other uses of /usr/bin/env should probably be converted as well, but +# policy doesn't specify what to do. +if ($. == 1 && m|^\#!\s*/usr/bin/env\s+(.+)|) { + if ($1 eq "perl") { + $_ = "#!/usr/bin/perl\n"; + } else { + print STDERR "W: Found #!/usr/bin/env $1 and don't know what to substitute\n"; + } +} diff --git a/debian/bin/gencontrol.py b/debian/bin/gencontrol.py new file mode 100755 index 000000000..325ff46ec --- /dev/null +++ b/debian/bin/gencontrol.py @@ -0,0 +1,664 @@ +#!/usr/bin/python3 + +import sys +import locale +import os +import os.path +import subprocess +import re + +from debian_linux import config +from debian_linux.debian import PackageRelation, \ + PackageRelationEntry, PackageRelationGroup, VersionLinux, BinaryPackage, \ + restriction_requires_profile +from debian_linux.gencontrol import Gencontrol as Base, \ + iter_featuresets, iter_flavours, add_package_build_restriction +from debian_linux.utils import Templates + +locale.setlocale(locale.LC_CTYPE, "C.UTF-8") + + +class Gencontrol(Base): + config_schema = { + 'abi': { + 'ignore-changes': config.SchemaItemList(), + }, + 'build': { + 'signed-code': config.SchemaItemBoolean(), + 'vdso': config.SchemaItemBoolean(), + }, + 'description': { + 'parts': config.SchemaItemList(), + }, + 'image': { + 'bootloaders': config.SchemaItemList(), + 'configs': config.SchemaItemList(), + 'initramfs-generators': config.SchemaItemList(), + 'check-size': config.SchemaItemInteger(), + 'check-size-with-dtb': config.SchemaItemBoolean(), + 'check-uncompressed-size': config.SchemaItemInteger(), + 'depends': config.SchemaItemList(','), + 'provides': config.SchemaItemList(','), + 'suggests': config.SchemaItemList(','), + 'recommends': config.SchemaItemList(','), + 'conflicts': config.SchemaItemList(','), + 'breaks': config.SchemaItemList(','), + }, + 'relations': { + }, + 'packages': { + 'docs': config.SchemaItemBoolean(), + 'installer': config.SchemaItemBoolean(), + 'libc-dev': config.SchemaItemBoolean(), + 'meta': config.SchemaItemBoolean(), + 'tools-unversioned': config.SchemaItemBoolean(), + 'tools-versioned': config.SchemaItemBoolean(), + 'source': config.SchemaItemBoolean(), + } + } + + env_flags = [ + ('DEBIAN_KERNEL_DISABLE_INSTALLER', 'disable_installer', 'installer modules'), + ('DEBIAN_KERNEL_DISABLE_SIGNED', 'disable_signed', 'signed code'), + ] + + def __init__(self, config_dirs=["debian/config", "debian/config.local"], + template_dirs=["debian/templates"]): + super(Gencontrol, self).__init__( + config.ConfigCoreHierarchy(self.config_schema, config_dirs), + Templates(template_dirs), + VersionLinux) + self.process_changelog() + self.config_dirs = config_dirs + + for env, attr, desc in self.env_flags: + setattr(self, attr, False) + if os.getenv(env): + if self.changelog[0].distribution == 'UNRELEASED': + import warnings + warnings.warn(f'Disable {desc} on request ({env} set)') + setattr(self, attr, True) + else: + raise RuntimeError( + f'Unable to disable {desc} in release build ({env} set)') + + def _setup_makeflags(self, names, makeflags, data): + for src, dst, optional in names: + if src in data or not optional: + makeflags[dst] = data[src] + + def do_main_setup(self, vars, makeflags, extra): + super(Gencontrol, self).do_main_setup(vars, 
makeflags, extra) + makeflags.update({ + 'VERSION': self.version.linux_version, + 'UPSTREAMVERSION': self.version.linux_upstream, + 'ABINAME': self.abiname_version + self.abiname_part, + 'SOURCEVERSION': self.version.complete, + }) + makeflags['SOURCE_BASENAME'] = vars['source_basename'] + makeflags['SOURCE_SUFFIX'] = vars['source_suffix'] + + # Prepare to generate debian/tests/control + self.tests_control = self.templates.get_tests_control('main.tests-control', vars) + self.tests_control_image = None + self.tests_control_headers = None + + self.installer_packages = {} + + if not self.disable_installer and self.config.merge('packages').get('installer', True): + # Add udebs using kernel-wedge + kw_env = os.environ.copy() + kw_env['KW_DEFCONFIG_DIR'] = 'debian/installer' + kw_env['KW_CONFIG_DIR'] = 'debian/installer' + kw_proc = subprocess.Popen( + ['kernel-wedge', 'gen-control', vars['abiname']], + stdout=subprocess.PIPE, + text=True, + env=kw_env) + udeb_packages = BinaryPackage.read_rfc822(kw_proc.stdout) + kw_proc.wait() + if kw_proc.returncode != 0: + raise RuntimeError('kernel-wedge exited with code %d' % + kw_proc.returncode) + + # All architectures that have some installer udebs + arches = set() + for package in udeb_packages: + arches.update(package['Architecture']) + + # Code-signing status for those architectures + # If we're going to build signed udebs later, don't actually + # generate udebs. Just test that we *can* build, so we find + # configuration errors before building linux-signed. + build_signed = {} + for arch in arches: + if not self.disable_signed: + build_signed[arch] = self.config.merge('build', arch) \ + .get('signed-code', False) + else: + build_signed[arch] = False + + for package in udeb_packages: + # kernel-wedge currently chokes on Build-Profiles so add it now + if any(build_signed[arch] for arch in package['Architecture']): + assert all(build_signed[arch] + for arch in package['Architecture']) + # XXX This is a hack to exclude the udebs from + # the package list while still being able to + # convince debhelper and kernel-wedge to go + # part way to building them. 
+ package['Build-Profiles'] = ( + '<pkg.linux.udeb-unsigned-test-build !noudeb !stage1' + ' !pkg.linux.nokernel !pkg.linux.quick>') + else: + package['Build-Profiles'] = ( + '<!noudeb !stage1 !pkg.linux.nokernel !pkg.linux.quick>') + + for arch in package['Architecture']: + self.installer_packages.setdefault(arch, []) \ + .append(package) + + def do_main_makefile(self, makeflags, extra): + for featureset in iter_featuresets(self.config): + makeflags_featureset = makeflags.copy() + makeflags_featureset['FEATURESET'] = featureset + + self.makefile.add_rules(f'source_{featureset}', + 'source', makeflags_featureset) + self.makefile.add_deps('source', [f'source_{featureset}']) + + makeflags = makeflags.copy() + makeflags['ALL_FEATURESETS'] = ' '.join(iter_featuresets(self.config)) + super().do_main_makefile(makeflags, extra) + + def do_main_packages(self, vars, makeflags, extra): + self.bundle.add('main', ('real', ), makeflags, vars) + + # Only build the metapackages if their names won't exactly match + # the packages they depend on + do_meta = self.config.merge('packages').get('meta', True) \ + and vars['source_suffix'] != '-' + vars['version'] + + if self.config.merge('packages').get('docs', True): + self.bundle.add('docs', ('real', ), makeflags, vars) + if do_meta: + self.bundle.add('docs.meta', ('real', ), makeflags, vars) + if self.config.merge('packages').get('source', True): + self.bundle.add('sourcebin', ('real', ), makeflags, vars) + if do_meta: + self.bundle.add('sourcebin.meta', ('real', ), makeflags, vars) + + def do_indep_featureset_setup(self, vars, makeflags, featureset, extra): + makeflags['LOCALVERSION'] = vars['localversion'] + kernel_arches = set() + for arch in iter(self.config['base', ]['arches']): + if self.config.get_merge('base', arch, featureset, None, + 'flavours'): + kernel_arches.add(self.config['base', arch]['kernel-arch']) + makeflags['ALL_KERNEL_ARCHES'] = ' '.join(sorted(list(kernel_arches))) + + vars['featureset_desc'] = '' + if featureset != 'none': + desc = self.config[('description', None, featureset)] + desc_parts = desc['parts'] + vars['featureset_desc'] = (' with the %s featureset' % + desc['part-short-%s' % desc_parts[0]]) + + def do_indep_featureset_packages(self, featureset, + vars, makeflags, extra): + self.bundle.add('headers.featureset', (featureset, 'real'), makeflags, vars) + + arch_makeflags = ( + ('kernel-arch', 'KERNEL_ARCH', False), + ) + + def do_arch_setup(self, vars, makeflags, arch, extra): + config_base = self.config.merge('base', arch) + + self._setup_makeflags(self.arch_makeflags, makeflags, config_base) + + try: + gnu_type = subprocess.check_output( + ['dpkg-architecture', '-f', '-a', arch, + '-q', 'DEB_HOST_GNU_TYPE'], + stderr=subprocess.DEVNULL, + encoding='utf-8') + except subprocess.CalledProcessError: + # This sometimes happens for the newest ports :-/ + print('W: Unable to get GNU type for %s' % arch, file=sys.stderr) + else: + vars['gnu-type-package'] = gnu_type.strip().replace('_', '-') + + def do_arch_packages(self, arch, vars, makeflags, + extra): + try: + abiname_part = '-%s' % self.config['abi', arch]['abiname'] + except KeyError: + abiname_part = self.abiname_part + makeflags['ABINAME'] = vars['abiname'] = \ + self.abiname_version + abiname_part + + if not self.disable_signed: + build_signed = self.config.merge('build', arch) \ + .get('signed-code', False) + else: + build_signed = False + + udeb_packages = self.installer_packages.get(arch, []) + if udeb_packages: + makeflags_local = makeflags.copy() + 
makeflags_local['PACKAGE_NAMES'] = ' '.join(p['Package'] for p in udeb_packages) + + for package in udeb_packages: + package.meta['rules-target'] = build_signed and 'udeb_test' or 'udeb' + + self.bundle.add_packages( + udeb_packages, + (arch, 'real'), + makeflags_local, arch=arch, check_packages=not build_signed, + ) + + if build_signed: + self.bundle.add('signed-template', (arch, 'real'), makeflags, vars, arch=arch) + + if self.config.merge('packages').get('libc-dev', True): + self.bundle.add('libc-dev', (arch, 'real'), makeflags, vars) + + if self.config['base', arch].get('featuresets') and \ + self.config.merge('packages').get('source', True): + self.bundle.add('config', (arch, 'real'), makeflags, vars) + + if self.config.merge('packages').get('tools-unversioned', True): + self.bundle.add('tools-unversioned', (arch, 'real'), makeflags, vars) + + if self.config.merge('packages').get('tools-versioned', True): + self.bundle.add('tools-versioned', (arch, 'real'), makeflags, vars) + + def do_featureset_setup(self, vars, makeflags, arch, featureset, extra): + vars['localversion_headers'] = vars['localversion'] + makeflags['LOCALVERSION_HEADERS'] = vars['localversion_headers'] + + self.default_flavour = self.config.merge('base', arch, featureset) \ + .get('default-flavour') + if self.default_flavour is not None: + if featureset != 'none': + raise RuntimeError("default-flavour set for %s %s," + " but must only be set for featureset none" + % (arch, featureset)) + if self.default_flavour \ + not in iter_flavours(self.config, arch, featureset): + raise RuntimeError("default-flavour %s for %s %s does not exist" + % (self.default_flavour, arch, featureset)) + + self.quick_flavour = self.config.merge('base', arch, featureset) \ + .get('quick-flavour') + + flavour_makeflags_base = ( + ('compiler', 'COMPILER', False), + ('compiler-filename', 'COMPILER', True), + ('kernel-arch', 'KERNEL_ARCH', False), + ('cflags', 'KCFLAGS', True), + ('override-host-type', 'OVERRIDE_HOST_TYPE', True), + ('cross-compile-compat', 'CROSS_COMPILE_COMPAT', True), + ) + + flavour_makeflags_build = ( + ('image-file', 'IMAGE_FILE', True), + ) + + flavour_makeflags_image = ( + ('install-stem', 'IMAGE_INSTALL_STEM', True), + ) + + flavour_makeflags_other = ( + ('localversion', 'LOCALVERSION', False), + ('localversion-image', 'LOCALVERSION_IMAGE', True), + ) + + def do_flavour_setup(self, vars, makeflags, arch, featureset, flavour, + extra): + config_base = self.config.merge('base', arch, featureset, flavour) + config_build = self.config.merge('build', arch, featureset, flavour) + config_description = self.config.merge('description', arch, featureset, + flavour) + config_image = self.config.merge('image', arch, featureset, flavour) + + vars['flavour'] = vars['localversion'][1:] + vars['class'] = config_description['hardware'] + vars['longclass'] = (config_description.get('hardware-long') + or vars['class']) + + vars['localversion-image'] = vars['localversion'] + override_localversion = config_image.get('override-localversion', None) + if override_localversion is not None: + vars['localversion-image'] = (vars['localversion_headers'] + '-' + + override_localversion) + vars['image-stem'] = config_image.get('install-stem') + + self._setup_makeflags(self.flavour_makeflags_base, makeflags, + config_base) + self._setup_makeflags(self.flavour_makeflags_build, makeflags, + config_build) + self._setup_makeflags(self.flavour_makeflags_image, makeflags, + config_image) + self._setup_makeflags(self.flavour_makeflags_other, makeflags, vars) + 
+ def do_flavour_packages(self, arch, featureset, + flavour, vars, makeflags, extra): + ruleid = (arch, featureset, flavour, 'real') + + packages_headers = ( + self.bundle.add('headers', ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_headers) == 1 + + do_meta = self.config.merge('packages').get('meta', True) + config_entry_base = self.config.merge('base', arch, featureset, + flavour) + config_entry_build = self.config.merge('build', arch, featureset, + flavour) + config_entry_description = self.config.merge('description', arch, + featureset, flavour) + config_entry_relations = self.config.merge('relations', arch, + featureset, flavour) + + def config_entry_image(key, *args, **kwargs): + return self.config.get_merge( + 'image', arch, featureset, flavour, key, *args, **kwargs) + + compiler = config_entry_base.get('compiler', 'gcc') + + # Work out dependency from linux-headers to compiler. Drop + # dependencies for cross-builds. Strip any remaining + # restrictions, as they don't apply to binary Depends. + relations_compiler_headers = PackageRelation( + self.substitute(config_entry_relations.get('headers%' + compiler) + or config_entry_relations.get(compiler), vars)) + relations_compiler_headers = PackageRelation( + PackageRelationGroup( + entry for entry in group + if not restriction_requires_profile(entry.restrictions, + 'cross')) + for group in relations_compiler_headers) + for group in relations_compiler_headers: + for entry in group: + entry.restrictions = [] + + relations_compiler_build_dep = PackageRelation( + self.substitute(config_entry_relations[compiler], vars)) + for group in relations_compiler_build_dep: + for item in group: + item.arches = [arch] + self.packages['source']['Build-Depends-Arch'].extend( + relations_compiler_build_dep) + + packages_own = [] + + if not self.disable_signed: + build_signed = config_entry_build.get('signed-code') + else: + build_signed = False + + vars.setdefault('desc', None) + + package_image = ( + self.bundle.add(build_signed and 'image-unsigned' or 'image', + ruleid, makeflags, vars, arch=arch) + )[0] + makeflags['IMAGE_PACKAGE_NAME'] = package_image['Package'] + + for field in ('Depends', 'Provides', 'Suggests', 'Recommends', + 'Conflicts', 'Breaks'): + package_image.setdefault(field).extend(PackageRelation( + config_entry_image(field.lower(), None), + override_arches=(arch,))) + + generators = config_entry_image('initramfs-generators') + group = PackageRelationGroup() + for i in generators: + i = config_entry_relations.get(i, i) + group.append(i) + a = PackageRelationEntry(i) + if a.operator is not None: + a.operator = -a.operator + package_image['Breaks'].append(PackageRelationGroup([a])) + for item in group: + item.arches = [arch] + package_image['Depends'].append(group) + + bootloaders = config_entry_image('bootloaders', None) + if bootloaders: + group = PackageRelationGroup() + for i in bootloaders: + i = config_entry_relations.get(i, i) + group.append(i) + a = PackageRelationEntry(i) + if a.operator is not None: + a.operator = -a.operator + package_image['Breaks'].append(PackageRelationGroup([a])) + for item in group: + item.arches = [arch] + package_image['Suggests'].append(group) + + desc_parts = self.config.get_merge('description', arch, featureset, + flavour, 'parts') + if desc_parts: + # XXX: Workaround, we need to support multiple entries of the same + # name + parts = list(set(desc_parts)) + parts.sort() + desc = package_image['Description'] + for part in parts: + desc.append(config_entry_description['part-long-' + 
part]) + desc.append_short(config_entry_description + .get('part-short-' + part, '')) + + packages_headers[0]['Depends'].extend(relations_compiler_headers) + packages_own.append(package_image) + packages_own.extend(packages_headers) + if extra.get('headers_arch_depends'): + extra['headers_arch_depends'].append('%s (= ${binary:Version})' % + packages_own[-1]['Package']) + + # The image meta-packages will depend on signed linux-image + # packages where applicable, so should be built from the + # signed source packages The header meta-packages will also be + # built along with the signed packages, to create a dependency + # relationship that ensures src:linux and src:linux-signed-* + # transition to testing together. + if do_meta and not build_signed: + packages_meta = ( + self.bundle.add('image.meta', ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_meta) == 1 + packages_meta += ( + self.bundle.add('headers.meta', ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_meta) == 2 + + if flavour == self.default_flavour \ + and not self.vars['source_suffix']: + packages_meta[0].setdefault('Provides') \ + .append('linux-image-generic') + packages_meta[1].setdefault('Provides') \ + .append('linux-headers-generic') + + packages_own.extend(packages_meta) + + if config_entry_build.get('vdso', False): + makeflags['VDSO'] = True + + packages_own.extend( + self.bundle.add('image-dbg', ruleid, makeflags, vars, arch=arch) + ) + if do_meta: + packages_own.extend( + self.bundle.add('image-dbg.meta', ruleid, makeflags, vars, arch=arch) + ) + + # In a quick build, only build the quick flavour (if any). + if flavour != self.quick_flavour: + for package in packages_own: + add_package_build_restriction(package, '!pkg.linux.quick') + + # Make sure signed-template is build after linux + if build_signed: + self.makefile.add_deps(f'build-arch_{arch}_real_signed-template', + [f'build-arch_{arch}_{featureset}_{flavour}_real']) + self.makefile.add_deps(f'binary-arch_{arch}_real_signed-template', + [f'binary-arch_{arch}_{featureset}_{flavour}_real']) + + # Make sure udeb is build after linux + self.makefile.add_deps(f'build-arch_{arch}_real_udeb', + [f'build-arch_{arch}_{featureset}_{flavour}_real']) + self.makefile.add_deps(f'binary-arch_{arch}_real_udeb', + [f'binary-arch_{arch}_{featureset}_{flavour}_real']) + + tests_control = self.templates.get_tests_control('image.tests-control', vars)[0] + tests_control['Depends'].append( + PackageRelationGroup(package_image['Package'], + override_arches=(arch,))) + if self.tests_control_image: + self.tests_control_image['Depends'].extend( + tests_control['Depends']) + else: + self.tests_control_image = tests_control + self.tests_control.append(tests_control) + + if flavour == (self.quick_flavour or self.default_flavour): + if not self.tests_control_headers: + self.tests_control_headers = \ + self.templates.get_tests_control('headers.tests-control', vars)[0] + self.tests_control.append(self.tests_control_headers) + self.tests_control_headers['Architecture'].add(arch) + self.tests_control_headers['Depends'].append( + PackageRelationGroup(packages_headers[0]['Package'], + override_arches=(arch,))) + + def get_config(*entry_name): + entry_real = ('image',) + entry_name + entry = self.config.get(entry_real, None) + if entry is None: + return None + return entry.get('configs', None) + + def check_config_default(fail, f): + for d in self.config_dirs[::-1]: + f1 = d + '/' + f + if os.path.exists(f1): + return [f1] + if fail: + raise RuntimeError("%s unavailable" % 
f) + return [] + + def check_config_files(files): + ret = [] + for f in files: + for d in self.config_dirs[::-1]: + f1 = d + '/' + f + if os.path.exists(f1): + ret.append(f1) + break + else: + raise RuntimeError("%s unavailable" % f) + return ret + + def check_config(default, fail, *entry_name): + configs = get_config(*entry_name) + if configs is None: + return check_config_default(fail, default) + return check_config_files(configs) + + kconfig = check_config('config', True) + # XXX: We have no way to override kernelarch-X configs + kconfig.extend(check_config_default(False, + "kernelarch-%s/config" % config_entry_base['kernel-arch'])) + kconfig.extend(check_config("%s/config" % arch, True, arch)) + kconfig.extend(check_config("%s/config.%s" % (arch, flavour), False, + arch, None, flavour)) + kconfig.extend(check_config("featureset-%s/config" % featureset, False, + None, featureset)) + kconfig.extend(check_config("%s/%s/config" % (arch, featureset), False, + arch, featureset)) + kconfig.extend(check_config("%s/%s/config.%s" % + (arch, featureset, flavour), False, + arch, featureset, flavour)) + makeflags['KCONFIG'] = ' '.join(kconfig) + makeflags['KCONFIG_OPTIONS'] = '' + if build_signed: + makeflags['KCONFIG_OPTIONS'] += ' -o SECURITY_LOCKDOWN_LSM=y -o MODULE_SIG=y' + # Add "salt" to fix #872263 + makeflags['KCONFIG_OPTIONS'] += \ + ' -o "BUILD_SALT=\\"%(abiname)s%(localversion)s\\""' % vars + if config_entry_build.get('trusted-certs'): + makeflags['KCONFIG_OPTIONS'] += \ + f' -o "SYSTEM_TRUSTED_KEYS=\\"${{CURDIR}}/{config_entry_build["trusted-certs"]}\\""' + + merged_config = ('debian/build/config.%s_%s_%s' % + (arch, featureset, flavour)) + self.makefile.add_cmds(merged_config, + ["$(MAKE) -f debian/rules.real %s %s" % + (merged_config, makeflags)]) + + def process_changelog(self): + version = self.version = self.changelog[0].version + self.abiname_part = '-%s' % self.config['abi', ]['abiname'] + # We need to keep at least three version components to avoid + # userland breakage (e.g. #742226, #745984). 
+ self.abiname_version = re.sub(r'^(\d+\.\d+)(?=-|$)', r'\1.0', + self.version.linux_version) + self.vars = { + 'upstreamversion': self.version.linux_upstream, + 'version': self.version.linux_version, + 'source_basename': re.sub(r'-[\d.]+$', '', + self.changelog[0].source), + 'source_upstream': self.version.upstream, + 'source_package': self.changelog[0].source, + 'abiname': self.abiname_version + self.abiname_part, + } + self.vars['source_suffix'] = \ + self.changelog[0].source[len(self.vars['source_basename']):] + self.config['version', ] = {'source': self.version.complete, + 'upstream': self.version.linux_upstream, + 'abiname_base': self.abiname_version, + 'abiname': (self.abiname_version + + self.abiname_part)} + + distribution = self.changelog[0].distribution + if distribution in ('unstable', ): + if version.linux_revision_experimental or \ + version.linux_revision_backports or \ + version.linux_revision_other: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + if distribution in ('experimental', ): + if not version.linux_revision_experimental: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + if distribution.endswith('-security') or distribution.endswith('-lts'): + if version.linux_revision_backports or \ + version.linux_revision_other: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + if distribution.endswith('-backports'): + if not version.linux_revision_backports: + raise RuntimeError("Can't upload to %s with a version of %s" % + (distribution, version)) + + def write(self): + self.write_config() + super().write() + self.write_tests_control() + + def write_config(self): + f = open("debian/config.defines.dump", 'wb') + self.config.dump(f) + f.close() + + def write_tests_control(self): + self.write_rfc822(open("debian/tests/control", 'w'), + self.tests_control) + + +if __name__ == '__main__': + Gencontrol()() diff --git a/debian/bin/gencontrol_signed.py b/debian/bin/gencontrol_signed.py new file mode 100755 index 000000000..5a5d7f2de --- /dev/null +++ b/debian/bin/gencontrol_signed.py @@ -0,0 +1,351 @@ +#!/usr/bin/python3 + +import hashlib +import json +import os.path +import pathlib +import re +import ssl +import subprocess +import sys + +from debian_linux.config import ConfigCoreDump +from debian_linux.debian import VersionLinux, BinaryPackage +from debian_linux.gencontrol import Gencontrol as Base, \ + iter_flavours, PackagesBundle +from debian_linux.utils import Templates + + +class Gencontrol(Base): + def __init__(self, arch): + super(Gencontrol, self).__init__( + ConfigCoreDump(fp=open('debian/config.defines.dump', 'rb')), + Templates(['debian/signing_templates', 'debian/templates'])) + + image_binary_version = self.changelog[0].version.complete + + config_entry = self.config[('version',)] + self.version = VersionLinux(config_entry['source']) + + # Check config version matches changelog version + assert self.version.complete == re.sub(r'\+b\d+$', r'', + image_binary_version) + + self.abiname = config_entry['abiname'] + self.vars = { + 'upstreamversion': self.version.linux_upstream, + 'version': self.version.linux_version, + 'source_basename': re.sub(r'-[\d.]+$', '', + self.changelog[0].source), + 'source_upstream': self.version.upstream, + 'abiname': self.abiname, + 'imagebinaryversion': image_binary_version, + 'imagesourceversion': self.version.complete, + 'arch': arch, + } + self.vars['source_suffix'] = \ + 
self.changelog[0].source[len(self.vars['source_basename']):] + self.vars['template'] = \ + 'linux-image%(source_suffix)s-%(arch)s-signed-template' % self.vars + + self.package_dir = 'debian/%(template)s' % self.vars + self.template_top_dir = (self.package_dir + + '/usr/share/code-signing/%(template)s' + % self.vars) + self.template_debian_dir = (self.template_top_dir + + '/source-template/debian') + os.makedirs(self.template_debian_dir, exist_ok=True) + + self.image_packages = [] + + # We need a separate base dir for now + self.bundles = {None: PackagesBundle(None, self.templates, + pathlib.Path(self.template_debian_dir))} + self.packages = self.bundle.packages + self.makefile = self.bundle.makefile + + def do_main_setup(self, vars, makeflags, extra): + makeflags['VERSION'] = self.version.linux_version + makeflags['GENCONTROL_ARGS'] = ( + '-v%(imagebinaryversion)s ' + '-DBuilt-Using="%(source_basename)s%(source_suffix)s (= %(imagesourceversion)s)"' % + vars) + makeflags['PACKAGE_VERSION'] = vars['imagebinaryversion'] + + self.installer_packages = {} + + if os.getenv('DEBIAN_KERNEL_DISABLE_INSTALLER'): + if self.changelog[0].distribution == 'UNRELEASED': + import warnings + warnings.warn('Disable installer modules on request ' + '(DEBIAN_KERNEL_DISABLE_INSTALLER set)') + else: + raise RuntimeError( + 'Unable to disable installer modules in release build ' + '(DEBIAN_KERNEL_DISABLE_INSTALLER set)') + elif self.config.merge('packages').get('installer', True): + # Add udebs using kernel-wedge + kw_env = os.environ.copy() + kw_env['KW_DEFCONFIG_DIR'] = 'debian/installer' + kw_env['KW_CONFIG_DIR'] = 'debian/installer' + kw_proc = subprocess.Popen( + ['kernel-wedge', 'gen-control', vars['abiname']], + stdout=subprocess.PIPE, + text=True, + env=kw_env) + udeb_packages = BinaryPackage.read_rfc822(kw_proc.stdout) + kw_proc.wait() + if kw_proc.returncode != 0: + raise RuntimeError('kernel-wedge exited with code %d' % + kw_proc.returncode) + + for package in udeb_packages: + for arch in package['Architecture']: + if self.config.merge('build', arch) \ + .get('signed-code', False): + self.installer_packages.setdefault(arch, []) \ + .append(package) + + def do_main_packages(self, vars, makeflags, extra): + # Assume that arch:all packages do not get binNMU'd + self.packages['source']['Build-Depends'].append( + 'linux-support-%(abiname)s (= %(imagesourceversion)s)' % vars) + + def do_main_recurse(self, vars, makeflags, extra): + # Each signed source package only covers a single architecture + self.do_arch(vars['arch'], vars.copy(), + makeflags.copy(), extra) + + def do_extra(self): + pass + + def do_arch_setup(self, vars, makeflags, arch, extra): + super(Gencontrol, self).do_main_setup(vars, makeflags, extra) + + abiname_part = '-%s' % self.config.merge('abi', arch)['abiname'] + makeflags['ABINAME'] = vars['abiname'] = \ + self.config['version', ]['abiname_base'] + abiname_part + + def do_arch_packages(self, arch, vars, makeflags, extra): + udeb_packages = self.installer_packages.get(arch, []) + + if udeb_packages: + makeflags_local = makeflags.copy() + makeflags_local['PACKAGE_NAMES'] = ' '.join(p['Package'] for p in udeb_packages) + + for package in udeb_packages: + package.meta['rules-target'] = 'udeb' + + self.bundle.add_packages( + udeb_packages, + (arch, 'real'), + makeflags_local, arch=arch, + ) + + def do_featureset_setup(self, vars, makeflags, arch, featureset, extra): + self.default_flavour = self.config.merge('base', arch, featureset) \ + .get('default-flavour') + if self.default_flavour is not 
None: + if featureset != 'none': + raise RuntimeError("default-flavour set for %s %s," + " but must only be set for featureset none" + % (arch, featureset)) + if self.default_flavour \ + not in iter_flavours(self.config, arch, featureset): + raise RuntimeError("default-flavour %s for %s %s does not exist" + % (self.default_flavour, arch, featureset)) + + self.quick_flavour = self.config.merge('base', arch, featureset) \ + .get('quick-flavour') + + def do_flavour_setup(self, vars, makeflags, arch, featureset, flavour, + extra): + super(Gencontrol, self).do_flavour_setup(vars, makeflags, arch, + featureset, flavour, extra) + + config_description = self.config.merge('description', arch, featureset, + flavour) + config_image = self.config.merge('image', arch, featureset, flavour) + + vars['flavour'] = vars['localversion'][1:] + vars['class'] = config_description['hardware'] + vars['longclass'] = (config_description.get('hardware-long') + or vars['class']) + + vars['image-stem'] = config_image.get('install-stem') + makeflags['IMAGE_INSTALL_STEM'] = vars['image-stem'] + + def do_flavour_packages(self, arch, featureset, + flavour, vars, makeflags, extra): + ruleid = (arch, featureset, flavour, 'real') + + config_build = self.config.merge('build', arch, featureset, flavour) + if not config_build.get('signed-code', False): + return + + # In a quick build, only build the quick flavour (if any). + if 'pkg.linux.quick' in \ + os.environ.get('DEB_BUILD_PROFILES', '').split() \ + and flavour != self.quick_flavour: + return + + image_suffix = '%(abiname)s%(localversion)s' % vars + image_package_name = 'linux-image-%s-unsigned' % image_suffix + + # Verify that this flavour is configured to support Secure Boot, + # and get the trusted certificates filename. + with open('debian/%s/boot/config-%s' % + (image_package_name, image_suffix)) as f: + kconfig = f.readlines() + assert 'CONFIG_EFI_STUB=y\n' in kconfig + assert 'CONFIG_LOCK_DOWN_IN_EFI_SECURE_BOOT=y\n' in kconfig + cert_file_name = config_build['trusted-certs'] + self.image_packages.append((image_suffix, image_package_name, + cert_file_name)) + + self.packages['source']['Build-Depends'].append( + image_package_name + + ' (= %(imagebinaryversion)s) [%(arch)s]' % vars) + + packages_own = ( + self.bundle.add('image', + ruleid, makeflags, vars, arch=arch) + ) + + if self.config.merge('packages').get('meta', True): + packages_meta = ( + self.bundle.add('image.meta', ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_meta) == 1 + packages_meta += ( + self.bundle.add('headers.meta', ruleid, makeflags, vars, arch=arch) + ) + assert len(packages_meta) == 2 + + # Don't pretend to support build-profiles + for package in packages_meta: + del package['Build-Profiles'] + + if flavour == self.default_flavour \ + and not self.vars['source_suffix']: + packages_meta[0].setdefault('Provides') \ + .append('linux-image-generic') + packages_meta[1].setdefault('Provides') \ + .append('linux-headers-generic') + + packages_own.extend(packages_meta) + + def write(self): + self.bundle.extract_makefile() + self.write_changelog() + self.write_control(name=(self.template_debian_dir + '/control')) + self.write_makefile(name=(self.template_debian_dir + '/rules.gen')) + self.write_files_json() + self.write_source_lintian_overrides() + + def write_changelog(self): + # Copy the linux changelog, but: + # * Change the source package name and version + # * Insert a line to refer to refer to the linux source version + vars = self.vars.copy() + vars['source'] = 
self.changelog[0].source + vars['distribution'] = self.changelog[0].distribution + vars['urgency'] = self.changelog[0].urgency + vars['signedsourceversion'] = \ + re.sub(r'\+b(\d+)$', r'.b\1', + re.sub(r'-', r'+', vars['imagebinaryversion'])) + + with open(self.template_debian_dir + '/changelog', 'w', + encoding='utf-8') as f: + f.write(self.substitute('''\ +linux-signed@source_suffix@-@arch@ (@signedsourceversion@) @distribution@; urgency=@urgency@ + + * Sign kernel from @source@ @imagebinaryversion@ + +''', + vars)) + + with open('debian/changelog', 'r', encoding='utf-8') \ + as changelog_in: + # Ignore first two header lines + changelog_in.readline() + changelog_in.readline() + + for d in changelog_in.read(): + f.write(d) + + def write_files_json(self): + # Can't raise from a lambda function :-( + def raise_func(e): + raise e + + # Some functions in openssl work with multiple concatenated + # PEM-format certificates, but others do not. + def get_certs(file_name): + certs = [] + BEGIN, MIDDLE = 0, 1 + state = BEGIN + with open(file_name) as f: + for line in f: + if line == '-----BEGIN CERTIFICATE-----\n': + assert state == BEGIN + certs.append([]) + state = MIDDLE + elif line == '-----END CERTIFICATE-----\n': + assert state == MIDDLE + state = BEGIN + else: + assert line[0] != '-' + assert state == MIDDLE + certs[-1].append(line) + assert state == BEGIN + return [''.join(cert_lines) for cert_lines in certs] + + def get_cert_fingerprint(cert, algo): + hasher = hashlib.new(algo) + hasher.update(ssl.PEM_cert_to_DER_cert(cert)) + return hasher.hexdigest() + + all_files = {'packages': {}} + + for image_suffix, image_package_name, cert_file_name in \ + self.image_packages: + package_dir = 'debian/%s' % image_package_name + package_files = [] + package_modules = [] + package_files.append({'sig_type': 'efi', + 'file': 'boot/vmlinuz-%s' % image_suffix}) + for root, dirs, files in os.walk('%s/lib/modules' % package_dir, + onerror=raise_func): + for name in files: + if name.endswith('.ko'): + package_modules.append( + '%s/%s' % + (root[(len(package_dir) + 1):], name)) + package_modules.sort() + for module in package_modules: + package_files.append( + {'sig_type': 'linux-module', + 'file': module}) + package_certs = [get_cert_fingerprint(cert, 'sha256') + for cert in get_certs(cert_file_name)] + assert len(package_certs) >= 1 + all_files['packages'][image_package_name] = { + 'trusted_certs': package_certs, + 'files': package_files + } + + with open(self.template_top_dir + '/files.json', 'w') as f: + json.dump(all_files, f) + + def write_source_lintian_overrides(self): + os.makedirs(os.path.join(self.template_debian_dir, 'source'), + exist_ok=True) + with open(os.path.join(self.template_debian_dir, + 'source/lintian-overrides'), 'w') as f: + f.write(self.substitute(self.templates.get('source.lintian-overrides'), + self.vars)) + + +if __name__ == '__main__': + Gencontrol(sys.argv[1])() diff --git a/debian/bin/genorig.py b/debian/bin/genorig.py new file mode 100755 index 000000000..9bf43a34e --- /dev/null +++ b/debian/bin/genorig.py @@ -0,0 +1,169 @@ +#!/usr/bin/python3 + +import sys +from debian import deb822 +import glob +import os +import os.path +import shutil +import subprocess +import time +import warnings + +from debian_linux.debian import Changelog, VersionLinux + + +class Main(object): + def __init__(self, input_repo, override_version): + self.log = sys.stdout.write + + self.input_repo = input_repo + + changelog = Changelog(version=VersionLinux)[0] + source = changelog.source + version = 
changelog.version + + if override_version: + version = VersionLinux('%s-0' % override_version) + + self.version_dfsg = version.linux_dfsg + if self.version_dfsg is None: + self.version_dfsg = '0' + + self.log('Using source name %s, version %s, dfsg %s\n' % + (source, version.upstream, self.version_dfsg)) + + self.orig = '%s-%s' % (source, version.upstream) + self.orig_tar = '%s_%s.orig.tar.xz' % (source, version.upstream) + self.tag = 'v' + version.linux_upstream_full + + def __call__(self): + import tempfile + temp_dir = tempfile.mkdtemp(prefix='genorig', dir='debian') + old_umask = os.umask(0o022) + try: + # When given a remote repo, we need a local copy. + if not self.input_repo.startswith('/') and ':' in self.input_repo: + temp_repo = os.path.join(temp_dir, 'git') + subprocess.run( + ['git', 'clone', '--bare', '--depth=1', '-b', self.tag, + self.input_repo, temp_repo], + check=True) + self.input_repo = temp_repo + + self.dir = os.path.join(temp_dir, 'export') + os.mkdir(self.dir) + self.upstream_export(self.input_repo) + + # exclude_files() will change dir mtimes. Capture the + # original release time so we can apply it to the final + # tarball. + orig_date = time.strftime( + "%a, %d %b %Y %H:%M:%S +0000", + time.gmtime( + os.stat(os.path.join(self.dir, self.orig, 'Makefile')) + .st_mtime)) + + self.exclude_files() + os.umask(old_umask) + self.tar(orig_date) + finally: + os.umask(old_umask) + shutil.rmtree(temp_dir) + + def upstream_export(self, input_repo): + self.log("Exporting %s from %s\n" % (self.tag, input_repo)) + + gpg_wrapper = os.path.join(os.getcwd(), + "debian/bin/git-tag-gpg-wrapper") + verify_proc = subprocess.Popen(['git', + '-c', 'gpg.program=%s' % gpg_wrapper, + 'tag', '-v', self.tag], + cwd=input_repo) + if verify_proc.wait(): + raise RuntimeError("GPG tag verification failed") + + archive_proc = subprocess.Popen(['git', 'archive', '--format=tar', + '--prefix=%s/' % self.orig, self.tag], + cwd=input_repo, + stdout=subprocess.PIPE) + extract_proc = subprocess.Popen(['tar', '-xaf', '-'], cwd=self.dir, + stdin=archive_proc.stdout) + + ret1 = archive_proc.wait() + ret2 = extract_proc.wait() + if ret1 or ret2: + raise RuntimeError("Can't create archive") + + def exclude_files(self): + self.log("Excluding file patterns specified in debian/copyright\n") + with open("debian/copyright") as f: + header = deb822.Deb822(f) + patterns = header.get("Files-Excluded", '').strip().split() + for pattern in patterns: + matched = False + for name in glob.glob(os.path.join(self.dir, self.orig, pattern)): + try: + shutil.rmtree(name) + except NotADirectoryError: + os.unlink(name) + matched = True + if not matched: + warnings.warn("Exclusion pattern '%s' did not match anything" + % pattern, + RuntimeWarning) + + def tar(self, orig_date): + out = os.path.join("../orig", self.orig_tar) + try: + os.mkdir("../orig") + except OSError: + pass + try: + os.stat(out) + raise RuntimeError("Destination already exists") + except OSError: + pass + self.log("Generate tarball %s\n" % out) + + env = os.environ.copy() + env.update({ + 'LC_ALL': 'C', + }) + cmd = [ + 'tar', + '-C', self.dir, + '--sort=name', + '--mtime={}'.format(orig_date), + '--owner=root', + '--group=root', + '--use-compress-program=xz -T0', + '-cf', + out, self.orig, + ] + + try: + subprocess.run(cmd, env=env, check=True) + os.chmod(out, 0o644) + except BaseException: + try: + os.unlink(out) + except OSError: + pass + raise + try: + os.symlink(os.path.join('orig', self.orig_tar), + os.path.join('..', self.orig_tar)) + except OSError: + 
pass + + +if __name__ == '__main__': + from optparse import OptionParser + parser = OptionParser(usage="%prog [OPTION]... REPO") + parser.add_option("-V", "--override-version", dest="override_version", + help="Override version", metavar="VERSION") + options, args = parser.parse_args() + + assert len(args) == 1 + Main(args[0], options.override_version)() diff --git a/debian/bin/genpatch-lockdown b/debian/bin/genpatch-lockdown new file mode 100755 index 000000000..1aed0c735 --- /dev/null +++ b/debian/bin/genpatch-lockdown @@ -0,0 +1,109 @@ +#!/usr/bin/python3 + +import io +import os +import os.path +import re +import subprocess +import sys + + +def main(repo, range='torvalds/master..dhowells/efi-lock-down'): + patch_dir = 'debian/patches' + lockdown_patch_dir = 'features/all/lockdown' + series_name = 'series' + + # Only replace patches in this subdirectory and starting with a digit + # - the others are presumably Debian-specific for now + lockdown_patch_name_re = re.compile( + r'^' + re.escape(lockdown_patch_dir) + r'/\d') + series_before = [] + series_after = [] + + old_series = set() + new_series = set() + + try: + with open(os.path.join(patch_dir, series_name), 'r') as series_fh: + for line in series_fh: + name = line.strip() + if lockdown_patch_name_re.match(name): + old_series.add(name) + elif len(old_series) == 0: + series_before.append(line) + else: + series_after.append(line) + except FileNotFoundError: + pass + + with open(os.path.join(patch_dir, series_name), 'w') as series_fh: + for line in series_before: + series_fh.write(line) + + # Add directory prefix to all filenames. + # Add Origin to all patch headers. + def add_patch(name, source_patch, origin): + name = os.path.join(lockdown_patch_dir, name) + path = os.path.join(patch_dir, name) + try: + os.unlink(path) + except FileNotFoundError: + pass + with open(path, 'w') as patch: + in_header = True + for line in source_patch: + if in_header and re.match(r'^(\n|[^\w\s]|Index:)', line): + patch.write('Origin: %s\n' % origin) + if line != '\n': + patch.write('\n') + in_header = False + patch.write(line) + series_fh.write(name) + series_fh.write('\n') + new_series.add(name) + + # XXX No signature to verify + + env = os.environ.copy() + env['GIT_DIR'] = os.path.join(repo, '.git') + args = ['git', 'format-patch', '--subject-prefix=', range] + format_proc = subprocess.Popen(args, + cwd=os.path.join(patch_dir, + lockdown_patch_dir), + env=env, stdout=subprocess.PIPE) + with io.open(format_proc.stdout.fileno(), encoding='utf-8') as pipe: + for line in pipe: + name = line.strip('\n') + with open(os.path.join(patch_dir, lockdown_patch_dir, name)) \ + as source_patch: + patch_from = source_patch.readline() + match = re.match(r'From ([0-9a-f]{40}) ', patch_from) + assert match + origin = ('https://git.kernel.org/pub/scm/linux/kernel/' + 'git/dhowells/linux-fs.git/commit?id=%s' % + match.group(1)) + add_patch(name, source_patch, origin) + + for line in series_after: + series_fh.write(line) + + for name in new_series: + if name in old_series: + old_series.remove(name) + else: + print('Added patch', os.path.join(patch_dir, name)) + + for name in old_series: + print('Obsoleted patch', os.path.join(patch_dir, name)) + + +if __name__ == '__main__': + if not (2 <= len(sys.argv) <= 3): + sys.stderr.write('''\ +Usage: %s REPO [REVISION-RANGE] +REPO is a git repo containing the REVISION-RANGE. The default range is +torvalds/master..dhowells/efi-lock-down. 
+''' % sys.argv[0]) + print('BASE is the base branch (default: torvalds/master).') + sys.exit(2) + main(*sys.argv[1:]) diff --git a/debian/bin/genpatch-rt b/debian/bin/genpatch-rt new file mode 100755 index 000000000..66affb076 --- /dev/null +++ b/debian/bin/genpatch-rt @@ -0,0 +1,160 @@ +#!/usr/bin/python3 + +import argparse +import io +import os +import os.path +import re +import shutil +import subprocess +import sys +import tempfile + + +def main(source, version, verify_signature): + patch_dir = 'debian/patches-rt' + series_name = 'series' + old_series = set() + new_series = set() + + try: + with open(os.path.join(patch_dir, series_name), 'r') as series_fh: + for line in series_fh: + name = line.strip() + if name != '' and name[0] != '#': + old_series.add(name) + except FileNotFoundError: + pass + + with open(os.path.join(patch_dir, series_name), 'w') as series_fh: + # Add Origin to all patch headers. + def add_patch(name, source_patch, origin): + path = os.path.join(patch_dir, name) + try: + os.unlink(path) + except FileNotFoundError: + pass + with open(path, 'w') as patch: + in_header = True + for line in source_patch: + if in_header and re.match(r'^(\n|[^\w\s]|Index:)', line): + patch.write('Origin: %s\n' % origin) + if line != '\n': + patch.write('\n') + in_header = False + patch.write(line) + new_series.add(name) + + if os.path.isdir(os.path.join(source, '.git')): + # Export rebased branch from stable-rt git as patch series + up_ver = re.sub(r'-rt\d+$', '', version) + env = os.environ.copy() + env['GIT_DIR'] = os.path.join(source, '.git') + env['DEBIAN_KERNEL_KEYRING'] = 'rt-signing-key.pgp' + + if verify_signature: + # Validate tag signature + gpg_wrapper = os.path.join(os.getcwd(), + "debian/bin/git-tag-gpg-wrapper") + verify_proc = subprocess.Popen( + ['git', '-c', 'gpg.program=%s' % gpg_wrapper, + 'tag', '-v', 'v%s-rebase' % version], + env=env) + if verify_proc.wait(): + raise RuntimeError("GPG tag verification failed") + + args = ['git', 'format-patch', + 'v%s..v%s-rebase' % (up_ver, version)] + format_proc = subprocess.Popen(args, + cwd=patch_dir, + env=env, stdout=subprocess.PIPE) + with io.open(format_proc.stdout.fileno(), encoding='utf-8') \ + as pipe: + for line in pipe: + name = line.strip('\n') + with open(os.path.join(patch_dir, name)) as source_patch: + patch_from = source_patch.readline() + match = re.match(r'From ([0-9a-f]{40}) ', patch_from) + assert match + origin = ('https://git.kernel.org/cgit/linux/kernel/' + 'git/rt/linux-stable-rt.git/commit?id=%s' % + match.group(1)) + add_patch(name, source_patch, origin) + series_fh.write(line) + + else: + # Get version and upstream version + if version is None: + match = re.search(r'(?:^|/)patches-(.+)\.tar\.[gx]z$', source) + assert match, 'no version specified or found in filename' + version = match.group(1) + match = re.match(r'^(\d+\.\d+)(?:\.\d+|-rc\d+)?-rt\d+$', version) + assert match, 'could not parse version string' + up_ver = match.group(1) + + if verify_signature: + # Expect an accompanying signature, and validate it + source_sig = re.sub(r'.[gx]z$', '.sign', source) + unxz_proc = subprocess.Popen(['xzcat', source], + stdout=subprocess.PIPE) + verify_output = subprocess.check_output( + ['gpgv', '--status-fd', '1', + '--keyring', 'debian/upstream/rt-signing-key.pgp', + '--ignore-time-conflict', source_sig, '-'], + stdin=unxz_proc.stdout, + text=True) + if unxz_proc.wait() or \ + not re.search(r'^\[GNUPG:\]\s+VALIDSIG\s', + verify_output, re.MULTILINE): + sys.stderr.write(verify_output) + raise RuntimeError("GPG 
signature verification failed") + + temp_dir = tempfile.mkdtemp(prefix='rt-genpatch', dir='debian') + try: + # Unpack tarball + subprocess.check_call(['tar', '-C', temp_dir, '-xaf', source]) + source_dir = os.path.join(temp_dir, 'patches') + assert os.path.isdir(source_dir), \ + 'tarball does not contain patches directory' + + # Copy patch series + origin = ('https://www.kernel.org/pub/linux/kernel/projects/' + 'rt/%s/older/patches-%s.tar.xz' % + (up_ver, version)) + with open(os.path.join(source_dir, 'series'), 'r') \ + as source_series_fh: + for line in source_series_fh: + name = line.strip() + if name != '' and name[0] != '#': + with open(os.path.join(source_dir, name)) \ + as source_patch: + add_patch(name, source_patch, origin) + series_fh.write(line) + finally: + shutil.rmtree(temp_dir) + + for name in new_series: + if name in old_series: + old_series.remove(name) + else: + print('Added patch', os.path.join(patch_dir, name)) + + for name in old_series: + print('Obsoleted patch', os.path.join(patch_dir, name)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Generate or update the rt featureset patch series') + parser.add_argument( + 'source', metavar='SOURCE', type=str, + help='tarball of patches or git repo containing the given RT-VERSION') + parser.add_argument( + 'version', metavar='RT-VERSION', type=str, nargs='?', + help='rt kernel version (optional for tarballs)') + parser.add_argument( + '--verify-signature', action=argparse.BooleanOptionalAction, + default=True, + help='verify signature on tarball (detached in .sign file) or git tag') + args = parser.parse_args() + main(args.source, args.version, args.verify_signature) diff --git a/debian/bin/getconfig.py b/debian/bin/getconfig.py new file mode 100755 index 000000000..b719a17a1 --- /dev/null +++ b/debian/bin/getconfig.py @@ -0,0 +1,25 @@ +#!/usr/bin/python3 + +import sys + +from debian_linux.config import ConfigCoreDump + +section = tuple(s or None for s in sys.argv[1:-1]) +key = sys.argv[-1] +config = ConfigCoreDump(fp=open("debian/config.defines.dump", "rb")) +try: + value = config[section][key] +except KeyError: + sys.exit(1) + +if isinstance(value, str): + # Don't iterate over it + print(value) +else: + # In case it's a sequence, try printing each item + try: + for item in value: + print(item) + except TypeError: + # Otherwise use the default format + print(value) diff --git a/debian/bin/git-tag-gpg-wrapper b/debian/bin/git-tag-gpg-wrapper new file mode 100755 index 000000000..43030206f --- /dev/null +++ b/debian/bin/git-tag-gpg-wrapper @@ -0,0 +1,42 @@ +#!/bin/bash -e + +# Instead of calling gpg, call gpgv and provide a local keyring + +debian_dir="$(readlink -f "$(dirname "$0")/..")" + +# Parse the expected options. If the next two lines are combined, a +# failure of getopt won't cause the script to exit. 
+ordered_args="$(getopt -n "$0" -o "" -l "status-fd:" -l "keyid-format:" -l "verify" -- "$@")" +eval "set -- $ordered_args" +gpgv_opts=() +while true; do + case "$1" in + --status-fd) + gpgv_opts+=(--status-fd $2) + shift 2 + ;; + --keyid-format) + # ignore + shift 2 + ;; + --verify) + # ignore + shift 1 + ;; + --) + shift 1 + break + ;; + esac +done + +keyring="$debian_dir/upstream/${DEBIAN_KERNEL_KEYRING:-signing-key.asc}" +case "$keyring" in + *.asc) + keyring_armored="$keyring" + keyring="$(mktemp)" + trap 'rm -f "$keyring"' EXIT + gpg --dearmor <"$keyring_armored" > "$keyring" + ;; +esac +gpgv "${gpgv_opts[@]}" --keyring "$keyring" -- "$@" diff --git a/debian/bin/kconfig.py b/debian/bin/kconfig.py new file mode 100755 index 000000000..6115355f4 --- /dev/null +++ b/debian/bin/kconfig.py @@ -0,0 +1,39 @@ +#!/usr/bin/python3 + +import optparse +import re + +from debian_linux.kconfig import KconfigFile + + +def merge(output, configs, overrides): + kconfig = KconfigFile() + for c in configs: + kconfig.read(open(c)) + for key, value in overrides.items(): + kconfig.set(key, value) + open(output, "w").write(str(kconfig)) + + +def opt_callback_dict(option, opt, value, parser): + match = re.match(r'^\s*(\S+)=(\S+)\s*$', value) + if not match: + raise optparse.OptionValueError('not key=value') + dest = option.dest + data = getattr(parser.values, dest) + data[match.group(1)] = match.group(2) + + +if __name__ == '__main__': + parser = optparse.OptionParser(usage="%prog [OPTION]... FILE...") + parser.add_option( + '-o', '--override', + action='callback', + callback=opt_callback_dict, + default={}, + dest='overrides', + help="Override option", + type='string') + options, args = parser.parse_args() + + merge(args[0], args[1:], options.overrides) diff --git a/debian/bin/no-depmod b/debian/bin/no-depmod new file mode 100755 index 000000000..ed5a8463f --- /dev/null +++ b/debian/bin/no-depmod @@ -0,0 +1,18 @@ +#!/bin/sh + +set -e + +# This is a dummy substitute for depmod. Since we run depmod during +# postinst, we do not need or want to package the files that it +# generates. 
+ +if [ "x$1" = x-V ]; then + # Satisfy version test + echo 'not really module-init-tools' +elif [ "x$1" = x-b -a "${2%/depmod.??????}" != "$2" ]; then + # Satisfy test of short kernel versions + mkdir -p "$2/lib/modules/$3" + touch "$2/lib/modules/$3/modules.dep" +else + echo 'skipping depmod' +fi diff --git a/debian/bin/stable-update b/debian/bin/stable-update new file mode 100755 index 000000000..0ce6112bb --- /dev/null +++ b/debian/bin/stable-update @@ -0,0 +1,135 @@ +#!/usr/bin/python3 + +import sys +import os +import re +import subprocess + +from debian_linux.debian import Changelog, VersionLinux + + +def base_version(ver): + # Assume base version is at least 3.0, thus only 2 components wanted + match = re.match(r'^(\d+\.\d+)', ver) + assert match + return match.group(1) + + +def add_update(ver, inc): + base = base_version(ver) + if base == ver: + update = 0 + else: + update = int(ver[len(base)+1:]) + update += inc + if update == 0: + return base + else: + return '{}.{}'.format(base, update) + + +def next_update(ver): + return add_update(ver, 1) + + +def print_stable_log(log, cur_ver, new_ver): + major_ver = re.sub(r'^(\d+)\..*', r'\1', cur_ver) + while cur_ver != new_ver: + next_ver = next_update(cur_ver) + print(' https://www.kernel.org/pub/linux/kernel/v{}.x/ChangeLog-{}' + .format(major_ver, next_ver), + file=log) + log.flush() # serialise our output with git's + subprocess.check_call(['git', 'log', '--reverse', + '--pretty= - %s', + 'v{}..v{}^'.format(cur_ver, next_ver)], + stdout=log) + cur_ver = next_ver + + +def main(repo, new_ver): + if os.path.exists(os.path.join(repo, '.git')): + os.environ['GIT_DIR'] = os.path.join(repo, '.git') + else: + os.environ['GIT_DIR'] = repo + + changelog = Changelog(version=VersionLinux) + cur_pkg_ver = changelog[0].version + cur_ver = cur_pkg_ver.linux_upstream_full + + if base_version(new_ver) != base_version(cur_ver): + print('{} is not on the same stable series as {}' + .format(new_ver, cur_ver), + file=sys.stderr) + sys.exit(2) + + new_pkg_ver = new_ver + '-1' + if cur_pkg_ver.linux_revision_experimental: + new_pkg_ver += '~exp1' + + # Three possible cases: + # 1. The current version has been released so we need to add a new + # version to the changelog. + # 2. The current version has not been released so we're changing its + # version string. + # (a) There are no stable updates included in the current version, + # so we need to insert an introductory line, the URL(s) and + # git log(s) and a blank line at the top. + # (b) One or more stable updates are already included in the current + # version, so we need to insert the URL(s) and git log(s) after + # them. 
+ + changelog_intro = 'New upstream stable update:' + + # Case 1 + if changelog[0].distribution != 'UNRELEASED': + subprocess.check_call(['dch', '-v', new_pkg_ver, '-D', 'UNRELEASED', + changelog_intro]) + + with open('debian/changelog', 'r') as old_log: + with open('debian/changelog.new', 'w') as new_log: + line_no = 0 + inserted = False + intro_line = ' * {}\n'.format(changelog_intro) + + for line in old_log: + line_no += 1 + + # Case 2 + if changelog[0].distribution == 'UNRELEASED' and line_no == 1: + print('{} ({}) UNRELEASED; urgency={}' + .format(changelog[0].source, new_pkg_ver, + changelog[0].urgency), + file=new_log) + continue + + if not inserted: + # Case 2(a) + if line_no == 3 and line != intro_line: + new_log.write(intro_line) + print_stable_log(new_log, cur_ver, new_ver) + new_log.write('\n') + inserted = True + # Case 1 or 2(b) + elif line_no > 3 and line == '\n': + print_stable_log(new_log, cur_ver, new_ver) + inserted = True + + # Check that we inserted before hitting the end of the + # first version entry + assert not (line.startswith(' -- ') and not inserted) + + new_log.write(line) + + os.rename('debian/changelog.new', 'debian/changelog') + + +if __name__ == '__main__': + if len(sys.argv) != 3: + print('''\ +Usage: {} REPO VERSION +REPO is the git repository to generate a changelog from +VERSION is the stable version (without leading v)'''.format(sys.argv[0]), + file=sys.stderr) + sys.exit(2) + main(*sys.argv[1:]) diff --git a/debian/bin/stable-update.sh b/debian/bin/stable-update.sh new file mode 100755 index 000000000..bd86860c6 --- /dev/null +++ b/debian/bin/stable-update.sh @@ -0,0 +1,2 @@ +#!/bin/sh -e +exec "$(dirname "$0")/stable-update" "$@" diff --git a/debian/bin/test-patches b/debian/bin/test-patches new file mode 100755 index 000000000..ad0d77add --- /dev/null +++ b/debian/bin/test-patches @@ -0,0 +1,142 @@ +#!/bin/bash + +set -e +shopt -s extglob + +# Set defaults from the running kernel +arch="$(dpkg --print-architecture)" +kernelabi="$(uname -r)" +ff="${kernelabi#+([^-])-@(trunk|?(rc)+([0-9])|0.@(bpo|deb+([0-9])).+([0-9]))-}" +if [ "x$ff" != "x$kernelabi" ]; then + flavour="${ff#@(openvz|rt|vserver|xen)-}" + if [ "x$flavour" != "x$ff" ]; then + featureset="${ff%-$flavour}" + else + featureset=none + fi +else + flavour= + featureset=none +fi + +dbginfo= +fuzz=0 +jobs=$(nproc) + +eval "set -- $(getopt -n "$0" -o "f:gj:s:" -l "fuzz:" -- "$@")" +while true; do + case "$1" in + -f) flavour="$2"; shift 2 ;; + -g) dbginfo=y; shift 1 ;; + -j) jobs="$2"; shift 2 ;; + -s) featureset="$2"; shift 2 ;; + --fuzz) fuzz="$2"; shift 2;; + --) shift 1; break ;; + esac +done + +if [ $# -lt 1 ]; then + echo >&2 "Usage: $0 [<options>] <patch>..." + cat >&2 <<EOF +Options: + -f <flavour> specify the 'flavour' of kernel to build, e.g. 686-pae + -g enable debug info + -j <jobs> specify number of compiler jobs to run in parallel + (default: number of available processors) + -s <featureset> specify an optional featureset to apply, e.g. 
rt + --fuzz <num> set the maximum patch fuzz factor (default: 0) +EOF + exit 2 +fi + +if [ -z "$flavour" ]; then + echo >&2 "You must specify a flavour to build with the -f option" + exit 2 +fi + +profiles=nodoc,noudeb,pkg.linux.nosource,pkg.linux.mintools +if [ -z "$dbginfo" ]; then + profiles="$profiles,pkg.linux.nokerneldbg,pkg.linux.nokerneldbginfo" +fi + +# Check build-dependencies early if possible +if [ -f debian/control ]; then + dpkg-checkbuilddeps -P"$profiles" +fi + +# Append 'a~test' to Debian version; this should be less than any official +# successor and easily recognisable +version="$(dpkg-parsechangelog | sed 's/^Version: //; t; d')" +if [ "${version%a~test}" = "$version" ]; then + version="$version"a~test + dch -v "$version" --distribution UNRELEASED "Testing patches $*" +fi + +# Ignore user's .quiltrc +alias quilt='quilt --quiltrc -' + +# Try to clean up any previous test patches +if [ "$featureset" = none ]; then + patchdir=debian/patches + while patch="$(quilt top 2>/dev/null)" && \ + [ "${patch#test/}" != "$patch" ]; do + quilt pop -f + done + while patch="$(quilt next 2>/dev/null)" && \ + [ "${patch#test/}" != "$patch" ]; do + quilt delete -r "$patch" + done +else + patchdir=debian/patches-${featureset} + sed -i '/^test\//d' $patchdir/series +fi + +# Prepare a new directory for the patches +rm -rf $patchdir/test/ +mkdir $patchdir/test + +# Prepare a new directory for the config; override ABI name, featuresets, flavours +rm -rf debian/config.local +mkdir debian/config.local debian/config.local/"$arch" debian/config.local/"$arch"/"$featureset" +cat >debian/config.local/defines <<EOF +[abi] +abiname: 0.a.test +EOF +cat >debian/config.local/"$arch"/defines <<EOF +[base] +featuresets: $featureset +EOF +cat >debian/config.local/"$arch"/"$featureset"/defines <<EOF +[base] +flavours: $flavour +EOF +if [ "$featureset" = none ]; then + # default-flavour must refer to a flavour that's enabled + cat >>debian/config.local/"$arch"/"$featureset"/defines <<EOF +default-flavour: $flavour +EOF +fi + +# Regenerate control and included rules +rm -f debian/control debian/rules.gen +debian/rules debian/control-real && exit 1 || true +test -f debian/control +test -f debian/rules.gen + +# Check build-dependencies now that we know debian/control exists +dpkg-checkbuilddeps -P"$profiles" + +# Clean up old build; apply existing patches for featureset +debian/rules clean +debian/rules source + +# Apply the additional patches +for patch in "$@"; do + patch_abs="$(readlink -f "$patch")" + (cd "debian/build/source_${featureset}" && \ + quilt import -P "test/$(basename "$patch")" "$patch_abs" && \ + quilt push --fuzz="$fuzz") +done + +# Build selected binaries +dpkg-buildpackage -b -P"$profiles" -j"$jobs" -nc -uc diff --git a/debian/bin/update-bug-taint-list b/debian/bin/update-bug-taint-list new file mode 100755 index 000000000..76bb1fc2e --- /dev/null +++ b/debian/bin/update-bug-taint-list @@ -0,0 +1,24 @@ +#!/bin/sh -eu + +temp="$(mktemp)" +trap 'rm -f "$temp"' EXIT + +# Copy everything above the existing flag checks. +sed -rne '/^ +_check /q; p' \ + < debian/templates/image.bug/include-1tainted >"$temp" + +# Generate flag checks from the table in tainted-kernels.rst. We +# could alternatively extract them from sysctl/kernel.rst or in the C +# sources, but this is easy to find and parse and is likely to have +# the most useful descriptions. +sed -rne '/^Bit +Log +Number +Reason/,/^$/ { + s/^ *([0-9]+) +.\/(.) 
+[0-9]+ +(.*)/ _check \1 \2 '\''\3'\''/p + }' \ + < Documentation/admin-guide/tainted-kernels.rst >>"$temp" + +# Copy everything below the existing flag checks. +sed -rne '/^ +echo "\*\* Tainted:/,$p' \ + < debian/templates/image.bug/include-1tainted >>"$temp" + +# Update the bug script in-place. +cp "$temp" debian/templates/image.bug/include-1tainted
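
As a rough illustration (not part of the patch set above): the sed pipeline in update-bug-taint-list emits one "_check BIT LOG-CHAR 'REASON'" line per row of the taint table in tainted-kernels.rst. A minimal sketch of the same extraction in Python, assuming the table keeps its "Bit  Log  Number  Reason" header and is terminated by a blank line; the function name taint_checks is hypothetical:

    #!/usr/bin/python3
    # Sketch only: mirrors the table parsing done by the sed expression
    # in debian/bin/update-bug-taint-list.
    import re

    def taint_checks(rst='Documentation/admin-guide/tainted-kernels.rst'):
        checks = []
        in_table = False
        with open(rst) as f:
            for line in f:
                if re.match(r'^Bit +Log +Number +Reason', line):
                    in_table = True      # header line starts the taint table
                    continue
                if in_table:
                    if line == '\n':
                        break            # blank line ends the table
                    # e.g. "  0  G/P       1  proprietary module was loaded"
                    m = re.match(r'^ *(\d+) +./(.) +\d+ +(.*)', line)
                    if m:
                        checks.append(" _check %s %s '%s'" % m.group(1, 2, 3))
        return checks

Reading it this way makes the three captured fields explicit: the bit number, the single log character after the slash, and the human-readable reason that ends up quoted in the generated _check line.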