author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:28:00 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:28:00 +0000
commit     3565071f226432336a54d0193d729fa4508a3394 (patch)
tree       4cde13f078f84c0a7785d234fd52edce7c90546a /debian/lib
parent     Adding upstream version 6.6.15. (diff)
Adding debian version 6.6.15-2. (tag: debian/6.6.15-2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'debian/lib')
-rw-r--r--  debian/lib/python/debian_linux/__init__.py    |   1
-rw-r--r--  debian/lib/python/debian_linux/abi.py         |  45
-rw-r--r--  debian/lib/python/debian_linux/config.py      | 257
-rw-r--r--  debian/lib/python/debian_linux/debian.py      | 705
-rw-r--r--  debian/lib/python/debian_linux/firmware.py    |  90
-rw-r--r--  debian/lib/python/debian_linux/gencontrol.py  | 581
-rw-r--r--  debian/lib/python/debian_linux/kconfig.py     |  93
-rw-r--r--  debian/lib/python/debian_linux/test_debian.py | 424
-rw-r--r--  debian/lib/python/debian_linux/utils.py       |  76
9 files changed, 2272 insertions(+), 0 deletions(-)
diff --git a/debian/lib/python/debian_linux/__init__.py b/debian/lib/python/debian_linux/__init__.py
new file mode 100644
index 0000000000..b785cebf71
--- /dev/null
+++ b/debian/lib/python/debian_linux/__init__.py
@@ -0,0 +1 @@
+# Module
diff --git a/debian/lib/python/debian_linux/abi.py b/debian/lib/python/debian_linux/abi.py
new file mode 100644
index 0000000000..abaaeeaa94
--- /dev/null
+++ b/debian/lib/python/debian_linux/abi.py
@@ -0,0 +1,45 @@
+class Symbol(object):
+ def __init__(self, name, namespace, module, version, export):
+ self.name, self.namespace, self.module = name, namespace, module
+ self.version, self.export = version, export
+
+ def __eq__(self, other):
+ if not isinstance(other, Symbol):
+ return NotImplemented
+
+ # Symbols are resolved to modules by depmod at installation/
+ # upgrade time, not compile time, so moving a symbol between
+ # modules is not an ABI change. Compare everything else.
+ if self.name != other.name:
+ return False
+ if self.namespace != other.namespace:
+ return False
+ if self.version != other.version:
+ return False
+ if self.export != other.export:
+ return False
+
+ return True
+
+ def __ne__(self, other):
+ ret = self.__eq__(other)
+ if ret is NotImplemented:
+ return ret
+ return not ret
+
+
+class Symbols(dict):
+ def __init__(self, file=None):
+ if file:
+ self.read(file)
+
+ def read(self, file):
+ for line in file:
+ version, name, module, export, namespace = \
+ line.strip('\r\n').split('\t')
+ self[name] = Symbol(name, namespace, module, version, export)
+
+ def write(self, file):
+ for s in sorted(self.values(), key=lambda i: i.name):
+ file.write("%s\t%s\t%s\t%s\t%s\n" %
+ (s.version, s.name, s.module, s.export, s.namespace))
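
Usage sketch (illustrative, not part of the patch): reading and re-emitting a Module.symvers-style symbol table with the Symbols class above, assuming debian/lib/python is on sys.path; the symbol data below is invented.

    import io
    from debian_linux.abi import Symbols

    # Columns are version, name, module, export type and namespace,
    # separated by tabs, as expected by Symbols.read() above.
    text = "0x12345678\tkmalloc\tvmlinux\tEXPORT_SYMBOL\t\n"
    symbols = Symbols(io.StringIO(text))
    print(symbols['kmalloc'].module)   # vmlinux

    out = io.StringIO()
    symbols.write(out)                 # same tab-separated layout, sorted by name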
diff --git a/debian/lib/python/debian_linux/config.py b/debian/lib/python/debian_linux/config.py
new file mode 100644
index 0000000000..7424c6278c
--- /dev/null
+++ b/debian/lib/python/debian_linux/config.py
@@ -0,0 +1,257 @@
+import collections
+import os
+import os.path
+import pickle
+import re
+import sys
+
+from configparser import RawConfigParser
+
+__all__ = [
+ 'ConfigCoreDump',
+ 'ConfigCoreHierarchy',
+ 'ConfigParser',
+]
+
+
+class SchemaItemBoolean(object):
+ def __call__(self, i):
+ i = i.strip().lower()
+ if i in ("true", "1"):
+ return True
+ if i in ("false", "0"):
+ return False
+ raise ValueError
+
+
+class SchemaItemInteger(object):
+ def __call__(self, i):
+ return int(i.strip(), 0)
+
+
+class SchemaItemList(object):
+ def __init__(self, type=r"\s+"):
+ self.type = type
+
+ def __call__(self, i):
+ i = i.strip()
+ if not i:
+ return []
+ return [j.strip() for j in re.split(self.type, i)]
+
+
+# Using OrderedDict instead of dict makes the pickled config reproducible
+class ConfigCore(collections.OrderedDict):
+ def get_merge(self, section, arch, featureset, flavour, key, default=None):
+ temp = []
+
+ if arch and featureset and flavour:
+ temp.append(self.get((section, arch, featureset, flavour), {})
+ .get(key))
+ temp.append(self.get((section, arch, None, flavour), {}).get(key))
+ if arch and featureset:
+ temp.append(self.get((section, arch, featureset), {}).get(key))
+ if arch:
+ temp.append(self.get((section, arch), {}).get(key))
+ if featureset:
+ temp.append(self.get((section, None, featureset), {}).get(key))
+ temp.append(self.get((section,), {}).get(key))
+
+ ret = []
+
+ for i in temp:
+ if i is None:
+ continue
+ elif isinstance(i, (list, tuple)):
+ ret.extend(i)
+ elif ret:
+ # TODO
+ return ret
+ else:
+ return i
+
+ return ret or default
+
+ def merge(self, section, arch=None, featureset=None, flavour=None):
+ ret = {}
+ ret.update(self.get((section,), {}))
+ if featureset:
+ ret.update(self.get((section, None, featureset), {}))
+ if arch:
+ ret.update(self.get((section, arch), {}))
+ if arch and featureset:
+ ret.update(self.get((section, arch, featureset), {}))
+ if arch and featureset and flavour:
+ ret.update(self.get((section, arch, None, flavour), {}))
+ ret.update(self.get((section, arch, featureset, flavour), {}))
+ return ret
+
+ def dump(self, fp):
+ pickle.dump(self, fp, 0)
+
+
+class ConfigCoreDump(object):
+ def __new__(self, fp):
+ return pickle.load(fp)
+
+
+class ConfigCoreHierarchy(object):
+ schema_base = {
+ 'base': {
+ 'arches': SchemaItemList(),
+ 'enabled': SchemaItemBoolean(),
+ 'featuresets': SchemaItemList(),
+ 'flavours': SchemaItemList(),
+ },
+ }
+
+ def __new__(cls, schema, dirs=[]):
+ schema_complete = cls.schema_base.copy()
+ for key, value in schema.items():
+ schema_complete.setdefault(key, {}).update(value)
+ return cls.Reader(dirs, schema_complete)()
+
+ class Reader(object):
+ config_name = "defines"
+
+ def __init__(self, dirs, schema):
+ self.dirs, self.schema = dirs, schema
+
+ def __call__(self):
+ ret = ConfigCore()
+ self.read(ret)
+ return ret
+
+ def get_files(self, *dirs):
+ dirs = list(dirs)
+ dirs.append(self.config_name)
+ return (os.path.join(i, *dirs) for i in self.dirs if i)
+
+ def read_arch(self, ret, arch):
+ config = ConfigParser(self.schema)
+ config.read(self.get_files(arch))
+
+ featuresets = config['base', ].get('featuresets', [])
+ flavours = config['base', ].get('flavours', [])
+
+ for section in iter(config):
+ if section[0] in featuresets:
+ real = (section[-1], arch, section[0])
+ elif len(section) > 1:
+ real = (section[-1], arch, None) + section[:-1]
+ else:
+ real = (section[-1], arch) + section[:-1]
+ s = ret.get(real, {})
+ s.update(config[section])
+ ret[tuple(real)] = s
+
+ for featureset in featuresets:
+ self.read_arch_featureset(ret, arch, featureset)
+
+ if flavours:
+ base = ret['base', arch]
+ featuresets.insert(0, 'none')
+ base['featuresets'] = featuresets
+ del base['flavours']
+ ret['base', arch] = base
+ ret['base', arch, 'none'] = {'flavours': flavours,
+ 'implicit-flavour': True}
+
+ def read_arch_featureset(self, ret, arch, featureset):
+ config = ConfigParser(self.schema)
+ config.read(self.get_files(arch, featureset))
+
+ for section in iter(config):
+ real = (section[-1], arch, featureset) + section[:-1]
+ s = ret.get(real, {})
+ s.update(config[section])
+ ret[tuple(real)] = s
+
+ def read(self, ret):
+ config = ConfigParser(self.schema)
+ config.read(self.get_files())
+
+ arches = config['base', ]['arches']
+ featuresets = config['base', ].get('featuresets', [])
+
+ for section in iter(config):
+ if section[0].startswith('featureset-'):
+ real = (section[-1], None, section[0][11:])
+ else:
+ real = (section[-1],) + section[1:]
+ ret[real] = config[section]
+
+ for arch in arches:
+ self.read_arch(ret, arch)
+ for featureset in featuresets:
+ self.read_featureset(ret, featureset)
+
+ def read_featureset(self, ret, featureset):
+ config = ConfigParser(self.schema)
+ config.read(self.get_files('featureset-%s' % featureset))
+
+ for section in iter(config):
+ real = (section[-1], None, featureset)
+ s = ret.get(real, {})
+ s.update(config[section])
+ ret[real] = s
+
+
+class ConfigParser(object):
+ __slots__ = '_config', 'schemas'
+
+ def __init__(self, schemas):
+ self.schemas = schemas
+
+ self._config = RawConfigParser()
+
+ def __getitem__(self, key):
+ return self._convert()[key]
+
+ def __iter__(self):
+ return iter(self._convert())
+
+ def __str__(self):
+ return '<%s(%s)>' % (self.__class__.__name__, self._convert())
+
+ def _convert(self):
+ ret = {}
+ for section in self._config.sections():
+ data = {}
+ for key, value in self._config.items(section):
+ data[key] = value
+ section_list = section.split('_')
+ section_base = section_list[-1]
+ if section_base in self.schemas:
+ section_ret = tuple(section_list)
+ data = self._convert_one(self.schemas[section_base], data)
+ else:
+ section_ret = (section, )
+ ret[section_ret] = data
+ return ret
+
+ def _convert_one(self, schema, data):
+ ret = {}
+ for key, value in data.items():
+ value = value.replace('\n', ' ')
+ if key in schema:
+ value = schema[key](value)
+ ret[key] = value
+ return ret
+
+ def keys(self):
+ return self._convert().keys()
+
+ def read(self, data):
+ return self._config.read(data)
+
+
+if __name__ == '__main__':
+ sys.path.append('debian/lib/python')
+ config = ConfigCoreDump(open('debian/config.defines.dump', 'rb'))
+ for section, items in sorted(config.items(),
+ key=(lambda a: tuple(i or '' for i in a[0]))):
+ print(u"[%s]" % (section,))
+ for item, value in sorted(items.items()):
+ print(u"%s: %s" % (item, value))
+ print()
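
Usage sketch (illustrative, not part of the patch) of how ConfigCore combines values across specificity levels; the section and key names below are made up.

    from debian_linux.config import ConfigCore

    config = ConfigCore()
    config[('image',)] = {'initramfs-generators': ['initramfs-tools']}
    config[('image', 'amd64')] = {'initramfs-generators': ['dracut']}

    # get_merge() concatenates list values, most specific level first.
    print(config.get_merge('image', 'amd64', None, None, 'initramfs-generators'))
    # ['dracut', 'initramfs-tools']

    # merge() flattens a section, more specific levels overriding less specific ones.
    print(config.merge('image', 'amd64'))
    # {'initramfs-generators': ['dracut']}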
diff --git a/debian/lib/python/debian_linux/debian.py b/debian/lib/python/debian_linux/debian.py
new file mode 100644
index 0000000000..78c5cdec7b
--- /dev/null
+++ b/debian/lib/python/debian_linux/debian.py
@@ -0,0 +1,705 @@
+from __future__ import annotations
+
+import collections
+import collections.abc
+import dataclasses
+import enum
+import itertools
+import os.path
+import re
+import typing
+import warnings
+from typing import (
+ Iterable,
+ Self,
+ TypeAlias,
+)
+
+
+class Changelog(list):
+ _top_rules = r"""
+^
+(?P<source>
+ \w[-+0-9a-z.]+
+)
+[ ]
+\(
+(?P<version>
+ [^\(\)\ \t]+
+)
+\)
+\s+
+(?P<distribution>
+ [-+0-9a-zA-Z.]+
+)
+\;\s+urgency=
+(?P<urgency>
+ \w+
+)
+(?:,|\n)
+"""
+ _top_re = re.compile(_top_rules, re.X)
+ _bottom_rules = r"""
+^
+[ ]--[ ]
+(?P<maintainer>
+ \S(?:[ ]?\S)*
+)
+[ ]{2}
+(?P<date>
+ (.*)
+)
+\n
+"""
+ _bottom_re = re.compile(_bottom_rules, re.X)
+ _ignore_re = re.compile(r'^(?: |\s*\n)')
+
+ class Entry(object):
+ __slots__ = ('distribution', 'source', 'version', 'urgency',
+ 'maintainer', 'date')
+
+ def __init__(self, **kwargs):
+ for key, value in kwargs.items():
+ setattr(self, key, value)
+
+ def __init__(self, dir='', version=None, file=None) -> None:
+ if version is None:
+ version = Version
+ if file:
+ self._parse(version, file)
+ else:
+ with open(os.path.join(dir, "debian/changelog"),
+ encoding="UTF-8") as f:
+ self._parse(version, f)
+
+ def _parse(self, version, f) -> None:
+ top_match = None
+ line_no = 0
+
+ for line in f:
+ line_no += 1
+
+ if self._ignore_re.match(line):
+ pass
+ elif top_match is None:
+ top_match = self._top_re.match(line)
+ if not top_match:
+ raise Exception('invalid top line %d in changelog' %
+ line_no)
+ try:
+ v = version(top_match.group('version'))
+ except Exception:
+ if not len(self):
+ raise
+ v = Version(top_match.group('version'))
+ else:
+ bottom_match = self._bottom_re.match(line)
+ if not bottom_match:
+ raise Exception('invalid bottom line %d in changelog' %
+ line_no)
+
+ self.append(self.Entry(
+ distribution=top_match.group('distribution'),
+ source=top_match.group('source'),
+ version=v,
+ urgency=top_match.group('urgency'),
+ maintainer=bottom_match.group('maintainer'),
+ date=bottom_match.group('date')))
+ top_match = bottom_match = None
+
+
+class Version(object):
+ revision: str | None
+
+ _epoch_re = re.compile(r'\d+$')
+ _upstream_re = re.compile(r'[0-9][A-Za-z0-9.+\-:~]*$')
+ _revision_re = re.compile(r'[A-Za-z0-9+.~]+$')
+
+ def __init__(self, version) -> None:
+ try:
+ split = version.index(':')
+ except ValueError:
+ epoch, rest = None, version
+ else:
+ epoch, rest = version[0:split], version[split+1:]
+ try:
+ split = rest.rindex('-')
+ except ValueError:
+ upstream, revision = rest, None
+ else:
+ upstream, revision = rest[0:split], rest[split+1:]
+ if (epoch is not None and not self._epoch_re.match(epoch)) or \
+ not self._upstream_re.match(upstream) or \
+ (revision is not None and not self._revision_re.match(revision)):
+ raise RuntimeError(u"Invalid debian version")
+ self.epoch = epoch and int(epoch)
+ self.upstream = upstream
+ self.revision = revision
+
+ def __str__(self) -> str:
+ return self.complete
+
+ @property
+ def complete(self) -> str:
+ if self.epoch is not None:
+ return u"%d:%s" % (self.epoch, self.complete_noepoch)
+ return self.complete_noepoch
+
+ @property
+ def complete_noepoch(self) -> str:
+ if self.revision is not None:
+ return u"%s-%s" % (self.upstream, self.revision)
+ return self.upstream
+
+ @property
+ def debian(self) -> str | None:
+ from warnings import warn
+ warn(u"debian argument was replaced by revision", DeprecationWarning,
+ stacklevel=2)
+ return self.revision
+
+
+class VersionLinux(Version):
+ _upstream_re = re.compile(r"""
+(?P<version>
+ \d+\.\d+
+)
+(?P<update>
+ (?:\.\d+)?
+ (?:-[a-z]+\d+)?
+)
+(?:
+ ~
+ (?P<modifier>
+ .+?
+ )
+)?
+(?:
+ \.dfsg\.
+ (?P<dfsg>
+ \d+
+ )
+)?
+$
+ """, re.X)
+ _revision_re = re.compile(r"""
+\d+
+(\.\d+)?
+(?:
+ (?P<revision_experimental>
+ ~exp\d+
+ )
+ |
+ (?P<revision_security>
+ (?:[~+]deb\d+u\d+)+
+ )?
+ (?P<revision_backports>
+ ~bpo\d+\+\d+
+ )?
+ |
+ (?P<revision_other>
+ .+?
+ )
+)
+(?:\+b\d+)?
+$
+ """, re.X)
+
+ def __init__(self, version) -> None:
+ super(VersionLinux, self).__init__(version)
+ up_match = self._upstream_re.match(self.upstream)
+ assert self.revision is not None
+ rev_match = self._revision_re.match(self.revision)
+ if up_match is None or rev_match is None:
+ raise RuntimeError(u"Invalid debian linux version")
+ d = up_match.groupdict()
+ self.linux_modifier = d['modifier']
+ self.linux_version = d['version']
+ if d['modifier'] is not None:
+ assert not d['update']
+ self.linux_upstream = '-'.join((d['version'], d['modifier']))
+ else:
+ self.linux_upstream = d['version']
+ self.linux_upstream_full = self.linux_upstream + d['update']
+ self.linux_dfsg = d['dfsg']
+ d = rev_match.groupdict()
+ self.linux_revision_experimental = d['revision_experimental'] and True
+ self.linux_revision_security = d['revision_security'] and True
+ self.linux_revision_backports = d['revision_backports'] and True
+ self.linux_revision_other = d['revision_other'] and True
+
+
+class PackageArchitecture(set[str]):
+ def __init__(
+ self,
+ v: str | Iterable[str] | None = None,
+ /,
+ ) -> None:
+ if v:
+ if isinstance(v, str):
+ v = re.split(r'\s+', v.strip())
+ self |= frozenset(v)
+
+ def __str__(self) -> str:
+ return ' '.join(sorted(self))
+
+
+class PackageDescription:
+ short: list[str]
+ long: list[str]
+
+ def __init__(
+ self,
+ v: str | Self | None = None,
+ /,
+ ) -> None:
+ self.short = []
+ self.long = []
+
+ if v:
+ if isinstance(v, str):
+ desc_split = v.split('\n', 1)
+ self.append_short(desc_split[0])
+ if len(desc_split) == 2:
+ self.append(desc_split[1])
+ else:
+ self.short.extend(v.short)
+ self.long.extend(v.long)
+
+ def __str__(self) -> str:
+ from .utils import TextWrapper
+ wrap = TextWrapper(width=74, fix_sentence_endings=True).wrap
+ short = ', '.join(self.short)
+ long_pars = []
+ for i in self.long:
+ long_pars.append(wrap(i))
+ long = '\n .\n '.join('\n '.join(i) for i in long_pars)
+ return short + '\n ' + long if long else short
+
+ def append(self, long: str) -> None:
+ long = long.strip()
+ if long:
+ self.long.extend(long.split('\n.\n'))
+
+ def append_short(self, short: str) -> None:
+ for i in [i.strip() for i in short.split(',')]:
+ if i:
+ self.short.append(i)
+
+ def extend(self, desc: PackageDescription) -> None:
+ self.short.extend(desc.short)
+ self.long.extend(desc.long)
+
+
+class PackageRelationEntryOperator(enum.StrEnum):
+ OP_LT = '<<'
+ OP_LE = '<='
+ OP_EQ = '='
+ OP_NE = '!='
+ OP_GE = '>='
+ OP_GT = '>>'
+
+ def __neg__(self) -> PackageRelationEntryOperator:
+ return typing.cast(PackageRelationEntryOperator, {
+ self.OP_LT: self.OP_GE,
+ self.OP_LE: self.OP_GT,
+ self.OP_EQ: self.OP_NE,
+ self.OP_NE: self.OP_EQ,
+ self.OP_GE: self.OP_LT,
+ self.OP_GT: self.OP_LE,
+ }[self])
+
+
+class PackageRelationEntry:
+ name: str
+ operator: typing.Optional[PackageRelationEntryOperator]
+ version: typing.Optional[str]
+ arches: PackageArchitecture
+ restrictions: PackageBuildprofile
+
+ __re = re.compile(
+ r'^(?P<name>\S+)'
+ r'(?: \((?P<operator><<|<=|=|!=|>=|>>)\s*(?P<version>[^)]+)\))?'
+ r'(?: \[(?P<arches>[^]]+)\])?'
+ r'(?P<restrictions>(?: <[^>]+>)*)$'
+ )
+
+ def __init__(
+ self,
+ v: str | Self,
+ /, *,
+ name: str | None = None,
+ arches: set[str] | None = None,
+ restrictions: PackageBuildprofile | str | None = None,
+ ) -> None:
+ if isinstance(v, str):
+ match = self.__re.match(v)
+ if not match:
+ raise RuntimeError('Unable to parse dependency "%s"' % v)
+
+ self.name = name or match['name']
+
+ if operator := match['operator']:
+ self.operator = PackageRelationEntryOperator(operator)
+ else:
+ self.operator = None
+
+ self.version = match['version']
+ self.arches = PackageArchitecture(arches or match['arches'])
+ if isinstance(restrictions, PackageBuildprofile):
+ self.restrictions = restrictions.copy()
+ else:
+ self.restrictions = PackageBuildprofile.parse(
+ restrictions or match['restrictions'],
+ )
+
+ else:
+ self.name = name or v.name
+ self.operator = v.operator
+ self.version = v.version
+ self.arches = PackageArchitecture(arches or v.arches)
+ if isinstance(restrictions, str):
+ self.restrictions = PackageBuildprofile.parse(restrictions)
+ else:
+ self.restrictions = (restrictions or v.restrictions).copy()
+
+ def __str__(self):
+ ret = [self.name]
+ if self.operator and self.version:
+ ret.append(f'({self.operator} {self.version})')
+ if self.arches:
+ ret.append(f'[{self.arches}]')
+ if self.restrictions:
+ ret.append(str(self.restrictions))
+ return ' '.join(ret)
+
+
+class PackageRelationGroup(list[PackageRelationEntry]):
+ def __init__(
+ self,
+ v: Iterable[PackageRelationEntry | str] | str | Self | None = None,
+ /, *,
+ arches: set[str] | None = None,
+ ) -> None:
+ if v:
+ if isinstance(v, str):
+ v = (i.strip() for i in re.split(r'\|', v.strip()))
+ self.extend(PackageRelationEntry(i, arches=arches) for i in v if i)
+
+ def __str__(self) -> str:
+ return ' | '.join(str(i) for i in self)
+
+ def _merge_eq(self, v: PackageRelationGroup) -> typing.Optional[PackageRelationGroup]:
+ if all(
+ (
+ i.name == j.name and i.operator == j.operator
+ and i.version == j.version
+ ) for i, j in zip(self, v)
+ ):
+ return self
+ return None
+
+
+class PackageRelation(list[PackageRelationGroup]):
+ Init: TypeAlias = PackageRelationGroup | Iterable[PackageRelationEntry] | str
+
+ def __init__(
+ self,
+ v: Iterable[Init] | str | Self | None = None,
+ /, *,
+ arches: set[str] | None = None,
+ ) -> None:
+ if v:
+ if isinstance(v, str):
+ v = (i.strip() for i in re.split(r',', v.strip()))
+ self.extend(PackageRelationGroup(i, arches=arches) for i in v if i)
+
+ def __str__(self) -> str:
+ return ', '.join(str(i) for i in self)
+
+ def _merge_eq(self, v: PackageRelationGroup) -> typing.Optional[PackageRelationGroup]:
+ for i in self:
+ if i._merge_eq(v):
+ return i
+ return None
+
+ def merge(
+ self,
+ v: Init | str,
+ /,
+ ) -> None:
+ v = PackageRelationGroup(v)
+ if g := self._merge_eq(v):
+ for i, j in zip(g, v):
+ i.arches |= j.arches
+ i.restrictions.update(j.restrictions)
+ else:
+ super().append(v)
+
+
+@dataclasses.dataclass
+class PackageBuildprofileEntry:
+ pos: set[str] = dataclasses.field(default_factory=set)
+ neg: set[str] = dataclasses.field(default_factory=set)
+
+ __re = re.compile(r'^<(?P<profiles>[a-z0-9. !-]+)>$')
+
+ def copy(self) -> Self:
+ return self.__class__(
+ pos=set(self.pos),
+ neg=set(self.neg),
+ )
+
+ @classmethod
+ def parse(cls, v: str, /) -> Self:
+ match = cls.__re.match(v)
+ if not match:
+ raise RuntimeError('Unable to parse build profile "%s"' % v)
+
+ ret = cls()
+ for i in re.split(r' ', match.group('profiles')):
+ if i:
+ if i[0] == '!':
+ ret.neg.add(i[1:])
+ else:
+ ret.pos.add(i)
+ return ret
+
+ def __eq__(self, other: object, /) -> bool:
+ if not isinstance(other, PackageBuildprofileEntry):
+ return NotImplemented
+ return self.pos == other.pos and self.neg == other.neg
+
+ def isdisjoint(self, other: Self, /) -> bool:
+ return not (self.issubset(other)) and not (self.issuperset(other))
+
+ def issubset(self, other: Self, /) -> bool:
+ '''
+ Test whether this build profile would select a subset of packages.
+
+ For positive profile matches: Adding profiles will select a subset.
+ For negative profile matches: Removing profiles will select a subset.
+ '''
+ return self.pos >= other.pos and self.neg <= other.neg
+ __le__ = issubset
+
+ def issuperset(self, other: Self, /) -> bool:
+ '''
+ Test whether this build profile would select a superset of packages.
+
+ For positive profile matches: Removing profiles will select a superset.
+ For negative profile matches: Adding profiles will select a superset.
+ '''
+ return self.pos <= other.pos and self.neg >= other.neg
+ __ge__ = issuperset
+
+ def update(self, other: Self, /) -> None:
+ '''
+ Merge the build profile restrictions from other into this entry.
+
+ Negating entries (profile vs !profile) are completely removed.
+ All other entries are kept only if they appear on both sides.
+ '''
+ diff = (self.pos & other.neg) | (self.neg & other.pos)
+ self.pos &= other.pos - diff
+ self.neg &= other.neg - diff
+ __ior__ = update
+
+ def __str__(self) -> str:
+ s = ' '.join(itertools.chain(
+ sorted(self.pos),
+ (f'!{i}' for i in sorted(self.neg)),
+ ))
+ return f'<{s}>'
+
+
+class PackageBuildprofile(list[PackageBuildprofileEntry]):
+ __re = re.compile(r' *(<[^>]+>)(?: +|$)')
+
+ def copy(self) -> Self:
+ return self.__class__(i.copy() for i in self)
+
+ @classmethod
+ def parse(cls, v: str, /) -> Self:
+ ret = cls()
+ for match in cls.__re.finditer(v):
+ ret.append(PackageBuildprofileEntry.parse(match.group(1)))
+ return ret
+
+ def update(self, v: Self, /) -> None:
+ for i in v:
+ for j in self:
+ if not j.isdisjoint(i):
+ j.update(i)
+ break
+ else:
+ self.append(i)
+ __ior__ = update
+
+ def __str__(self) -> str:
+ return ' '.join(str(i) for i in self)
+
+
+class _ControlFileDict(collections.abc.MutableMapping):
+ def __init__(self):
+ self.__data = {}
+ self.meta = {}
+
+ def __getitem__(self, key):
+ return self.__data[key]
+
+ def __setitem__(self, key, value):
+ if key.lower().startswith('meta-'):
+ self.meta[key.lower()[5:]] = value
+ return
+
+ try:
+ cls = self._fields[key]
+ if not isinstance(value, cls):
+ if f := getattr(cls, 'parse', None):
+ value = f(value)
+ else:
+ value = cls(value)
+ except KeyError:
+ warnings.warn(
+ f'setting unknown field { key } in { type(self).__name__ }',
+ stacklevel=2)
+ self.__data[key] = value
+
+ def __delitem__(self, key):
+ del self.__data[key]
+
+ def __iter__(self):
+ keys = set(self.__data.keys())
+ for key in self._fields.keys():
+ if key in self.__data:
+ keys.remove(key)
+ yield key
+ for key in sorted(keys):
+ yield key
+
+ def __len__(self):
+ return len(self.__data)
+
+ def setdefault(self, key):
+ try:
+ return self[key]
+ except KeyError:
+ try:
+ ret = self[key] = self._fields[key]()
+ except KeyError:
+ warnings.warn(
+ f'setting unknown field { key } in { type(self).__name__ }',
+ stacklevel=2)
+ ret = self[key] = ''
+ return ret
+
+ def copy(self):
+ ret = self.__class__()
+ ret.__data = self.__data.copy()
+ ret.meta = self.meta.copy()
+ return ret
+
+ @classmethod
+ def read_rfc822(cls, f):
+ entries = []
+ eof = False
+
+ while not eof:
+ e = cls()
+ last = None
+ lines = []
+ while True:
+ line = f.readline()
+ if not line:
+ eof = True
+ break
+ # Strip comments rather than trying to preserve them
+ if line[0] == '#':
+ continue
+ line = line.strip('\n')
+ if not line:
+ break
+ if line[0] in ' \t':
+ if not last:
+ raise ValueError(
+ 'Continuation line seen before first header')
+ lines.append(line.lstrip())
+ continue
+ if last:
+ e[last] = '\n'.join(lines)
+ i = line.find(':')
+ if i < 0:
+ raise ValueError(u"Not a header, not a continuation: ``%s''" %
+ line)
+ last = line[:i]
+ lines = [line[i + 1:].lstrip()]
+ if last:
+ e[last] = '\n'.join(lines)
+ if e:
+ entries.append(e)
+
+ return entries
+
+
+class SourcePackage(_ControlFileDict):
+ _fields = collections.OrderedDict((
+ ('Source', str),
+ ('Architecture', PackageArchitecture),
+ ('Section', str),
+ ('Priority', str),
+ ('Maintainer', str),
+ ('Uploaders', str),
+ ('Standards-Version', str),
+ ('Build-Depends', PackageRelation),
+ ('Build-Depends-Arch', PackageRelation),
+ ('Build-Depends-Indep', PackageRelation),
+ ('Rules-Requires-Root', str),
+ ('Homepage', str),
+ ('Vcs-Browser', str),
+ ('Vcs-Git', str),
+ ('XS-Autobuild', str),
+ ))
+
+
+class BinaryPackage(_ControlFileDict):
+ _fields = collections.OrderedDict((
+ ('Package', str),
+ ('Package-Type', str), # for udeb only
+ ('Architecture', PackageArchitecture),
+ ('Section', str),
+ ('Priority', str),
+ # Build-Depends* fields aren't allowed for binary packages in
+ # the real control file, but we move them to the source
+ # package
+ ('Build-Depends', PackageRelation),
+ ('Build-Depends-Arch', PackageRelation),
+ ('Build-Depends-Indep', PackageRelation),
+ ('Build-Profiles', PackageBuildprofile),
+ ('Built-Using', PackageRelation),
+ ('Provides', PackageRelation),
+ ('Pre-Depends', PackageRelation),
+ ('Depends', PackageRelation),
+ ('Recommends', PackageRelation),
+ ('Suggests', PackageRelation),
+ ('Replaces', PackageRelation),
+ ('Breaks', PackageRelation),
+ ('Conflicts', PackageRelation),
+ ('Multi-Arch', str),
+ ('Kernel-Version', str), # for udeb only
+ ('Description', PackageDescription),
+ ('Homepage', str),
+ ))
+
+
+class TestsControl(_ControlFileDict):
+ _fields = collections.OrderedDict((
+ ('Tests', str),
+ ('Test-Command', str),
+ ('Architecture', PackageArchitecture),
+ ('Restrictions', str),
+ ('Features', str),
+ ('Depends', PackageRelation),
+ ('Tests-Directory', str),
+ ('Classes', str),
+ ))
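
Usage sketch (illustrative, not part of the patch) of the version and relation parsers above; the version string matches this upload, the package names are examples only.

    from debian_linux.debian import PackageRelation, VersionLinux

    v = VersionLinux('6.6.15-2')
    print(v.linux_version)        # 6.6
    print(v.linux_upstream_full)  # 6.6.15
    print(v.revision)             # 2

    rel = PackageRelation('linux-image-amd64 (>= 6.6.15-2), initramfs-tools | dracut')
    print(rel)   # linux-image-amd64 (>= 6.6.15-2), initramfs-tools | dracut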
diff --git a/debian/lib/python/debian_linux/firmware.py b/debian/lib/python/debian_linux/firmware.py
new file mode 100644
index 0000000000..b1a4a7e85d
--- /dev/null
+++ b/debian/lib/python/debian_linux/firmware.py
@@ -0,0 +1,90 @@
+import re
+
+
+class FirmwareFile(object):
+ def __init__(self, binary, desc=None, source=None, version=None) -> None:
+ self.binary = binary
+ self.desc = desc
+ self.source = source
+ self.version = version
+
+
+class FirmwareSection(object):
+ def __init__(self, driver, files, licence) -> None:
+ self.driver = driver
+ self.files = files
+ self.licence = licence
+
+
+class FirmwareWhence(list):
+ def __init__(self, file) -> None:
+ self.read(file)
+
+ def read(self, file) -> None:
+ in_header = True
+ driver = None
+ files = {}
+ licence = None
+ binary = []
+ desc = None
+ source = []
+ version = None
+
+ for line in file:
+ if line.startswith('----------'):
+ if in_header:
+ in_header = False
+ else:
+ # Finish old section
+ if driver:
+ self.append(FirmwareSection(driver, files, licence))
+ driver = None
+ files = {}
+ licence = None
+ continue
+
+ if in_header:
+ continue
+
+ if line == '\n':
+ # Blank line ends the current group of file fields
+ for b in binary:
+ # XXX The WHENCE file isn't yet consistent in its
+ # association of binaries and their sources and
+ # metadata. This associates all sources and
+ # metadata in a group with each binary.
+ files[b] = FirmwareFile(b, desc, source, version)
+ binary = []
+ desc = None
+ source = []
+ version = None
+ continue
+
+ match = re.match(
+ r'(Driver|File|Info|Licen[cs]e|Source|Version'
+ r'|Original licen[cs]e info(?:rmation)?):\s*(.*)\n',
+ line)
+ if match:
+ keyword, value = match.group(1, 2)
+ if keyword == 'Driver':
+ driver = value.split(' ')[0].lower()
+ elif keyword == 'File':
+ match = re.match(r'(\S+)(?:\s+--\s+(.*))?', value)
+ binary.append(match.group(1))
+ desc = match.group(2)
+ elif keyword in ['Info', 'Version']:
+ version = value
+ elif keyword == 'Source':
+ source.append(value)
+ else:
+ licence = value
+ elif licence is not None:
+ licence = (licence + '\n'
+ + re.sub(r'^(?:[/ ]\*| \*/)?\s*(.*?)\s*$', r'\1',
+ line))
+
+ # Finish last section if non-empty
+ for b in binary:
+ files[b] = FirmwareFile(b, desc, source, version)
+ if driver:
+ self.append(FirmwareSection(driver, files, licence))
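
Usage sketch (illustrative, not part of the patch): parsing a WHENCE-style fragment with FirmwareWhence; the content below is invented, not taken from the real WHENCE file.

    import io
    from debian_linux.firmware import FirmwareWhence

    whence = FirmwareWhence(io.StringIO(
        'Header text\n'
        '--------------------\n'
        'Driver: example -- Example driver\n'
        '\n'
        'File: example/fw.bin\n'
        'Version: 1.0\n'
        '\n'
        'Licence: Redistributable.\n'
        '\n'
    ))
    for section in whence:
        print(section.driver, sorted(section.files))   # example ['example/fw.bin']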
diff --git a/debian/lib/python/debian_linux/gencontrol.py b/debian/lib/python/debian_linux/gencontrol.py
new file mode 100644
index 0000000000..66769a0639
--- /dev/null
+++ b/debian/lib/python/debian_linux/gencontrol.py
@@ -0,0 +1,581 @@
+from __future__ import annotations
+
+import contextlib
+import pathlib
+import re
+from collections import OrderedDict
+from collections.abc import (
+ Generator,
+)
+from typing import (
+ Any,
+ Iterable,
+ Iterator,
+ IO,
+)
+
+from .debian import Changelog, PackageArchitecture, \
+ Version, _ControlFileDict
+from .utils import Templates
+
+
+class PackagesList(OrderedDict):
+ def append(self, package) -> None:
+ self[package['Package']] = package
+
+ def extend(self, packages) -> None:
+ for package in packages:
+ self[package['Package']] = package
+
+ def setdefault(self, package) -> Any:
+ return super().setdefault(package['Package'], package)
+
+
+class Makefile:
+ rules: dict[str, MakefileRule]
+
+ def __init__(self) -> None:
+ self.rules = {}
+
+ def add_cmds(self, name: str, cmds) -> None:
+ rule = self.rules.setdefault(name, MakefileRule(name))
+ rule.add_cmds(MakefileRuleCmdsSimple(cmds))
+
+ def add_deps(self, name: str, deps) -> None:
+ rule = self.rules.setdefault(name, MakefileRule(name))
+ rule.add_deps(deps)
+
+ for i in deps:
+ self.rules.setdefault(i, MakefileRule(i))
+
+ def add_rules(self, name: str, target, makeflags, packages=set(), packages_extra=set()) -> None:
+ rule = self.rules.setdefault(name, MakefileRule(name))
+ rule.add_cmds(MakefileRuleCmdsRules(target, makeflags, packages, packages_extra))
+
+ def write(self, out) -> None:
+ out.write('''\
+.NOTPARALLEL:
+.PHONY:
+packages_enabled := $(shell dh_listpackages)
+define if_package
+$(if $(filter $(1),$(packages_enabled)),$(2))
+endef
+''')
+ for k, rule in sorted(self.rules.items()):
+ rule.write(out)
+
+
+class MakefileRule:
+ name: str
+ cmds: list[MakefileRuleCmds]
+ deps: set[str]
+
+ def __init__(self, name: str) -> None:
+ self.name = name
+ self.cmds = []
+ self.deps = set()
+
+ def add_cmds(self, cmds: MakefileRuleCmds) -> None:
+ self.cmds.append(cmds)
+
+ def add_deps(self, deps: Iterable[str]) -> None:
+ self.deps.update(deps)
+
+ def write(self, out: IO) -> None:
+ if self.cmds:
+ out.write(f'{self.name}:{" ".join(sorted(self.deps))}\n')
+ for c in self.cmds:
+ c.write(out)
+ else:
+ out.write(f'{self.name}:{" ".join(sorted(self.deps))}\n')
+
+
+class MakefileRuleCmds:
+ def write(self, out: IO) -> None:
+ raise NotImplementedError
+
+
+class MakefileRuleCmdsRules(MakefileRuleCmds):
+ def __init__(self, target, makeflags, packages, packages_extra) -> None:
+ self.target = target
+ self.makeflags = makeflags.copy()
+ self.packages = packages
+ self.packages_extra = packages_extra
+
+ packages_all = packages | packages_extra
+
+ if packages_all:
+ if len(packages_all) == 1:
+ package_name = list(packages_all)[0]
+ self.makeflags['PACKAGE_NAME'] = package_name
+ self.makeflags['DESTDIR'] = f'$(CURDIR)/debian/{package_name}'
+ else:
+ self.makeflags['DESTDIR'] = '$(CURDIR)/debian/tmp'
+
+ self.makeflags['DH_OPTIONS'] = ' '.join(f'-p{i}' for i in sorted(packages_all))
+
+ def write(self, out: IO) -> None:
+ cmd = f'$(MAKE) -f debian/rules.real {self.target} {self.makeflags}'
+ if self.packages:
+ out.write(f'\t$(call if_package, {" ".join(sorted(self.packages))}, {cmd})\n')
+ else:
+ out.write(f'\t{cmd}\n')
+
+
+class MakefileRuleCmdsSimple(MakefileRuleCmds):
+ cmds: list[str]
+
+ def __init__(self, cmds: list[str]) -> None:
+ self.cmds = cmds
+
+ def write(self, out: IO) -> None:
+ for i in self.cmds:
+ out.write(f'\t{i}\n')
+
+
+class MakeFlags(dict):
+ def __str__(self) -> str:
+ return ' '.join("%s='%s'" % i for i in sorted(self.items()))
+
+ def copy(self) -> MakeFlags:
+ return self.__class__(super(MakeFlags, self).copy())
+
+
+class PackagesBundle:
+ name: str | None
+ templates: Templates
+ base: pathlib.Path
+ makefile: Makefile
+ packages: PackagesList
+
+ def __init__(
+ self,
+ name: str | None,
+ templates: Templates,
+ base: pathlib.Path = pathlib.Path('debian'),
+ ) -> None:
+ self.name = name
+ self.templates = templates
+ self.base = base
+ self.makefile = Makefile()
+ self.packages = PackagesList()
+
+ def add(
+ self,
+ pkgid: str,
+ ruleid: Iterable[str],
+ makeflags: MakeFlags,
+ replace: dict[str, str],
+ *,
+ arch: str | None = None,
+ check_packages: bool = True,
+ ) -> list[Any]:
+ ret = []
+ for raw_package in self.templates.get_control(f'{pkgid}.control', replace):
+ package = self.packages.setdefault(raw_package)
+ package_name = package['Package']
+ ret.append(package)
+
+ package.meta.setdefault('rules-ruleids', {})[ruleid] = makeflags
+ if arch:
+ package.meta.setdefault('architectures', PackageArchitecture()).add(arch)
+ package.meta['rules-check-packages'] = check_packages
+
+ for name in (
+ 'NEWS',
+ 'bug-presubj',
+ 'lintian-overrides',
+ 'maintscript',
+ 'postinst',
+ 'postrm',
+ 'preinst',
+ 'prerm',
+ ):
+ try:
+ template = self.templates.get(f'{pkgid}.{name}',
+ replace | {'package': package_name})
+ except KeyError:
+ pass
+ else:
+ with self.open(f'{package_name}.{name}') as f:
+ f.write(template)
+
+ return ret
+
+ def add_packages(
+ self,
+ packages: Iterable[_ControlFileDict],
+ ruleid: Iterable[str],
+ makeflags: MakeFlags,
+ *,
+ arch: str | None = None,
+ check_packages: bool = True,
+ ) -> None:
+ for package in packages:
+ package = self.packages.setdefault(package)
+ package.meta.setdefault('rules-ruleids', {})[ruleid] = makeflags
+ if arch:
+ package.meta.setdefault('architectures', PackageArchitecture()).add(arch)
+ package.meta['rules-check-packages'] = check_packages
+
+ def path(self, name) -> pathlib.Path:
+ if self.name:
+ return self.base / f'generated.{self.name}/{name}'
+ return self.base / name
+
+ @staticmethod
+ def __ruleid_deps(ruleid: tuple[str], name: str) -> Iterator[tuple[str, str]]:
+ """
+ Generate all the rules dependencies.
+ ```
+ build: build_a
+ build_a: build_a_b
+ build_a_b: build_a_b_image
+ ```
+ """
+ r = ruleid + (name, )
+ yield (
+ '',
+ '_' + '_'.join(r[:1]),
+ )
+ for i in range(1, len(r)):
+ yield (
+ '_' + '_'.join(r[:i]),
+ '_' + '_'.join(r[:i + 1]),
+ )
+
+ @contextlib.contextmanager
+ def open(self, name: str, mode: str = 'w') -> Generator[IO, None, None]:
+ path = self.path(name)
+ path.parent.mkdir(parents=True, exist_ok=True)
+ with path.open(mode=mode, encoding='utf-8') as f:
+ yield f
+
+ def extract_makefile(self) -> None:
+ targets: dict[frozenset[str], dict] = {}
+
+ for package_name, package in self.packages.items():
+ target_name = package.meta.get('rules-target')
+ ruleids = package.meta.get('rules-ruleids')
+ makeflags = MakeFlags({
+ # Requires Python 3.9+
+ k.removeprefix('rules-makeflags-').upper(): v
+ for (k, v) in package.meta.items() if k.startswith('rules-makeflags-')
+ })
+
+ if ruleids:
+ arches = package.meta.get('architectures')
+ if arches:
+ package['Architecture'] = arches
+ else:
+ arches = package.get('Architecture')
+
+ if target_name:
+ for ruleid, makeflags_package in ruleids.items():
+ target_key = frozenset(
+ [target_name, ruleid]
+ + [f'{k}_{v}' for (k, v) in makeflags.items()]
+ )
+ target = targets.setdefault(
+ target_key,
+ {
+ 'name': target_name,
+ 'ruleid': ruleid,
+ },
+ )
+
+ if package.meta['rules-check-packages']:
+ target.setdefault('packages', set()).add(package_name)
+ else:
+ target.setdefault('packages_extra', set()).add(package_name)
+ makeflags_package = makeflags_package.copy()
+ makeflags_package.update(makeflags)
+ target['makeflags'] = makeflags_package
+
+ if arches == set(['all']):
+ target['type'] = 'indep'
+ else:
+ target['type'] = 'arch'
+
+ for target in targets.values():
+ name = target['name']
+ ruleid = target['ruleid']
+ packages = target.get('packages', set())
+ packages_extra = target.get('packages_extra', set())
+ makeflags = target['makeflags']
+ ttype = target['type']
+
+ rule = '_'.join(ruleid + (name, ))
+ self.makefile.add_rules(f'setup_{rule}',
+ f'setup_{name}', makeflags, packages, packages_extra)
+ self.makefile.add_rules(f'build-{ttype}_{rule}',
+ f'build_{name}', makeflags, packages, packages_extra)
+ self.makefile.add_rules(f'binary-{ttype}_{rule}',
+ f'binary_{name}', makeflags, packages, packages_extra)
+
+ for i, j in self.__ruleid_deps(ruleid, name):
+ self.makefile.add_deps(f'setup{i}',
+ [f'setup{j}'])
+ self.makefile.add_deps(f'build-{ttype}{i}',
+ [f'build-{ttype}{j}'])
+ self.makefile.add_deps(f'binary-{ttype}{i}',
+ [f'binary-{ttype}{j}'])
+
+ def merge_build_depends(self) -> None:
+ # Merge Build-Depends pseudo-fields from binary packages into the
+ # source package
+ source = self.packages["source"]
+ arch_all = PackageArchitecture("all")
+ for name, package in self.packages.items():
+ if name == "source":
+ continue
+ dep = package.get("Build-Depends")
+ if not dep:
+ continue
+ del package["Build-Depends"]
+ if package["Architecture"] == arch_all:
+ dep_type = "Build-Depends-Indep"
+ else:
+ dep_type = "Build-Depends-Arch"
+ for group in dep:
+ for item in group:
+ if package["Architecture"] != arch_all and not item.arches:
+ item.arches = package["Architecture"]
+ if package.get("Build-Profiles") and not item.restrictions:
+ item.restrictions = package["Build-Profiles"]
+ source.setdefault(dep_type).merge(group)
+
+ def write(self) -> None:
+ self.write_control()
+ self.write_makefile()
+
+ def write_control(self) -> None:
+ with self.open('control') as f:
+ self.write_rfc822(f, self.packages.values())
+
+ def write_makefile(self) -> None:
+ with self.open('rules.gen') as f:
+ self.makefile.write(f)
+
+ def write_rfc822(self, f: IO, entries: Iterable) -> None:
+ for entry in entries:
+ for key, value in entry.items():
+ if value:
+ f.write(u"%s: %s\n" % (key, value))
+ f.write('\n')
+
+
+def iter_featuresets(config) -> Iterable[str]:
+ for featureset in config['base', ]['featuresets']:
+ if config.merge('base', None, featureset).get('enabled', True):
+ yield featureset
+
+
+def iter_arches(config) -> Iterable[str]:
+ return iter(config['base', ]['arches'])
+
+
+def iter_arch_featuresets(config, arch) -> Iterable[str]:
+ for featureset in config['base', arch].get('featuresets', []):
+ if config.merge('base', arch, featureset).get('enabled', True):
+ yield featureset
+
+
+def iter_flavours(config, arch, featureset) -> Iterable[str]:
+ return iter(config['base', arch, featureset]['flavours'])
+
+
+class Gencontrol(object):
+ vars: dict[str, str]
+ bundles: dict[str, PackagesBundle]
+
+ def __init__(self, config, templates, version=Version) -> None:
+ self.config, self.templates = config, templates
+ self.changelog = Changelog(version=version)
+ self.vars = {}
+ self.bundles = {'': PackagesBundle(None, templates)}
+
+ @property
+ def bundle(self) -> PackagesBundle:
+ return self.bundles['']
+
+ def __call__(self) -> None:
+ self.do_source()
+ self.do_main()
+ self.do_extra()
+
+ self.write()
+
+ def do_source(self) -> None:
+ source = self.templates.get_source_control("source.control", self.vars)[0]
+ if not source.get('Source'):
+ source['Source'] = self.changelog[0].source
+ self.bundle.packages['source'] = source
+
+ def do_main(self) -> None:
+ vars = self.vars.copy()
+
+ makeflags = MakeFlags()
+
+ self.do_main_setup(vars, makeflags)
+ self.do_main_makefile(makeflags)
+ self.do_main_packages(vars, makeflags)
+ self.do_main_recurse(vars, makeflags)
+
+ def do_main_setup(self, vars, makeflags) -> None:
+ pass
+
+ def do_main_makefile(self, makeflags) -> None:
+ pass
+
+ def do_main_packages(self, vars, makeflags) -> None:
+ pass
+
+ def do_main_recurse(self, vars, makeflags) -> None:
+ for featureset in iter_featuresets(self.config):
+ self.do_indep_featureset(featureset,
+ vars.copy(), makeflags.copy())
+ for arch in iter_arches(self.config):
+ self.do_arch(arch, vars.copy(),
+ makeflags.copy())
+
+ def do_extra(self) -> None:
+ try:
+ packages_extra = self.templates.get_control("extra.control", self.vars)
+ except KeyError:
+ return
+
+ extra_arches: dict[str, Any] = {}
+ for package in packages_extra:
+ arches = package['Architecture']
+ for arch in arches:
+ i = extra_arches.get(arch, [])
+ i.append(package)
+ extra_arches[arch] = i
+ for arch in sorted(extra_arches.keys()):
+ self.bundle.add_packages(extra_arches[arch], (arch, ),
+ MakeFlags(), check_packages=False)
+
+ def do_indep_featureset(self, featureset, vars, makeflags) -> None:
+ vars['localversion'] = ''
+ if featureset != 'none':
+ vars['localversion'] = '-' + featureset
+
+ self.do_indep_featureset_setup(vars, makeflags, featureset)
+ self.do_indep_featureset_makefile(featureset, makeflags)
+ self.do_indep_featureset_packages(featureset,
+ vars, makeflags)
+
+ def do_indep_featureset_setup(self, vars, makeflags, featureset) -> None:
+ pass
+
+ def do_indep_featureset_makefile(self, featureset, makeflags) -> None:
+ makeflags['FEATURESET'] = featureset
+
+ def do_indep_featureset_packages(self, featureset, vars, makeflags) -> None:
+ pass
+
+ def do_arch(self, arch, vars, makeflags) -> None:
+ vars['arch'] = arch
+
+ self.do_arch_setup(vars, makeflags, arch)
+ self.do_arch_makefile(arch, makeflags)
+ self.do_arch_packages(arch, vars, makeflags)
+ self.do_arch_recurse(arch, vars, makeflags)
+
+ def do_arch_setup(self, vars, makeflags, arch) -> None:
+ pass
+
+ def do_arch_makefile(self, arch, makeflags) -> None:
+ makeflags['ARCH'] = arch
+
+ def do_arch_packages(self, arch, vars, makeflags) -> None:
+ pass
+
+ def do_arch_recurse(self, arch, vars, makeflags) -> None:
+ for featureset in iter_arch_featuresets(self.config, arch):
+ self.do_featureset(arch, featureset,
+ vars.copy(), makeflags.copy())
+
+ def do_featureset(self, arch, featureset, vars, makeflags) -> None:
+ vars['localversion'] = ''
+ if featureset != 'none':
+ vars['localversion'] = '-' + featureset
+
+ self.do_featureset_setup(vars, makeflags, arch, featureset)
+ self.do_featureset_makefile(arch, featureset, makeflags)
+ self.do_featureset_packages(arch, featureset, vars, makeflags)
+ self.do_featureset_recurse(arch, featureset, vars, makeflags)
+
+ def do_featureset_setup(self, vars, makeflags, arch, featureset) -> None:
+ pass
+
+ def do_featureset_makefile(self, arch, featureset, makeflags) -> None:
+ makeflags['FEATURESET'] = featureset
+
+ def do_featureset_packages(self, arch, featureset, vars, makeflags) -> None:
+ pass
+
+ def do_featureset_recurse(self, arch, featureset, vars, makeflags) -> None:
+ for flavour in iter_flavours(self.config, arch, featureset):
+ self.do_flavour(arch, featureset, flavour,
+ vars.copy(), makeflags.copy())
+
+ def do_flavour(self, arch, featureset, flavour, vars,
+ makeflags):
+ vars['localversion'] += '-' + flavour
+
+ self.do_flavour_setup(vars, makeflags, arch, featureset, flavour)
+ self.do_flavour_makefile(arch, featureset, flavour, makeflags)
+ self.do_flavour_packages(arch, featureset, flavour,
+ vars, makeflags)
+
+ def do_flavour_setup(self, vars, makeflags, arch, featureset, flavour) -> None:
+ for i in (
+ ('kernel-arch', 'KERNEL_ARCH'),
+ ('localversion', 'LOCALVERSION'),
+ ):
+ if i[0] in vars:
+ makeflags[i[1]] = vars[i[0]]
+
+ def do_flavour_makefile(self, arch, featureset, flavour, makeflags) -> None:
+ makeflags['FLAVOUR'] = flavour
+
+ def do_flavour_packages(self, arch, featureset, flavour, vars, makeflags) -> None:
+ pass
+
+ def substitute(self, s: str | list | tuple, vars) -> str | list:
+ if isinstance(s, (list, tuple)):
+ return [self.substitute(i, vars) for i in s]
+
+ def subst(match) -> str:
+ return vars[match.group(1)]
+
+ return re.sub(r'@([-_a-z0-9]+)@', subst, str(s))
+
+ def write(self) -> None:
+ for bundle in self.bundles.values():
+ bundle.extract_makefile()
+ bundle.merge_build_depends()
+ bundle.write()
+
+
+def merge_packages(packages, new, arch) -> None:
+ for new_package in new:
+ name = new_package['Package']
+ if name in packages:
+ package = packages.get(name)
+ package['Architecture'].add(arch)
+
+ for field in ('Depends', 'Provides', 'Suggests', 'Recommends',
+ 'Conflicts'):
+ if field in new_package:
+ if field in package:
+ v = package[field]
+ v.extend(new_package[field])
+ else:
+ package[field] = new_package[field]
+
+ else:
+ new_package['Architecture'] = arch
+ packages.append(new_package)
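
Usage sketch (illustrative, not part of the patch) of the Makefile/MakeFlags helpers above; the rule, target and package names are made up.

    import sys
    from debian_linux.gencontrol import Makefile, MakeFlags

    mk = Makefile()
    mk.add_rules('binary-arch_amd64_none_amd64', 'binary_image',
                 MakeFlags({'ARCH': 'amd64', 'FLAVOUR': 'amd64'}),
                 packages={'linux-image-6.6.15-amd64'})
    mk.add_deps('binary-arch', ['binary-arch_amd64_none_amd64'])
    # Writes the if_package helper plus one rule per entry, guarded by dh_listpackages.
    mk.write(sys.stdout)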
diff --git a/debian/lib/python/debian_linux/kconfig.py b/debian/lib/python/debian_linux/kconfig.py
new file mode 100644
index 0000000000..291ccd8439
--- /dev/null
+++ b/debian/lib/python/debian_linux/kconfig.py
@@ -0,0 +1,93 @@
+from typing import Iterable
+from collections import OrderedDict
+
+__all__ = (
+ "KconfigFile",
+)
+
+
+class KConfigEntry(object):
+ __slots__ = 'name', 'value', 'comments'
+
+ def __init__(self, name, value, comments=None) -> None:
+ self.name, self.value = name, value
+ self.comments = comments or []
+
+ def __eq__(self, other) -> bool:
+ return self.name == other.name and self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.name) | hash(self.value)
+
+ def __repr__(self) -> str:
+ return ('<{}({!r}, {!r}, {!r})>'
+ .format(self.__class__.__name__, self.name, self.value,
+ self.comments))
+
+ def __str__(self) -> str:
+ return 'CONFIG_{}={}'.format(self.name, self.value)
+
+ def write(self) -> Iterable[str]:
+ for comment in self.comments:
+ yield '#. ' + comment
+ yield str(self)
+
+
+class KConfigEntryTristate(KConfigEntry):
+ __slots__ = ()
+
+ VALUE_NO = False
+ VALUE_YES = True
+ VALUE_MOD = object()
+
+ def __init__(self, name, value, comments=None) -> None:
+ if value == 'n' or value is None:
+ value = self.VALUE_NO
+ elif value == 'y':
+ value = self.VALUE_YES
+ elif value == 'm':
+ value = self.VALUE_MOD
+ else:
+ raise NotImplementedError
+ super(KConfigEntryTristate, self).__init__(name, value, comments)
+
+ def __str__(self) -> str:
+ if self.value is self.VALUE_MOD:
+ return 'CONFIG_{}=m'.format(self.name)
+ if self.value:
+ return 'CONFIG_{}=y'.format(self.name)
+ return '# CONFIG_{} is not set'.format(self.name)
+
+
+class KconfigFile(OrderedDict[str, KConfigEntry]):
+ def __str__(self) -> str:
+ ret = []
+ for i in self.str_iter():
+ ret.append(i)
+ return '\n'.join(ret) + '\n'
+
+ def read(self, f) -> None:
+ for line in iter(f.readlines()):
+ line = line.strip()
+ if line.startswith("CONFIG_"):
+ i = line.find('=')
+ option = line[7:i]
+ value = line[i + 1:]
+ self.set(option, value)
+ elif line.startswith("# CONFIG_"):
+ option = line[9:-11]
+ self.set(option, 'n')
+ elif line.startswith("#") or not line:
+ pass
+ else:
+ raise RuntimeError("Can't recognize %s" % line)
+
+ def set(self, key, value) -> None:
+ if value in ('y', 'm', 'n'):
+ self[key] = KConfigEntryTristate(key, value)
+ else:
+ self[key] = KConfigEntry(key, value)
+
+ def str_iter(self) -> Iterable[str]:
+ for key, value in self.items():
+ yield str(value)
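
Usage sketch (illustrative, not part of the patch): parsing and re-emitting a kernel config fragment with KconfigFile.

    import io
    from debian_linux.kconfig import KconfigFile

    kconfig = KconfigFile()
    kconfig.read(io.StringIO(
        'CONFIG_SMP=y\n'
        '# CONFIG_DEBUG_INFO is not set\n'
        'CONFIG_DEFAULT_HOSTNAME="debian"\n'
    ))
    print(kconfig, end='')
    # CONFIG_SMP=y
    # # CONFIG_DEBUG_INFO is not set
    # CONFIG_DEFAULT_HOSTNAME="debian"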
diff --git a/debian/lib/python/debian_linux/test_debian.py b/debian/lib/python/debian_linux/test_debian.py
new file mode 100644
index 0000000000..06133dc46e
--- /dev/null
+++ b/debian/lib/python/debian_linux/test_debian.py
@@ -0,0 +1,424 @@
+import pytest
+
+from .debian import (
+ Version,
+ VersionLinux,
+ PackageArchitecture,
+ PackageDescription,
+ PackageRelationEntry,
+ PackageRelationGroup,
+ PackageRelation,
+ PackageBuildprofileEntry,
+ PackageBuildprofile,
+)
+
+
+class TestVersion:
+ def test_native(self) -> None:
+ v = Version('1.2+c~4')
+ assert v.epoch is None
+ assert v.upstream == '1.2+c~4'
+ assert v.revision is None
+ assert v.complete == '1.2+c~4'
+ assert v.complete_noepoch == '1.2+c~4'
+
+ def test_nonnative(self) -> None:
+ v = Version('1-2+d~3')
+ assert v.epoch is None
+ assert v.upstream == '1'
+ assert v.revision == '2+d~3'
+ assert v.complete == '1-2+d~3'
+ assert v.complete_noepoch == '1-2+d~3'
+
+ def test_native_epoch(self) -> None:
+ v = Version('5:1.2.3')
+ assert v.epoch == 5
+ assert v.upstream == '1.2.3'
+ assert v.revision is None
+ assert v.complete == '5:1.2.3'
+ assert v.complete_noepoch == '1.2.3'
+
+ def test_nonnative_epoch(self) -> None:
+ v = Version('5:1.2.3-4')
+ assert v.epoch == 5
+ assert v.upstream == '1.2.3'
+ assert v.revision == '4'
+ assert v.complete == '5:1.2.3-4'
+ assert v.complete_noepoch == '1.2.3-4'
+
+ def test_multi_hyphen(self) -> None:
+ v = Version('1-2-3')
+ assert v.epoch is None
+ assert v.upstream == '1-2'
+ assert v.revision == '3'
+ assert v.complete == '1-2-3'
+
+ def test_multi_colon(self) -> None:
+ v = Version('1:2:3')
+ assert v.epoch == 1
+ assert v.upstream == '2:3'
+ assert v.revision is None
+
+ def test_invalid_epoch(self) -> None:
+ with pytest.raises(RuntimeError):
+ Version('a:1')
+ with pytest.raises(RuntimeError):
+ Version('-1:1')
+ with pytest.raises(RuntimeError):
+ Version('1a:1')
+
+ def test_invalid_upstream(self) -> None:
+ with pytest.raises(RuntimeError):
+ Version('1_2')
+ with pytest.raises(RuntimeError):
+ Version('1/2')
+ with pytest.raises(RuntimeError):
+ Version('a1')
+ with pytest.raises(RuntimeError):
+ Version('1 2')
+
+ def test_invalid_revision(self) -> None:
+ with pytest.raises(RuntimeError):
+ Version('1-2_3')
+ with pytest.raises(RuntimeError):
+ Version('1-2/3')
+ with pytest.raises(RuntimeError):
+ Version('1-2:3')
+
+
+class TestVersionLinux:
+ def test_stable(self) -> None:
+ v = VersionLinux('1.2.3-4')
+ assert v.linux_version == '1.2'
+ assert v.linux_upstream == '1.2'
+ assert v.linux_upstream_full == '1.2.3'
+ assert v.linux_modifier is None
+ assert v.linux_dfsg is None
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_rc(self) -> None:
+ v = VersionLinux('1.2~rc3-4')
+ assert v.linux_version == '1.2'
+ assert v.linux_upstream == '1.2-rc3'
+ assert v.linux_upstream_full == '1.2-rc3'
+ assert v.linux_modifier == 'rc3'
+ assert v.linux_dfsg is None
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_dfsg(self) -> None:
+ v = VersionLinux('1.2~rc3.dfsg.1-4')
+ assert v.linux_version == '1.2'
+ assert v.linux_upstream == '1.2-rc3'
+ assert v.linux_upstream_full == '1.2-rc3'
+ assert v.linux_modifier == 'rc3'
+ assert v.linux_dfsg == '1'
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_experimental(self) -> None:
+ v = VersionLinux('1.2~rc3-4~exp5')
+ assert v.linux_upstream_full == '1.2-rc3'
+ assert v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_security(self) -> None:
+ v = VersionLinux('1.2.3-4+deb10u1')
+ assert v.linux_upstream_full == '1.2.3'
+ assert not v.linux_revision_experimental
+ assert v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_backports(self) -> None:
+ v = VersionLinux('1.2.3-4~bpo9+10')
+ assert v.linux_upstream_full == '1.2.3'
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_security_backports(self) -> None:
+ v = VersionLinux('1.2.3-4+deb10u1~bpo9+10')
+ assert v.linux_upstream_full == '1.2.3'
+ assert not v.linux_revision_experimental
+ assert v.linux_revision_security
+ assert v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_lts_backports(self) -> None:
+ # Backport during LTS, as an extra package in the -security
+ # suite. Since this is not part of a -backports suite it
+ # shouldn't get the linux_revision_backports flag.
+ v = VersionLinux('1.2.3-4~deb9u10')
+ assert v.linux_upstream_full == '1.2.3'
+ assert not v.linux_revision_experimental
+ assert v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_lts_backports_2(self) -> None:
+ # Same but with two security extensions in the revision.
+ v = VersionLinux('1.2.3-4+deb10u1~deb9u10')
+ assert v.linux_upstream_full == '1.2.3'
+ assert not v.linux_revision_experimental
+ assert v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_binnmu(self) -> None:
+ v = VersionLinux('1.2.3-4+b1')
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert not v.linux_revision_other
+
+ def test_other_revision(self) -> None:
+ v = VersionLinux('4.16.5-1+revert+crng+ready') # from #898087
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert v.linux_revision_other
+
+ def test_other_revision_binnmu(self) -> None:
+ v = VersionLinux('4.16.5-1+revert+crng+ready+b1')
+ assert not v.linux_revision_experimental
+ assert not v.linux_revision_security
+ assert not v.linux_revision_backports
+ assert v.linux_revision_other
+
+
+class TestPackageArchitecture:
+ def test_init(self) -> None:
+ a = PackageArchitecture()
+ assert a == set()
+
+ def test_init_str(self) -> None:
+ a = PackageArchitecture(' foo bar\tbaz ')
+ assert a == {'foo', 'bar', 'baz'}
+
+ def test_init_iter(self) -> None:
+ a = PackageArchitecture(('foo', 'bar'))
+ assert a == {'foo', 'bar'}
+
+ def test_init_self(self) -> None:
+ a = PackageArchitecture(PackageArchitecture(('foo', 'bar')))
+ assert a == {'foo', 'bar'}
+
+ def test_str(self) -> None:
+ a = PackageArchitecture(('foo', 'bar'))
+ assert str(a) == 'bar foo'
+
+
+class TestPackageDescription:
+ def test_init(self) -> None:
+ a = PackageDescription()
+ assert a.short == []
+ assert a.long == []
+
+ def test_init_str(self) -> None:
+ a = PackageDescription('Short\nLong1\n.\nLong2')
+ assert a.short == ['Short']
+ assert a.long == ['Long1', 'Long2']
+
+ def test_init_self(self) -> None:
+ a = PackageDescription(PackageDescription('Short\nLong1\n.\nLong2'))
+ assert a.short == ['Short']
+ assert a.long == ['Long1', 'Long2']
+
+ def test_str(self) -> None:
+ a = PackageDescription('Short\nLong1\n.\nLong2')
+ assert str(a) == 'Short\n Long1\n .\n Long2'
+
+
+class TestPackageRelationEntry:
+ def test_init_str(self) -> None:
+ a = PackageRelationEntry('package (>=version) [arch2 arch1] <profile1 >')
+ assert a.name == 'package'
+ assert a.version == 'version'
+ assert a.arches == {'arch1', 'arch2'}
+ # TODO: assert a.restrictions
+ assert str(a) == 'package (>= version) [arch1 arch2] <profile1>'
+
+ def test_init_self(self) -> None:
+ a = PackageRelationEntry(PackageRelationEntry('package [arch2 arch1]'))
+ assert a.name == 'package'
+ assert a.arches == {'arch1', 'arch2'}
+ assert str(a) == 'package [arch1 arch2]'
+
+
+class TestPackageRelationGroup:
+ def test_init(self) -> None:
+ a = PackageRelationGroup()
+ assert a == []
+
+ def test_init_str(self) -> None:
+ a = PackageRelationGroup('foo | bar')
+ assert len(a) == 2
+ assert a[0].name == 'foo'
+ assert a[1].name == 'bar'
+
+ def test_init_iter_entry(self) -> None:
+ a = PackageRelationGroup((PackageRelationEntry('foo'), PackageRelationEntry('bar')))
+ assert len(a) == 2
+ assert a[0].name == 'foo'
+ assert a[1].name == 'bar'
+
+ def test_init_iter_str(self) -> None:
+ a = PackageRelationGroup(('foo', 'bar'))
+ assert len(a) == 2
+ assert a[0].name == 'foo'
+ assert a[1].name == 'bar'
+
+ def test_init_self(self) -> None:
+ a = PackageRelationGroup(PackageRelationGroup(['foo', 'bar']))
+ assert len(a) == 2
+ assert a[0].name == 'foo'
+ assert a[1].name == 'bar'
+
+ def test_str(self) -> None:
+ a = PackageRelationGroup('foo| bar')
+ assert str(a) == 'foo | bar'
+
+
+class TestPackageRelation:
+ def test_init(self) -> None:
+ a = PackageRelation()
+ assert a == []
+
+ def test_init_str(self) -> None:
+ a = PackageRelation('foo1 | foo2, bar')
+ assert len(a) == 2
+ assert len(a[0]) == 2
+ assert a[0][0].name == 'foo1'
+ assert a[0][1].name == 'foo2'
+ assert len(a[1]) == 1
+ assert a[1][0].name == 'bar'
+
+ def test_init_iter_entry(self) -> None:
+ a = PackageRelation([[PackageRelationEntry('foo')], [PackageRelationEntry('bar')]])
+ assert len(a) == 2
+ assert len(a[0]) == 1
+ assert a[0][0].name == 'foo'
+ assert len(a[1]) == 1
+ assert a[1][0].name == 'bar'
+
+ def test_init_iter_str(self) -> None:
+ a = PackageRelation(('foo', 'bar'))
+ assert len(a) == 2
+ assert len(a[0]) == 1
+ assert a[0][0].name == 'foo'
+ assert len(a[1]) == 1
+ assert a[1][0].name == 'bar'
+
+ def test_init_self(self) -> None:
+ a = PackageRelation(PackageRelation(('foo', 'bar')))
+ assert len(a) == 2
+ assert len(a[0]) == 1
+ assert a[0][0].name == 'foo'
+ assert len(a[1]) == 1
+ assert a[1][0].name == 'bar'
+
+ def test_str(self) -> None:
+ a = PackageRelation('foo ,bar')
+ assert str(a) == 'foo, bar'
+
+
+class TestPackageBuildprofileEntry:
+ def test_parse(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 !profile2 profile3 !profile4>')
+ assert a.pos == {'profile1', 'profile3'}
+ assert a.neg == {'profile2', 'profile4'}
+ assert str(a) == '<profile1 profile3 !profile2 !profile4>'
+
+ def test_eq(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 !profile2>')
+ b = PackageBuildprofileEntry(pos={'profile1'}, neg={'profile2'})
+ assert a == b
+
+ def test_isdisjoint(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 profile2>')
+ b = PackageBuildprofileEntry.parse('<profile1 profile3>')
+ assert a.isdisjoint(b)
+
+ def test_issubset_empty(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 profile2>')
+ b = PackageBuildprofileEntry()
+ assert a.issubset(b)
+
+ def test_issubset_pos(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 profile2>')
+ b = PackageBuildprofileEntry.parse('<profile1>')
+ assert a.issubset(b)
+
+ def test_issubset_neg(self) -> None:
+ a = PackageBuildprofileEntry.parse('<!profile1>')
+ b = PackageBuildprofileEntry.parse('<!profile1 !profile2>')
+ assert a.issubset(b)
+
+ def test_issubset_both(self) -> None:
+ a = PackageBuildprofileEntry.parse('<!profile1 !profile2 profile3>')
+ b = PackageBuildprofileEntry.parse('<!profile1 !profile2 !profile3>')
+ assert a.issubset(b)
+
+ def test_issuperset_empty(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 profile2>')
+ b = PackageBuildprofileEntry()
+ assert b.issuperset(a)
+
+ def test_issuperset_pos(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 profile2>')
+ b = PackageBuildprofileEntry.parse('<profile1>')
+ assert b.issuperset(a)
+
+ def test_issuperset_neg(self) -> None:
+ a = PackageBuildprofileEntry.parse('<!profile1>')
+ b = PackageBuildprofileEntry.parse('<!profile1 !profile2>')
+ assert b.issuperset(a)
+
+ def test_issuperset_both(self) -> None:
+ a = PackageBuildprofileEntry.parse('<!profile1 !profile2 profile3>')
+ b = PackageBuildprofileEntry.parse('<!profile1 !profile2 !profile3>')
+ assert b.issuperset(a)
+
+ def test_update_pos(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 profile2>')
+ b = PackageBuildprofileEntry.parse('<profile1>')
+ a.update(b)
+ assert a.pos == {'profile1'}
+ assert a.neg == set()
+
+ def test_update_neg(self) -> None:
+ a = PackageBuildprofileEntry.parse('<!profile1 !profile2>')
+ b = PackageBuildprofileEntry.parse('<!profile1>')
+ a.update(b)
+ assert a.pos == set()
+ assert a.neg == {'profile1'}
+
+ def test_update_both(self) -> None:
+ a = PackageBuildprofileEntry.parse('<profile1 !profile2 profile3>')
+ b = PackageBuildprofileEntry.parse('<profile1 !profile2 !profile3>')
+ a.update(b)
+ assert a.pos == {'profile1'}
+ assert a.neg == {'profile2'}
+
+
+class TestPackageBuildprofile:
+ def test_parse(self) -> None:
+ a = PackageBuildprofile.parse('<profile1> <!profile2> <profile3> <!profile4>')
+ assert str(a) == '<profile1> <!profile2> <profile3> <!profile4>'
+
+ def test_update(self) -> None:
+ a = PackageBuildprofile.parse('<profile1 profile2> <profile2>')
+ b = PackageBuildprofile.parse('<profile1> <profile2 !profile3> <profile3>')
+ a.update(b)
+ assert str(a) == '<profile1> <profile2> <profile3>'
diff --git a/debian/lib/python/debian_linux/utils.py b/debian/lib/python/debian_linux/utils.py
new file mode 100644
index 0000000000..0c6569b5a0
--- /dev/null
+++ b/debian/lib/python/debian_linux/utils.py
@@ -0,0 +1,76 @@
+import io
+import os
+import re
+import textwrap
+import typing
+
+import jinja2
+
+from .debian import SourcePackage, BinaryPackage, TestsControl
+
+
+class Templates(object):
+ dirs: list[str]
+ _cache: dict[str, str]
+ _jinja2: jinja2.Environment
+
+ def __init__(self, dirs: list[str] = ["debian/templates"]) -> None:
+ self.dirs = dirs
+
+ self._cache = {}
+ self._jinja2 = jinja2.Environment(
+ # autoescape uses HTML safe escaping, which does not help us
+ autoescape=False,
+ keep_trailing_newline=True,
+ trim_blocks=True,
+ undefined=jinja2.StrictUndefined,
+ )
+
+ def _read(self, name: str) -> typing.Any:
+ pkgid, name = name.rsplit('.', 1)
+
+ for suffix in ['.j2', '.in', '']:
+ for dir in self.dirs:
+ filename = "%s/%s.%s%s" % (dir, pkgid, name, suffix)
+ if os.path.exists(filename):
+ with open(filename, 'r', encoding='utf-8') as f:
+ return (f.read(), suffix)
+
+ raise KeyError(name)
+
+ def _get(self, key: str) -> typing.Any:
+ try:
+ return self._cache[key]
+ except KeyError:
+ self._cache[key] = value = self._read(key)
+ return value
+
+ def get(self, key: str, context: dict[str, str] = {}) -> str:
+ value = self._get(key)
+ suffix = value[1]
+
+ if context:
+ if suffix == '.in':
+ def subst(match):
+ return context[match.group(1)]
+ return re.sub(r'@([-_a-z0-9]+)@', subst, str(value[0]))
+
+ elif suffix == '.j2':
+ return self._jinja2.from_string(value[0]).render(context)
+
+ return value[0]
+
+ def get_control(self, key: str, context: dict[str, str] = {}) -> list[BinaryPackage]:
+ return BinaryPackage.read_rfc822(io.StringIO(self.get(key, context)))
+
+ def get_source_control(self, key: str, context: dict[str, str] = {}) -> list[SourcePackage]:
+ return SourcePackage.read_rfc822(io.StringIO(self.get(key, context)))
+
+ def get_tests_control(self, key: str, context: dict[str, str] = {}) -> list[TestsControl]:
+ return TestsControl.read_rfc822(io.StringIO(self.get(key, context)))
+
+
+class TextWrapper(textwrap.TextWrapper):
+ wordsep_re = re.compile(
+ r'(\s+|' # any whitespace
+ r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash
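
Usage sketch (illustrative, not part of the patch): rendering a template through Templates. The template name and placeholder are hypothetical, and the sketch assumes a debian/templates directory containing such a file; '.in' templates use the '@name@' substitution implemented in get(), '.j2' templates go through Jinja2.

    from debian_linux.utils import Templates

    templates = Templates(['debian/templates'])
    # Looks for debian/templates/image.postinst.j2, then .in, then a plain file.
    script = templates.get('image.postinst',
                           {'package': 'linux-image-6.6.15-amd64'})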