Adding debian version 6.12.33-1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
This commit is contained in:
parent
79d69e5050
commit
09ce90cf76
1602 changed files with 165255 additions and 0 deletions
1
debian/lib/python/debian_linux/__init__.py
vendored
Normal file
1
debian/lib/python/debian_linux/__init__.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
# Module
|
45
debian/lib/python/debian_linux/abi.py
vendored
Normal file
45
debian/lib/python/debian_linux/abi.py
vendored
Normal file
|
@ -0,0 +1,45 @@
|
|||
class Symbol(object):
    """A single exported kernel symbol from a symbol version (symvers) table."""

    def __init__(self, name, namespace, module, version, export):
        self.name = name
        self.namespace = namespace
        self.module = module
        self.version = version
        self.export = export

    def __eq__(self, other):
        if not isinstance(other, Symbol):
            return NotImplemented
        # Symbols are resolved to modules by depmod at installation/
        # upgrade time, not compile time, so moving a symbol between
        # modules is not an ABI change. Compare everything else.
        return ((self.name, self.namespace, self.version, self.export)
                == (other.name, other.namespace, other.version, other.export))

    def __ne__(self, other):
        equal = self.__eq__(other)
        return equal if equal is NotImplemented else not equal
|
||||
|
||||
|
||||
class Symbols(dict):
    """Mapping of symbol name -> Symbol, (de)serialized in symvers format.

    The file format is one symbol per line with tab-separated fields:
    version, name, module, export, namespace.
    """

    def __init__(self, file=None):
        super(Symbols, self).__init__()
        if file:
            self.read(file)

    def read(self, file):
        """Populate the mapping from an open symvers file, keyed by name."""
        for line in file:
            version, name, module, export, namespace = \
                line.strip('\r\n').split('\t')
            self[name] = Symbol(name, namespace, module, version, export)

    def write(self, file):
        """Write all symbols, sorted by name, in tab-separated format."""
        for sym in sorted(self.values(), key=lambda s: s.name):
            file.write("%s\t%s\t%s\t%s\t%s\n" %
                       (sym.version, sym.name, sym.module, sym.export,
                        sym.namespace))
|
257
debian/lib/python/debian_linux/config.py
vendored
Normal file
257
debian/lib/python/debian_linux/config.py
vendored
Normal file
|
@ -0,0 +1,257 @@
|
|||
import collections
|
||||
import os
|
||||
import os.path
|
||||
import pickle
|
||||
import re
|
||||
import sys
|
||||
|
||||
from configparser import RawConfigParser
|
||||
|
||||
__all__ = [
|
||||
'ConfigCoreDump',
|
||||
'ConfigCoreHierarchy',
|
||||
'ConfigParser',
|
||||
]
|
||||
|
||||
|
||||
class SchemaItemBoolean(object):
    """Convert a config string to bool.

    Accepts "true"/"1" and "false"/"0" (case-insensitive, surrounding
    whitespace ignored); anything else raises ValueError.
    """

    def __call__(self, i):
        value = i.strip().lower()
        if value in ("true", "1"):
            return True
        if value in ("false", "0"):
            return False
        # Include the offending text so configuration errors are diagnosable
        # (the original raised a bare ValueError with no message).
        raise ValueError('invalid boolean value: %r' % i)
|
||||
|
||||
|
||||
class SchemaItemInteger(object):
    """Convert a config string to int, honouring base prefixes (0x, 0o, 0b)."""

    def __call__(self, i):
        # Base 0 lets int() auto-detect the base from the prefix
        return int(i.strip(), 0)
|
||||
|
||||
|
||||
class SchemaItemList(object):
    """Convert a config string to a list by splitting on a regex.

    The separator pattern defaults to runs of whitespace; each resulting
    item is stripped.  An empty (or all-whitespace) input yields [].
    """

    def __init__(self, type=r"\s+"):
        self.type = type

    def __call__(self, i):
        stripped = i.strip()
        if not stripped:
            return []
        return [part.strip() for part in re.split(self.type, stripped)]
|
||||
|
||||
|
||||
# Using OrderedDict instead of dict makes the pickled config reproducible
class ConfigCore(collections.OrderedDict):
    """Flattened config database keyed by tuples such as
    (section,), (section, arch), (section, arch, featureset),
    (section, arch, featureset, flavour)."""

    def get_merge(self, section, arch, featureset, flavour, key, default=None):
        """Look up `key`, consulting levels from most to least specific.

        List/tuple values accumulate across levels; a scalar value wins at
        the most specific level that defines it (or terminates an
        accumulated list).  Returns `default` if nothing matched.
        """
        candidates = []
        if arch and featureset and flavour:
            candidates.append(
                self.get((section, arch, featureset, flavour), {}).get(key))
            candidates.append(
                self.get((section, arch, None, flavour), {}).get(key))
        if arch and featureset:
            candidates.append(self.get((section, arch, featureset), {}).get(key))
        if arch:
            candidates.append(self.get((section, arch), {}).get(key))
        if featureset:
            candidates.append(self.get((section, None, featureset), {}).get(key))
        candidates.append(self.get((section,), {}).get(key))

        merged = []
        for value in candidates:
            if value is None:
                continue
            if isinstance(value, (list, tuple)):
                merged.extend(value)
            elif merged:
                # TODO: a scalar below accumulated list values stops the merge
                return merged
            else:
                # First hit is a scalar: the most specific level wins outright
                return value

        return merged or default

    def merge(self, section, arch=None, featureset=None, flavour=None):
        """Return a dict for `section`, updated from least to most specific
        level so that more specific settings override general ones."""
        merged = {}
        merged.update(self.get((section,), {}))
        if featureset:
            merged.update(self.get((section, None, featureset), {}))
        if arch:
            merged.update(self.get((section, arch), {}))
        if arch and featureset:
            merged.update(self.get((section, arch, featureset), {}))
        if arch and featureset and flavour:
            merged.update(self.get((section, arch, None, flavour), {}))
            merged.update(self.get((section, arch, featureset, flavour), {}))
        return merged

    def dump(self, fp):
        """Pickle the whole database to fp (protocol 0: text-based, stable)."""
        pickle.dump(self, fp, 0)
|
||||
|
||||
|
||||
class ConfigCoreDump(object):
    """Load a previously pickled config database.

    Note this is a constructor-only wrapper: instantiating it returns the
    unpickled object (whatever was dumped, normally a ConfigCore), not a
    ConfigCoreDump instance.
    """

    # Fix: the first parameter of __new__ is the class, conventionally
    # named `cls`, not `self` (it is passed positionally, so callers are
    # unaffected).
    def __new__(cls, fp):
        return pickle.load(fp)
|
||||
|
||||
|
||||
class ConfigCoreHierarchy(object):
    """Build a flat ConfigCore database from a hierarchy of "defines" files.

    Constructor-only wrapper: instantiating it returns the populated
    ConfigCore, not a ConfigCoreHierarchy instance.  `dirs` is the list of
    directories searched (overlay-style) for defines files.
    """

    # Schema always applied to the [base] section; callers extend it.
    schema_base = {
        'base': {
            'arches': SchemaItemList(),
            'enabled': SchemaItemBoolean(),
            'featuresets': SchemaItemList(),
            'flavours': SchemaItemList(),
        },
    }

    # NOTE(review): mutable default `dirs=[]` — safe here because it is
    # only iterated, never mutated, but None would be more idiomatic.
    def __new__(cls, schema, dirs=[]):
        # Merge the caller's schema on top of schema_base, per section.
        schema_complete = cls.schema_base.copy()
        for key, value in schema.items():
            schema_complete.setdefault(key, {}).update(value)
        return cls.Reader(dirs, schema_complete)()

    class Reader(object):
        # Name of the per-directory config file looked up at each level.
        config_name = "defines"

        def __init__(self, dirs, schema):
            self.dirs, self.schema = dirs, schema

        def __call__(self):
            ret = ConfigCore()
            self.read(ret)
            return ret

        def get_files(self, *dirs):
            # Candidate "defines" paths for this hierarchy level, one per
            # configured base directory (falsy entries are skipped).
            dirs = list(dirs)
            dirs.append(self.config_name)
            return (os.path.join(i, *dirs) for i in self.dirs if i)

        def read_arch(self, ret, arch):
            """Read <arch>/defines and store its sections keyed by arch."""
            config = ConfigParser(self.schema)
            config.read(self.get_files(arch))

            featuresets = config['base', ].get('featuresets', [])
            flavours = config['base', ].get('flavours', [])

            for section in iter(config):
                # Map the raw section tuple to a database key of the form
                # (section-name, arch[, featureset][, flavour]).
                if section[0] in featuresets:
                    real = (section[-1], arch, section[0])
                elif len(section) > 1:
                    real = (section[-1], arch, None) + section[:-1]
                else:
                    real = (section[-1], arch) + section[:-1]
                s = ret.get(real, {})
                s.update(config[section])
                ret[tuple(real)] = s

            for featureset in featuresets:
                self.read_arch_featureset(ret, arch, featureset)

            if flavours:
                # Top-level flavours are re-homed into an implicit 'none'
                # featureset so every flavour lives under some featureset.
                base = ret['base', arch]
                featuresets.insert(0, 'none')
                base['featuresets'] = featuresets
                del base['flavours']
                ret['base', arch] = base
                ret['base', arch, 'none'] = {'flavours': flavours,
                                             'implicit-flavour': True}

        def read_arch_featureset(self, ret, arch, featureset):
            """Read <arch>/<featureset>/defines into arch+featureset keys."""
            config = ConfigParser(self.schema)
            config.read(self.get_files(arch, featureset))

            for section in iter(config):
                real = (section[-1], arch, featureset) + section[:-1]
                s = ret.get(real, {})
                s.update(config[section])
                ret[tuple(real)] = s

        def read(self, ret):
            """Read the top-level defines, then recurse into each arch and
            each global featureset."""
            config = ConfigParser(self.schema)
            config.read(self.get_files())

            arches = config['base', ]['arches']
            featuresets = config['base', ].get('featuresets', [])

            for section in iter(config):
                # 'featureset-X' sections become (name, None, 'X') keys;
                # len('featureset-') == 11, hence the [11:] slice.
                if section[0].startswith('featureset-'):
                    real = (section[-1], None, section[0][11:])
                else:
                    real = (section[-1],) + section[1:]
                ret[real] = config[section]

            for arch in arches:
                self.read_arch(ret, arch)
            for featureset in featuresets:
                self.read_featureset(ret, featureset)

        def read_featureset(self, ret, featureset):
            """Read featureset-<name>/defines into arch-independent keys."""
            config = ConfigParser(self.schema)
            config.read(self.get_files('featureset-%s' % featureset))

            for section in iter(config):
                real = (section[-1], None, featureset)
                s = ret.get(real, {})
                s.update(config[section])
                ret[real] = s
|
||||
|
||||
|
||||
class ConfigParser(object):
    """INI reader that converts values through per-section schemas.

    Section names are split on '_'; the last component selects the schema.
    Sections with a known schema are keyed by the full tuple of name
    components, unknown sections by a one-element tuple of the raw name.
    """

    __slots__ = '_config', 'schemas'

    def __init__(self, schemas):
        self.schemas = schemas
        self._config = RawConfigParser()

    def __getitem__(self, key):
        return self._convert()[key]

    def __iter__(self):
        return iter(self._convert())

    def __str__(self):
        return '<%s(%s)>' % (self.__class__.__name__, self._convert())

    def _convert(self):
        # Build {section-tuple: {key: converted value}} from the raw parser.
        converted = {}
        for section in self._config.sections():
            data = dict(self._config.items(section))
            parts = section.split('_')
            base = parts[-1]
            if base in self.schemas:
                section_key = tuple(parts)
                data = self._convert_one(self.schemas[base], data)
            else:
                section_key = (section, )
            converted[section_key] = data
        return converted

    def _convert_one(self, schema, data):
        # Apply per-key converters; continuation newlines become spaces first.
        out = {}
        for key, value in data.items():
            value = value.replace('\n', ' ')
            if key in schema:
                value = schema[key](value)
            out[key] = value
        return out

    def keys(self):
        return self._convert().keys()

    def read(self, data):
        # `data` is a filename or iterable of filenames (RawConfigParser.read).
        return self._config.read(data)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Debug helper: pretty-print a previously dumped config database.
    sys.path.append('debian/lib/python')
    config = ConfigCoreDump(open('debian/config.defines.dump', 'rb'))

    def _section_sort_key(entry):
        # Section keys are tuples that may contain None; map None to ''
        # so the tuples are comparable.
        return tuple(i or '' for i in entry[0])

    for section, items in sorted(config.items(), key=_section_sort_key):
        print(u"[%s]" % (section,))
        for item, value in sorted(items.items()):
            print(u"%s: %s" % (item, value))
        print()
|
654
debian/lib/python/debian_linux/config_v2.py
vendored
Normal file
654
debian/lib/python/debian_linux/config_v2.py
vendored
Normal file
|
@ -0,0 +1,654 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import functools
|
||||
import re
|
||||
import subprocess
|
||||
import tomllib
|
||||
from collections.abc import (
|
||||
Iterable,
|
||||
)
|
||||
from pathlib import Path
|
||||
from typing import (
|
||||
Optional,
|
||||
Self,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
import dacite
|
||||
|
||||
from . import dataclasses_extra
|
||||
from .debian import PackageRelationGroup
|
||||
|
||||
|
||||
# Wrapper for regex objects, whose type is not a documented API
|
||||
class _RegexWrapper:
|
||||
def __init__(self, s):
|
||||
self._re = re.compile(s)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._re, name)
|
||||
|
||||
|
||||
# Shared dacite configuration for loading defines.toml: these types are
# constructed directly from the TOML string value ("cast"), and unknown
# keys are rejected ("strict") so typos in defines.toml fail loudly.
_dacite_config = dacite.Config(
    cast=[
        PackageRelationGroup,
        Path,
        _RegexWrapper,
    ],
    strict=True,
)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigBuild:
    """Build settings for one node of the config hierarchy.

    All fields default to None/empty so unset values can inherit from
    less specific hierarchy levels when merged.
    """

    cflags: Optional[str] = None
    compiler: Optional[str] = None
    compiler_gnutype: Optional[str] = None
    compiler_gnutype_compat: Optional[str] = None
    # Extra kernel config files listed in defines.toml.
    config: list[Path] = dataclasses.field(default_factory=list)
    # Per-node default config file; filled in by
    # ConfigBase.__post_init_hierarchy__, not read from defines.toml.
    config_default: list[Path] = dataclasses.field(default_factory=list, repr=False)
    enable_signed: Optional[bool] = None
    enable_vdso: Optional[bool] = None
    kernel_file: Optional[str] = None
    kernel_stem: Optional[str] = None
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigDescription:
    """Human-readable description fields for one hierarchy node."""

    hardware: Optional[str] = None
    hardware_long: Optional[str] = None
    # 'parts' orders the named text fragments stored in 'short'/'long';
    # presumably consumed when generating package descriptions — confirm
    # against the templates code.
    parts: list[str] = dataclasses.field(default_factory=list)
    short: dict[str, str] = dataclasses.field(default_factory=dict)
    long: dict[str, str] = dataclasses.field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigPackages:
    """Switches selecting which binary packages get built.

    Each field is tri-state: None means "unset at this level" so merging
    can fall through to a more general level; metadata['default'] carries
    the ultimate fallback (presumably applied by
    dataclasses_extra.merge_default — confirm there).
    """

    docs: Optional[bool] = dataclasses.field(default=None, metadata={'default': True})
    installer: Optional[bool] = dataclasses.field(default=None, metadata={'default': False})
    libc_dev: Optional[bool] = dataclasses.field(default=None, metadata={'default': True})
    meta: Optional[bool] = dataclasses.field(default=None, metadata={'default': True})
    source: Optional[bool] = dataclasses.field(default=None, metadata={'default': True})
    tools_unversioned: Optional[bool] = dataclasses.field(default=None, metadata={'default': True})
    tools_versioned: Optional[bool] = dataclasses.field(default=None, metadata={'default': True})
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigRelationsSingle:
    """Debian package relationship lists for a single package."""

    depends: list[PackageRelationGroup] = dataclasses.field(default_factory=list)
    recommends: list[PackageRelationGroup] = dataclasses.field(default_factory=list)
    suggests: list[PackageRelationGroup] = dataclasses.field(default_factory=list)
    breaks: list[PackageRelationGroup] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigRelations:
    """Package relations grouped by target package; only the kernel image
    package is configurable here so far."""

    image: ConfigRelationsSingle = dataclasses.field(default_factory=ConfigRelationsSingle)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigDebianarchDefs:
    """Derived (non-TOML) attributes of a Debian architecture.

    The owning architecture name is injected after construction via
    __post_init_defs__; the field is init=False so dacite/TOML loading
    cannot set it.
    """

    __arch: Optional[str] = dataclasses.field(default=None, init=False)

    def __post_init_defs__(self, parent: ConfigDebianarch) -> None:
        # Called from ConfigDebianarch.__post_init__ with the parent node.
        self.__arch = parent.name

    @staticmethod
    @functools.cache
    def __dpkg_architecture(arch: str, query: str) -> str:
        # Ask dpkg-architecture for a single variable.  Cached because the
        # same (arch, query) pair is queried repeatedly; caching on a
        # staticmethod avoids keeping instances alive.
        return subprocess.check_output(
            [
                'dpkg-architecture',
                '-f',
                '-a', arch,
                '-q', query,
            ],
            stderr=subprocess.DEVNULL,
            encoding='ascii',
        ).strip()

    @property
    def gnutype(self) -> str:
        # GNU type triplet of the architecture, per dpkg-architecture.
        assert self.__arch is not None
        return self.__dpkg_architecture(self.__arch, 'DEB_HOST_GNU_TYPE')

    @property
    def gnutype_package(self) -> str:
        # GNU type in package-name form (underscores are not valid in
        # Debian package names).
        return self.gnutype.replace("_", "-")

    @property
    def multiarch(self) -> str:
        # Debian multiarch tuple, per dpkg-architecture.
        assert self.__arch is not None
        return self.__dpkg_architecture(self.__arch, 'DEB_HOST_MULTIARCH')
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigFlavourDefs:
    """Flags for a flavour: is_default marks the default flavour,
    is_quick the flavour used for quick builds.  Both are normalized by
    ConfigFeatureset.__post_init_hierarchy_featureset__."""

    is_default: bool = False
    is_quick: bool = False
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigDebianrelease:
    """Per-Debian-release matching rules.

    name_regex selects the release, revision_regex the package revision
    (default: match anything); abi_version_full/abi_suffix presumably
    control how the ABI name is formed for matching releases — confirm
    against the consumers of this class.
    """

    name_regex: _RegexWrapper
    abi_version_full: bool = True
    abi_suffix: str = ''
    revision_regex: _RegexWrapper = _RegexWrapper('.*')
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigBase:
    """Common fields and loading behaviour for every hierarchy node."""

    name: str
    enable: bool = True
    # Directory of this node relative to the config root; set by
    # __post_init_hierarchy__, not read from defines.toml.
    path: Optional[Path] = None

    build: ConfigBuild = dataclasses.field(default_factory=ConfigBuild)
    description: ConfigDescription = dataclasses.field(default_factory=ConfigDescription)
    packages: ConfigPackages = dataclasses.field(default_factory=ConfigPackages)
    relations: ConfigRelations = dataclasses.field(default_factory=ConfigRelations)

    def __post_init_hierarchy__(self, path: Path) -> None:
        '''
        Setup path and default config in the complete hierarchy
        '''
        self.path = path
        self.build.config_default = [path / 'config']

    def read_replace(self, bases: Iterable[Path], path: Path) -> Self:
        '''
        Read defines.toml at specified path in all bases and merged them
        '''
        config = self

        try:
            for base in bases:
                if (file := base / path / 'defines.toml').exists():
                    with file.open('rb') as f:
                        # Overlay the file's data on a dict dump of the
                        # current values, then re-parse through dacite.
                        data = dataclasses.asdict(self) | tomllib.load(f)

                    config = dataclasses_extra.merge(config, dacite.from_dict(
                        data_class=self.__class__,
                        data=data,
                        config=_dacite_config,
                    ))
        except tomllib.TOMLDecodeError as e:
            # Re-raise with the offending file name; drop the context.
            raise RuntimeError(f'{file}: {e}') from None

        return config
|
||||
|
||||
|
||||
ConfigT = TypeVar('ConfigT', bound=ConfigBase)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class Config(ConfigBase):
    """Root of the config hierarchy, loaded from defines.toml files."""

    # Disable basic fields: the root has no name and is always enabled.
    name: str = dataclasses.field(init=False, repr=False, default='')
    enable: bool = dataclasses.field(init=False, repr=False, default=True)

    featureset: list[ConfigFeatureset] = dataclasses.field(
        default_factory=list, metadata={'merge': 'assoclist'},
    )
    kernelarch: list[ConfigKernelarch] = dataclasses.field(
        default_factory=list, metadata={'merge': 'assoclist'},
    )
    debianrelease: list[ConfigDebianrelease] = dataclasses.field(
        default_factory=list,
    )

    def __post_init_hierarchy__(self, path: Path) -> None:
        super().__post_init_hierarchy__(path)

        # Root-level featuresets have no owning debianarch (second arg None).
        for featureset in self.featureset:
            featureset.__post_init_hierarchy_featureset__(
                Path(f'featureset-{featureset.name}'),
                None,
            )
        for kernelarch in self.kernelarch:
            kernelarch.__post_init_hierarchy__(
                Path(f'kernelarch-{kernelarch.name}'),
            )

    @property
    def merged(self) -> ConfigMerged:
        # Entry point into the merged (read) view of the hierarchy.
        return ConfigMerged(root=self)

    @classmethod
    def read_orig(cls, bases: Iterable[Path]) -> Config:
        '''
        Read defines.toml at the root in all bases and merge them
        '''
        config = cls()
        found = False

        try:
            for base in bases:
                if (file := base / 'defines.toml').exists():
                    with file.open('rb') as f:
                        data = tomllib.load(f)
                        found = True

                    config = dataclasses_extra.merge(config, dacite.from_dict(
                        data_class=cls,
                        data=data,
                        config=_dacite_config,
                    ))
        except (tomllib.TOMLDecodeError, dacite.exceptions.UnexpectedDataError) as e:
            raise RuntimeError(f'{file}: {e}') from None
        if not found:
            raise FileNotFoundError('Did not find defines.toml in any directory')

        # First pass: assign paths so per-node defines.toml can be found.
        config.__post_init_hierarchy__(Path())

        config.featureset = list(cls._read_hierarchy(bases, config.featureset))
        config.kernelarch = list(cls._read_hierarchy(bases, config.kernelarch))
        for kernelarch in config.kernelarch:
            kernelarch.debianarch = list(cls._read_hierarchy(bases, kernelarch.debianarch))

        # Second pass: re-run path/default setup over the now-complete tree.
        config.__post_init_hierarchy__(Path())

        return config

    @classmethod
    def _read_hierarchy(
        cls, bases: Iterable[Path], orig: Iterable[ConfigT],
    ) -> Iterable[ConfigT]:
        # Replace each node by its per-directory overlay when one exists;
        # nodes without their own defines.toml pass through unchanged.
        for i in orig:
            try:
                assert i.path is not None
                yield i.read_replace(bases, i.path)
            except FileNotFoundError:
                yield i
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigKernelarch(ConfigBase):
    """One kernel architecture, containing its Debian architectures."""

    debianarch: list[ConfigDebianarch] = dataclasses.field(
        default_factory=list, metadata={'merge': 'assoclist'},
    )

    def __post_init_hierarchy__(self, path: Path) -> None:
        super().__post_init_hierarchy__(path)

        # Each Debian architecture lives in a directory named after it,
        # at the top level (not under the kernelarch directory).
        for debianarch in self.debianarch:
            debianarch.__post_init_hierarchy__(
                Path(debianarch.name),
            )
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigDebianarch(ConfigBase):
    """One Debian architecture with its featuresets and flavours."""

    defs: ConfigDebianarchDefs = dataclasses.field(default_factory=ConfigDebianarchDefs)

    featureset: list[ConfigFeatureset] = dataclasses.field(
        default_factory=list, metadata={'merge': 'assoclist'},
    )
    flavour: list[ConfigFlavour] = dataclasses.field(
        default_factory=list, metadata={'merge': 'assoclist'},
    )

    def __post_init__(self) -> None:
        # Give the derived defs access to this architecture's name.
        self.defs.__post_init_defs__(self)

    def __post_init_hierarchy__(self, path: Path) -> None:
        super().__post_init_hierarchy__(path)

        # Featuresets get this arch passed in so they can inherit its
        # flavours when they define none of their own.
        for featureset in self.featureset:
            featureset.__post_init_hierarchy_featureset__(
                Path(path / featureset.name),
                self,
            )

        for flavour in self.flavour:
            flavour.__post_init_hierarchy__(path)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigFeatureset(ConfigBase):
    """One featureset; its flavours default to the Debian arch's."""

    flavour: list[ConfigFlavour] = dataclasses.field(default_factory=list)

    def __post_init_hierarchy__(self, path: Path) -> None:
        super().__post_init_hierarchy__(path)

        for flavour in self.flavour:
            flavour.__post_init_hierarchy__(path)

    def __post_init_hierarchy_featureset__(
        self,
        path: Path,
        debianarch: Optional[ConfigDebianarch],
    ) -> None:
        # If we have no flavours defined within a featureset, we copy it from debianarch
        if not self.flavour and debianarch:
            self.flavour = [
                ConfigFlavour(name=flavour.name, defs=flavour.defs)
                for flavour in debianarch.flavour
            ]

        if self.flavour:
            # XXX: Remove special case of name
            if self.name == 'none':
                flavour_default = [i for i in self.flavour if i.defs.is_default]
                flavour_quick = [i for i in self.flavour if i.defs.is_quick]

                # Ensure some flavour is marked quick: fall back to the
                # default flavour, then to the first one listed.
                if not flavour_quick:
                    flavour_quick = flavour_default or self.flavour[0:1]
                    flavour_quick[0].defs.is_quick = True

            # Flavours in other featuresets can never be default or quick
            else:
                for flavour in self.flavour:
                    flavour.defs.is_default = False
                    flavour.defs.is_quick = False

        self.__post_init_hierarchy__(path)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class ConfigFlavour(ConfigBase):
    """One build flavour within an arch/featureset."""

    defs: ConfigFlavourDefs = dataclasses.field(default_factory=ConfigFlavourDefs)

    def __post_init_hierarchy__(self, path: Path) -> None:
        # Unlike other nodes a flavour has no directory of its own: it
        # lives in its parent's directory with a per-flavour config file.
        # (Deliberately does not call super(): the parent class would set
        # config_default to 'config' instead of 'config.<name>'.)
        self.path = path
        self.build.config_default = [path / f'config.{self.name}']
|
||||
|
||||
|
||||
class ConfigMergedBase:
    """Read-only view merging a chain of hierarchy nodes.

    Subclasses append entries least-specific first; merge_default gives
    later (more specific) entries precedence for the merged properties.
    """

    _entries: list[ConfigBase]

    def __init__(self) -> None:
        self._entries = []

    @property
    def enable(self) -> bool:
        # Enabled only if no level in the chain disables it.
        for entry in self._entries:
            if not entry.enable:
                return False
        return True

    @property
    def build(self) -> ConfigBuild:
        return dataclasses_extra.merge_default(
            ConfigBuild, *(i.build for i in self._entries)
        )

    @property
    def config(self) -> list[Path]:
        # Kernel config files from every level, in chain order; unlike the
        # other properties these accumulate rather than override.
        ret: list[Path] = []
        for entry in self._entries:
            ret += entry.build.config + entry.build.config_default
        return ret

    @property
    def description(self) -> ConfigDescription:
        return dataclasses_extra.merge_default(
            ConfigDescription, *(i.description for i in self._entries)
        )

    @property
    def packages(self) -> ConfigPackages:
        return dataclasses_extra.merge_default(
            ConfigPackages, *(i.packages for i in self._entries)
        )

    @property
    def relations(self) -> ConfigRelations:
        return dataclasses_extra.merge_default(
            ConfigRelations, *(i.relations for i in self._entries)
        )
|
||||
|
||||
|
||||
class ConfigMerged(ConfigMergedBase):
    """Merged view rooted at the whole config (entry chain: [root])."""

    _root: Config

    def __init__(
        self, *,
        root: Optional[ConfigBase],
        **kw: Optional[ConfigBase],
    ) -> None:
        super().__init__(**kw)

        assert isinstance(root, Config)
        self._root = root
        self._entries.append(root)

    @property
    def root_featuresets(self) -> Iterable[ConfigMergedFeatureset]:
        # Featuresets defined at the root, not bound to any architecture.
        for featureset in self._root.featureset:
            yield ConfigMergedFeatureset(
                root=self._root,
                root_featureset=None,
                kernelarch=None,
                debianarch=None,
                debianarch_flavour=None,
                featureset=featureset,
            )

    @property
    def kernelarchs(self) -> Iterable[ConfigMergedKernelarch]:
        for kernelarch in self._root.kernelarch:
            yield ConfigMergedKernelarch(
                root=self._root,
                kernelarch=kernelarch,
            )

    @property
    def debianreleases(self) -> Iterable[ConfigDebianrelease]:
        # Release rules are root-only; nothing to merge.
        return self._root.debianrelease
|
||||
|
||||
|
||||
class ConfigMergedKernelarch(ConfigMerged):
    """Merged view down to one kernel architecture."""

    _kernelarch: ConfigKernelarch

    def __init__(
        self, *,
        kernelarch: Optional[ConfigBase],
        **kw: Optional[ConfigBase],
    ) -> None:
        super().__init__(**kw)

        # kernelarch may be None (e.g. ConfigMerged.root_featuresets
        # constructs subclasses with kernelarch=None).
        if kernelarch is not None:
            assert isinstance(kernelarch, ConfigKernelarch)
            self._kernelarch = kernelarch
            self._entries.append(kernelarch)

    @property
    def name(self) -> str:
        return self._kernelarch.name

    @property
    def name_kernelarch(self) -> str:
        # Unambiguous alias: 'name' is re-overridden at each merge level.
        return self._kernelarch.name

    @property
    def debianarchs(self) -> Iterable[ConfigMergedDebianarch]:
        for debianarch in self._kernelarch.debianarch:
            yield ConfigMergedDebianarch(
                root=self._root,
                kernelarch=self._kernelarch,
                debianarch=debianarch,
            )
|
||||
|
||||
|
||||
class ConfigMergedDebianarch(ConfigMergedKernelarch):
    """Merged view down to one Debian architecture."""

    _debianarch: ConfigDebianarch

    def __init__(
        self, *,
        debianarch: Optional[ConfigBase],
        **kw: Optional[ConfigBase],
    ) -> None:
        super().__init__(**kw)

        if debianarch is not None:
            assert isinstance(debianarch, ConfigDebianarch)
            self._debianarch = debianarch
            self._entries.append(debianarch)

    @property
    def name(self) -> str:
        return self._debianarch.name

    @property
    def name_debianarch(self) -> str:
        return self._debianarch.name

    @property
    def defs_debianarch(self) -> ConfigDebianarchDefs:
        return self._debianarch.defs

    @property
    def featuresets(self) -> Iterable[ConfigMergedFeatureset]:
        # Pair each per-arch featureset with the root featureset of the
        # same name so both levels contribute to the merge.  Raises
        # KeyError if an arch references a featureset the root lacks.
        root_featureset = {
            i.name: i
            for i in self._root.featureset
        }

        for featureset in self._debianarch.featureset:
            yield ConfigMergedFeatureset(
                root=self._root,
                root_featureset=root_featureset[featureset.name],
                kernelarch=self._kernelarch,
                debianarch=self._debianarch,
                debianarch_flavour=None,
                featureset=featureset,
            )
|
||||
|
||||
|
||||
class ConfigMergedFeatureset(ConfigMergedDebianarch):
    """Merged view down to one featureset."""

    _featureset: ConfigFeatureset
    _root_featureset: Optional[ConfigFeatureset] = None
    _debianarch_flavour: Optional[ConfigFlavour] = None

    def __init__(
        self, *,
        featureset: Optional[ConfigBase],
        root_featureset: Optional[ConfigBase],
        debianarch_flavour: Optional[ConfigBase],
        **kw: Optional[ConfigBase],
    ) -> None:
        super().__init__(**kw)

        # Append order matters: least specific first, so the per-arch
        # featureset (appended last) wins during merging.
        if debianarch_flavour is not None:
            assert isinstance(debianarch_flavour, ConfigFlavour)
            self._debianarch_flavour = debianarch_flavour
            self._entries.append(debianarch_flavour)

        if root_featureset is not None:
            assert isinstance(root_featureset, ConfigFeatureset)
            self._root_featureset = root_featureset
            self._entries.append(root_featureset)

        if featureset is not None:
            assert isinstance(featureset, ConfigFeatureset)
            self._featureset = featureset
            self._entries.append(featureset)

    @property
    def name(self) -> str:
        return self._featureset.name

    @property
    def name_featureset(self) -> str:
        return self._featureset.name

    @property
    def flavours(self) -> Iterable[ConfigMergedFlavour]:
        # Match each featureset flavour with the arch-level flavour of the
        # same name so the arch-level settings participate in the merge.
        debianarch_flavour = {
            i.name: i
            for i in self._debianarch.flavour
        }

        for flavour in self._featureset.flavour:
            yield ConfigMergedFlavour(
                root=self._root,
                root_featureset=self._root_featureset,
                kernelarch=self._kernelarch,
                debianarch=self._debianarch,
                debianarch_flavour=debianarch_flavour[flavour.name],
                featureset=self._featureset,
                flavour=flavour,
            )
|
||||
|
||||
|
||||
class ConfigMergedFlavour(ConfigMergedFeatureset):
    """Merged view down to one flavour — the most specific level."""

    _flavour: ConfigFlavour

    def __init__(
        self, *,
        flavour: Optional[ConfigBase],
        **kw: Optional[ConfigBase],
    ) -> None:
        super().__init__(**kw)

        if flavour is not None:
            assert isinstance(flavour, ConfigFlavour)
            self._flavour = flavour
            self._entries.append(flavour)

    @property
    def name(self) -> str:
        return self._flavour.name

    @property
    def name_flavour(self) -> str:
        return self._flavour.name

    @property
    def defs_flavour(self) -> ConfigFlavourDefs:
        return self._flavour.defs
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Debug helper: load the config from one or more directories and print
    # the enable/default state of every node in the merged hierarchy.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        'dir',
        default=[Path('debian/config')],
        nargs='+',
        type=Path,
    )
    args = parser.parse_args()
    config = Config.read_orig(args.dir)
    merged = config.merged

    # from pprint import pprint
    # pprint(config)

    def print_indent(indent: int, s: str, *args: str) -> None:
        print(' ' * indent * 4 + s, *args)

    for kernelarch in merged.kernelarchs:
        print_indent(
            0,
            f'Kernelarch: {kernelarch.name}',
            f'enable={kernelarch.enable}',
        )

        for debianarch in kernelarch.debianarchs:
            print_indent(
                1,
                f'Debianarch: {debianarch.name}',
                f'enable={debianarch.enable}',
            )

            for featureset in debianarch.featuresets:
                print_indent(
                    2,
                    f'Featureset: {featureset.name}',
                    f'enable={featureset.enable}',
                )

                for flavour in featureset.flavours:
                    print_indent(
                        3,
                        f'Flavour: {flavour.name}',
                        f'enable={flavour.enable}',
                        f'is_default={flavour.defs_flavour.is_default}',
                    )
                    print_indent(4, f'Config: {" ".join(str(i) for i in flavour.config)}')

                # for/else: the loop has no break, so this always runs,
                # emitting a blank line after each featureset's flavours.
                # NOTE(review): the else's attachment level is ambiguous in
                # the rendered source — confirm against the real file.
                else:
                    print()
|
234
debian/lib/python/debian_linux/dataclasses_deb822.py
vendored
Normal file
234
debian/lib/python/debian_linux/dataclasses_deb822.py
vendored
Normal file
|
@ -0,0 +1,234 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import re
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Generic,
|
||||
IO,
|
||||
Iterable,
|
||||
Optional,
|
||||
overload,
|
||||
TypeVar,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
# Generic value type used throughout this module.
_T = TypeVar('_T')


if TYPE_CHECKING:
    # dataclasses' private "any dataclass instance" TypeVar; importable
    # only while type checking.
    from dataclasses import _DataclassT
else:
    # We can only get to _DataclassT during type checking, use a generic type during runtime
    _DataclassT = _T
|
||||
|
||||
__all__ = [
|
||||
'field_deb822',
|
||||
'read_deb822',
|
||||
'write_deb822',
|
||||
'Deb822DecodeError',
|
||||
]
|
||||
|
||||
|
||||
class Deb822Field(Generic[_T]):
    """Per-field deb822 metadata: the stanza key plus value converters.

    `load` parses a raw string into the field's value; `dump` renders the
    value back to a string.  Either converter may be None.
    """

    key: str
    load: Optional[Callable[[str], _T]]
    dump: Optional[Callable[[_T], str]]

    def __init__(
        self, *,
        key: str,
        load: Optional[Callable[[str], _T]],
        dump: Optional[Callable[[_T], str]],
    ) -> None:
        self.dump = dump
        self.load = load
        self.key = key
|
||||
|
||||
|
||||
# The return type _T is technically wrong, but it allows checking if during
# runtime we get the correct type.
@overload
def field_deb822(
    deb822_key: str,
    /, *,
    deb822_load: Optional[Callable[[str], _T]] = None,
    deb822_dump: Optional[Callable[[_T], str]] = str,
    default: _T,
) -> _T:
    ...


@overload
def field_deb822(
    deb822_key: str,
    /, *,
    deb822_load: Optional[Callable[[str], _T]] = None,
    deb822_dump: Optional[Callable[[_T], str]] = str,
    default_factory: Callable[[], _T],
) -> _T:
    ...


@overload
def field_deb822(
    deb822_key: str,
    /, *,
    deb822_load: Optional[Callable[[str], _T]] = None,
    deb822_dump: Optional[Callable[[_T], str]] = str,
) -> _T:
    ...


def field_deb822(
    deb822_key: str,
    /, *,
    deb822_load: Optional[Callable[[str], _T]] = None,
    deb822_dump: Optional[Callable[[_T], str]] = str,
    default: Any = dataclasses.MISSING,
    default_factory: Any = dataclasses.MISSING,
) -> Any:
    '''
    Declare a dataclass field bound to the deb822 key *deb822_key*.

    The key and the load/dump converters are stashed in the field's
    metadata under 'deb822' for the reader/writer code to find.
    '''
    metadata: dict[str, Any] = {
        'deb822': Deb822Field(
            key=deb822_key,
            load=deb822_load,
            dump=deb822_dump,
        ),
    }

    # Exactly one of default/default_factory may be set; passing both on
    # to dataclasses.field would raise there.  MISSING values are simply
    # forwarded, which dataclasses.field treats as "not given".
    if default is not dataclasses.MISSING:
        return dataclasses.field(
            default=default,
            metadata=metadata,
        )
    else:
        return dataclasses.field(
            default_factory=default_factory,
            metadata=metadata,
        )
|
||||
|
||||
|
||||
class Deb822DecodeError(ValueError):
    """Raised when a deb822 input stream is malformed."""
|
||||
|
||||
|
||||
class Deb822DecodeState(Generic[_DataclassT]):
    """Incremental parser state for one deb822 stanza of a dataclass type."""

    cls: type[_DataclassT]
    # deb822 key -> dataclass field, for init=True fields carrying
    # 'deb822' metadata
    fields: dict[str, dataclasses.Field]
    ignore_unknown: bool

    # Raw accumulated values; the None key collects values of ignored
    # unknown fields
    data: dict[Optional[dataclasses.Field], str]
    current: Optional[dataclasses.Field]

    _line_re = re.compile(r'''
        ^
        (
            [ \t](?P<cont>.*)
        |
            (?P<key>[^: \t\n\r\f\v]+)\s*:\s*(?P<value>.*)
        )
        $
    ''', re.VERBOSE)

    def __init__(
            self,
            cls: type[_DataclassT],
            ignore_unknown: bool,
    ) -> None:
        self.reset()

        self.cls = cls
        self.fields = {}
        self.ignore_unknown = ignore_unknown

        for f in dataclasses.fields(cls):
            if f.init and (deb822_field := f.metadata.get('deb822')):
                self.fields[deb822_field.key] = f

    def reset(self) -> None:
        """Drop all accumulated data, ready for the next stanza."""
        self.data = {}
        self.current = None

    def line(self, linenr: int, line: str) -> None:
        """Feed one input line (header or continuation) into the state."""
        match = self._line_re.match(line)
        if not match:
            raise Deb822DecodeError(
                f'Not a header, not a continuation at line {linenr + 1}')
        elif cont := match.group('cont'):
            # Continuation line: extend the value of the latest field
            self.data[self.current] += '\n' + cont
        elif deb822_key := match.group('key'):
            field = self.fields.get(deb822_key)
            if not field and not self.ignore_unknown:
                raise Deb822DecodeError(
                    f'Unknown field "{deb822_key}" at line {linenr + 1}')

            self.current = field
            self.data[field] = match.group('value')
        else:
            raise NotImplementedError

    def generate(self) -> _DataclassT | None:
        """Instantiate the dataclass from accumulated data; None if empty."""
        if not self.data:
            return None

        kwargs: dict[str, Any] = {}
        for field, value in self.data.items():
            if field is None:
                # Value of an unknown field that was ignored
                continue

            # Pick the converter: explicit deb822 load hook, then the
            # default factory if it is a plain type, then str for
            # string-typed fields.
            field_factory: Optional[Callable[[str], Any]] = None
            if (deb822_field := field.metadata.get('deb822')) and (load := deb822_field.load):
                field_factory = load
            elif isinstance(field.default_factory, type):
                field_factory = field.default_factory
            elif field.type in ('str', 'Optional[str]'):
                field_factory = str
            else:
                raise RuntimeError(f'Unable to parse type {field.type}')

            if field_factory is not None:
                kwargs[field.name] = field_factory(value)

        self.reset()
        return self.cls(**kwargs)
|
||||
|
||||
|
||||
def read_deb822(
        cls: type[_DataclassT],
        file: IO[str],
        /,
        ignore_unknown: bool = False,
) -> Iterable[_DataclassT]:
    """Yield instances of *cls* parsed from deb822 stanzas in *file*.

    Comment lines are dropped; unknown fields raise Deb822DecodeError
    unless *ignore_unknown* is set.
    """
    state = Deb822DecodeState(cls, ignore_unknown)

    for linenr, raw in enumerate(file):
        line = raw.rstrip('\n')

        if not line:
            # Blank line terminates the current stanza
            if obj := state.generate():
                yield obj
        elif line.startswith('#'):
            # Strip comments rather than trying to preserve them
            continue
        else:
            state.line(linenr, line)

    # A final stanza may end at EOF without a trailing blank line
    if obj := state.generate():
        yield obj
|
||||
|
||||
|
||||
def write_deb822(
        objs: Iterable[_DataclassT],
        file: IO[str],
        /,
) -> None:
    """Write each dataclass instance as a deb822 stanza to *file*.

    Only truthy field values with 'deb822' metadata and a dump hook are
    emitted; each stanza is followed by a blank separator line.
    """
    for obj in objs:
        for field in dataclasses.fields(obj):
            value = getattr(obj, field.name, None)
            if not value:
                continue
            deb822_field = field.metadata.get('deb822')
            if deb822_field is None or (dump := deb822_field.dump) is None:
                continue
            # Fold embedded newlines into deb822 continuation lines
            folded = '\n '.join(dump(value).strip().split('\n'))
            file.write(f'{deb822_field.key}: {folded}\n')
        file.write('\n')
|
113
debian/lib/python/debian_linux/dataclasses_extra.py
vendored
Normal file
113
debian/lib/python/debian_linux/dataclasses_extra.py
vendored
Normal file
|
@ -0,0 +1,113 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from dataclasses import (
|
||||
fields,
|
||||
is_dataclass,
|
||||
replace,
|
||||
)
|
||||
from typing import (
|
||||
Protocol,
|
||||
TypeVar,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from _typeshed import DataclassInstance as _DataclassInstance
|
||||
|
||||
class _HasName(Protocol, _DataclassInstance):
|
||||
name: str
|
||||
|
||||
_DataclassT = TypeVar('_DataclassT', bound=_DataclassInstance)
|
||||
_HasNameT = TypeVar('_HasNameT', bound=_HasName)
|
||||
|
||||
|
||||
def default(
        cls: type[_DataclassT],
        /,
) -> _DataclassT:
    """Instantiate *cls*, applying per-field 'default' metadata overrides."""
    overrides = {
        field.name: field.metadata['default']
        for field in fields(cls)
        if 'default' in field.metadata
    }
    return cls(**overrides)
|
||||
|
||||
|
||||
def merge(
        self: _DataclassT,
        other: _DataclassT | None, /,
) -> _DataclassT:
    """Return a copy of *self* with fields merged from *other*.

    The merge strategy is chosen per field: bools are overridden,
    'assoclist' metadata merges by item name, nested dataclasses merge
    recursively, lists concatenate, dicts union, and Optional fields
    (default None) take the other side's value when set.
    """
    if other is None:
        return self

    updates = {}

    for field in fields(self):
        if not field.init:
            continue

        # A plain-type default factory doubles as the effective field type
        field_default_type = object
        if isinstance(field.default_factory, type):
            field_default_type = field.default_factory

        self_value = getattr(self, field.name)
        other_value = getattr(other, field.name)

        if field.name == 'name':
            # Merging only makes sense between objects of the same name
            assert self_value == other_value
        elif field.type == 'bool':
            updates[field.name] = other_value
        elif field.metadata.get('merge') == 'assoclist':
            updates[field.name] = _merge_assoclist(self_value, other_value)
        elif is_dataclass(field_default_type):
            updates[field.name] = merge(self_value, other_value)
        elif issubclass(field_default_type, list):
            updates[field.name] = self_value + other_value
        elif issubclass(field_default_type, dict):
            updates[field.name] = self_value | other_value
        elif field.default is None:
            if other_value is not None:
                updates[field.name] = other_value
        else:
            raise RuntimeError(f'Unable to merge for type {field.type}')

    return replace(self, **updates)
|
||||
|
||||
|
||||
def merge_default(
        cls: type[_DataclassT],
        /,
        *others: _DataclassT,
) -> _DataclassT:
    """Fold *others*, left to right, onto the metadata defaults of *cls*."""
    result: _DataclassT = default(cls)
    for other in others:
        result = merge(result, other)
    return result
|
||||
|
||||
|
||||
def _merge_assoclist(
|
||||
self_list: list[_HasNameT],
|
||||
other_list: list[_HasNameT],
|
||||
/,
|
||||
) -> list[_HasNameT]:
|
||||
'''
|
||||
Merge lists where each item got a "name" attribute
|
||||
'''
|
||||
if not self_list:
|
||||
return other_list
|
||||
if not other_list:
|
||||
return self_list
|
||||
|
||||
ret: list[_HasNameT] = []
|
||||
other_dict = {
|
||||
i.name: i
|
||||
for i in other_list
|
||||
}
|
||||
for i in self_list:
|
||||
if i.name in other_dict:
|
||||
ret.append(merge(i, other_dict.pop(i.name)))
|
||||
else:
|
||||
ret.append(i)
|
||||
ret.extend(other_dict.values())
|
||||
return ret
|
716
debian/lib/python/debian_linux/debian.py
vendored
Normal file
716
debian/lib/python/debian_linux/debian.py
vendored
Normal file
|
@ -0,0 +1,716 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import enum
|
||||
import itertools
|
||||
import os.path
|
||||
import re
|
||||
import shlex
|
||||
import typing
|
||||
from typing import (
|
||||
Iterable,
|
||||
Optional,
|
||||
Self,
|
||||
TypeAlias,
|
||||
)
|
||||
|
||||
from .dataclasses_deb822 import field_deb822
|
||||
|
||||
|
||||
class Changelog(list):
|
||||
_top_rules = r"""
|
||||
^
|
||||
(?P<source>
|
||||
\w[-+0-9a-z.]+
|
||||
)
|
||||
[ ]
|
||||
\(
|
||||
(?P<version>
|
||||
[^\(\)\ \t]+
|
||||
)
|
||||
\)
|
||||
\s+
|
||||
(?P<distribution>
|
||||
[-+0-9a-zA-Z.]+
|
||||
)
|
||||
\;\s+urgency=
|
||||
(?P<urgency>
|
||||
\w+
|
||||
)
|
||||
(?:,|\n)
|
||||
"""
|
||||
_top_re = re.compile(_top_rules, re.X)
|
||||
_bottom_rules = r"""
|
||||
^
|
||||
[ ]--[ ]
|
||||
(?P<maintainer>
|
||||
\S(?:[ ]?\S)*
|
||||
)
|
||||
[ ]{2}
|
||||
(?P<date>
|
||||
(.*)
|
||||
)
|
||||
\n
|
||||
"""
|
||||
_bottom_re = re.compile(_bottom_rules, re.X)
|
||||
_ignore_re = re.compile(r'^(?: |\s*\n)')
|
||||
|
||||
class Entry(object):
|
||||
__slot__ = ('distribution', 'source', 'version', 'urgency',
|
||||
'maintainer', 'date')
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
for key, value in kwargs.items():
|
||||
setattr(self, key, value)
|
||||
|
||||
def __init__(self, dir='', version=None, file=None) -> None:
|
||||
if version is None:
|
||||
version = Version
|
||||
if file:
|
||||
self._parse(version, file)
|
||||
else:
|
||||
with open(os.path.join(dir, "debian/changelog"),
|
||||
encoding="UTF-8") as f:
|
||||
self._parse(version, f)
|
||||
|
||||
def _parse(self, version, f) -> None:
|
||||
top_match = None
|
||||
line_no = 0
|
||||
|
||||
for line in f:
|
||||
line_no += 1
|
||||
|
||||
if self._ignore_re.match(line):
|
||||
pass
|
||||
elif top_match is None:
|
||||
top_match = self._top_re.match(line)
|
||||
if not top_match:
|
||||
raise Exception('invalid top line %d in changelog' %
|
||||
line_no)
|
||||
try:
|
||||
v = version(top_match.group('version'))
|
||||
except Exception:
|
||||
if not len(self):
|
||||
raise
|
||||
v = Version(top_match.group('version'))
|
||||
else:
|
||||
bottom_match = self._bottom_re.match(line)
|
||||
if not bottom_match:
|
||||
raise Exception('invalid bottom line %d in changelog' %
|
||||
line_no)
|
||||
|
||||
self.append(self.Entry(
|
||||
distribution=top_match.group('distribution'),
|
||||
source=top_match.group('source'),
|
||||
version=v,
|
||||
urgency=top_match.group('urgency'),
|
||||
maintainer=bottom_match.group('maintainer'),
|
||||
date=bottom_match.group('date')))
|
||||
top_match = bottom_match = None
|
||||
|
||||
|
||||
class Version(object):
    """A dpkg-style version string: [epoch:]upstream[-revision]."""

    revision: str | None

    _epoch_re = re.compile(r'\d+$')
    _upstream_re = re.compile(r'[0-9][A-Za-z0-9.+\-:~]*$')
    _revision_re = re.compile(r'[A-Za-z0-9+.~]+$')

    def __init__(self, version) -> None:
        """Parse *version*; raise RuntimeError if it is not well-formed."""
        # The epoch is everything before the first ':'; the revision is
        # everything after the last '-'.
        head, colon, tail = version.partition(':')
        epoch, rest = (head, tail) if colon else (None, version)
        front, dash, back = rest.rpartition('-')
        upstream, revision = (front, back) if dash else (rest, None)
        if (epoch is not None and not self._epoch_re.match(epoch)) or \
                not self._upstream_re.match(upstream) or \
                (revision is not None and not self._revision_re.match(revision)):
            raise RuntimeError(u"Invalid debian version")
        self.epoch = epoch and int(epoch)
        self.upstream = upstream
        self.revision = revision

    def __str__(self) -> str:
        return self.complete

    @property
    def complete(self) -> str:
        """The full version string, including the epoch if present."""
        if self.epoch is not None:
            return u"%d:%s" % (self.epoch, self.complete_noepoch)
        return self.complete_noepoch

    @property
    def complete_noepoch(self) -> str:
        """The version string without the epoch."""
        if self.revision is not None:
            return u"%s-%s" % (self.upstream, self.revision)
        return self.upstream

    @property
    def debian(self) -> str | None:
        # Deprecated alias for .revision
        from warnings import warn
        warn(u"debian argument was replaced by revision", DeprecationWarning,
             stacklevel=2)
        return self.revision
|
||||
|
||||
|
||||
class VersionLinux(Version):
    """A Debian linux package version, split into kernel-specific parts."""

    # Upstream part: "6.12" plus optional ".33"/"-rc2" update, optional
    # "~modifier" and optional ".dfsg.N" suffix.
    _upstream_re = re.compile(r"""
    (?P<version>
        \d+\.\d+
    )
    (?P<update>
        (?:\.\d+)?
        (?:-[a-z]+\d+)?
    )
    (?:
        ~
        (?P<modifier>
            .+?
        )
    )?
    (?:
        \.dfsg\.
        (?P<dfsg>
            \d+
        )
    )?
    $
    """, re.X)

    def __init__(self, version) -> None:
        super().__init__(version)
        up_match = self._upstream_re.match(self.upstream)
        assert self.revision is not None
        rev_match = self._revision_re.match(self.revision)
        if up_match is None or rev_match is None:
            raise RuntimeError(u"Invalid debian linux version")
        groups = up_match.groupdict()
        self.linux_modifier = groups['modifier']
        self.linux_version = groups['version']
        if groups['modifier'] is not None:
            # A pre-release modifier excludes a stable update part
            assert not groups['update']
            self.linux_upstream = '-'.join((groups['version'],
                                            groups['modifier']))
        else:
            self.linux_upstream = groups['version']
        self.linux_upstream_full = self.linux_upstream + groups['update']
        self.linux_dfsg = groups['dfsg']
|
||||
|
||||
|
||||
class PackageArchitecture(set[str]):
    """A set of architecture names, parsed from a space-separated string."""

    def __init__(
            self,
            v: str | Iterable[str] | None = None,
            /,
    ) -> None:
        if v:
            items = re.split(r'\s+', v.strip()) if isinstance(v, str) else v
            self.update(items)

    def __str__(self) -> str:
        """Serialise as a sorted, space-separated list."""
        return ' '.join(sorted(self))
|
||||
|
||||
|
||||
class PackageDescription:
|
||||
short: list[str]
|
||||
long: list[str]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
v: str | Self | None = None,
|
||||
/,
|
||||
) -> None:
|
||||
self.short = []
|
||||
self.long = []
|
||||
|
||||
if v:
|
||||
if isinstance(v, str):
|
||||
desc_split = v.split('\n', 1)
|
||||
self.append_short(desc_split[0])
|
||||
if len(desc_split) == 2:
|
||||
self.append(desc_split[1])
|
||||
else:
|
||||
self.short.extend(v.short)
|
||||
self.long.extend(v.long)
|
||||
|
||||
def __str__(self) -> str:
|
||||
from .utils import TextWrapper
|
||||
wrap = TextWrapper(width=74, fix_sentence_endings=True).wrap
|
||||
short = ', '.join(self.short)
|
||||
long_pars = []
|
||||
for i in self.long:
|
||||
long_pars.append(wrap(i))
|
||||
long = '\n.\n'.join('\n'.join(i) for i in long_pars)
|
||||
return short + '\n' + long if long else short
|
||||
|
||||
def append(self, long: str) -> None:
|
||||
long = long.strip()
|
||||
if long:
|
||||
self.long.extend(long.split('\n.\n'))
|
||||
|
||||
def append_short(self, short: str) -> None:
|
||||
for i in [i.strip() for i in short.split(',')]:
|
||||
if i:
|
||||
self.short.append(i)
|
||||
|
||||
def extend(self, desc: PackageDescription) -> None:
|
||||
self.short.extend(desc.short)
|
||||
self.long.extend(desc.long)
|
||||
|
||||
|
||||
class PackageRelationEntryOperator(enum.StrEnum):
|
||||
OP_LT = '<<'
|
||||
OP_LE = '<='
|
||||
OP_EQ = '='
|
||||
OP_NE = '!='
|
||||
OP_GE = '>='
|
||||
OP_GT = '>>'
|
||||
|
||||
def __neg__(self) -> PackageRelationEntryOperator:
|
||||
return typing.cast(PackageRelationEntryOperator, {
|
||||
self.OP_LT: self.OP_GE,
|
||||
self.OP_LE: self.OP_GT,
|
||||
self.OP_EQ: self.OP_NE,
|
||||
self.OP_NE: self.OP_EQ,
|
||||
self.OP_GE: self.OP_LT,
|
||||
self.OP_GT: self.OP_LE,
|
||||
}[self])
|
||||
|
||||
|
||||
class PackageRelationEntry:
    # One alternative within a package relation, e.g.
    # "foo (>= 1.0) [amd64] <!nocheck>".
    name: str
    operator: typing.Optional[PackageRelationEntryOperator]
    version: typing.Optional[str]
    arches: PackageArchitecture
    restrictions: PackageBuildprofile

    # Parses: name, optional "(op version)", optional "[arch list]",
    # optional trailing "<build profile>" groups.
    __re = re.compile(
        r'^(?P<name>\S+)'
        r'(?: \((?P<operator><<|<=|=|!=|>=|>>)\s*(?P<version>[^)]+)\))?'
        r'(?: \[(?P<arches>[^]]+)\])?'
        r'(?P<restrictions>(?: <[^>]+>)*)$'
    )

    def __init__(
            self,
            v: str | Self,
            /, *,
            name: str | None = None,
            arches: set[str] | None = None,
            restrictions: PackageBuildprofile | str | None = None,
    ) -> None:
        # Build either by parsing a dependency string or by copying
        # another entry; keyword arguments override individual parts.
        if isinstance(v, str):
            match = self.__re.match(v)
            if not match:
                raise RuntimeError('Unable to parse dependency "%s"' % v)

            self.name = name or match['name']

            if operator := match['operator']:
                self.operator = PackageRelationEntryOperator(operator)
            else:
                self.operator = None

            self.version = match['version']
            self.arches = PackageArchitecture(arches or match['arches'])
            if isinstance(restrictions, PackageBuildprofile):
                # Copy so later updates do not mutate the caller's object
                self.restrictions = restrictions.copy()
            else:
                self.restrictions = PackageBuildprofile.parse(
                    restrictions or match['restrictions'],
                )

        else:
            # Copy constructor from another entry
            self.name = name or v.name
            self.operator = v.operator
            self.version = v.version
            self.arches = PackageArchitecture(arches or v.arches)
            if isinstance(restrictions, str):
                self.restrictions = PackageBuildprofile.parse(restrictions)
            else:
                self.restrictions = (restrictions or v.restrictions).copy()

    def __str__(self):
        # Serialise back to control-file dependency syntax
        ret = [self.name]
        if self.operator and self.version:
            ret.append(f'({self.operator} {self.version})')
        if self.arches:
            ret.append(f'[{self.arches}]')
        if self.restrictions:
            ret.append(str(self.restrictions))
        return ' '.join(ret)
|
||||
|
||||
|
||||
class PackageRelationGroup(list[PackageRelationEntry]):
    """Alternatives ("a | b") within a single package relation."""

    def __init__(
            self,
            v: Iterable[PackageRelationEntry | str] | str | Self | None = None,
            /, *,
            arches: set[str] | None = None,
    ) -> None:
        if v:
            if isinstance(v, str):
                items: Iterable = (s.strip() for s in re.split(r'\|', v.strip()))
            else:
                items = v
            self.extend(PackageRelationEntry(item, arches=arches)
                        for item in items if item)

    def __str__(self) -> str:
        return ' | '.join(str(entry) for entry in self)

    def _merge_eq(self, v: PackageRelationGroup) -> typing.Optional[PackageRelationGroup]:
        """Return self if *v* matches entry-wise on name/operator/version."""
        for mine, theirs in zip(self, v):
            if (mine.name != theirs.name
                    or mine.operator != theirs.operator
                    or mine.version != theirs.version):
                return None
        return self
|
||||
|
||||
|
||||
class PackageRelation(list[PackageRelationGroup]):
    """A comma-separated package relation (list of alternative groups)."""

    Init: TypeAlias = PackageRelationGroup | Iterable[PackageRelationEntry] | str

    def __init__(
            self,
            v: Iterable[Init] | str | Self | None = None,
            /, *,
            arches: set[str] | None = None,
    ) -> None:
        if v:
            if isinstance(v, str):
                items: Iterable = (s.strip() for s in re.split(r',', v.strip()))
            else:
                items = v
            self.extend(PackageRelationGroup(item, arches=arches)
                        for item in items if item)

    def __str__(self) -> str:
        return ', '.join(str(group) for group in self)

    def _merge_eq(self, v: PackageRelationGroup) -> typing.Optional[PackageRelationGroup]:
        """Find an existing group matching *v* on name/operator/version."""
        for group in self:
            if group._merge_eq(v):
                return group
        return None

    def merge(
            self,
            v: Init | str,
            /,
    ) -> None:
        """Add *v*; fold arches/profiles into a matching existing group."""
        group = PackageRelationGroup(v)
        if existing := self._merge_eq(group):
            for mine, theirs in zip(existing, group):
                mine.arches |= theirs.arches
                mine.restrictions.update(theirs.restrictions)
        else:
            super().append(group)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class PackageBuildprofileEntry:
|
||||
pos: set[str] = dataclasses.field(default_factory=set)
|
||||
neg: set[str] = dataclasses.field(default_factory=set)
|
||||
|
||||
__re = re.compile(r'^<(?P<profiles>[a-z0-9. !-]+)>$')
|
||||
|
||||
def copy(self) -> Self:
|
||||
return self.__class__(
|
||||
pos=set(self.pos),
|
||||
neg=set(self.neg),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def parse(cls, v: str, /) -> Self:
|
||||
match = cls.__re.match(v)
|
||||
if not match:
|
||||
raise RuntimeError('Unable to parse build profile "%s"' % v)
|
||||
|
||||
ret = cls()
|
||||
for i in re.split(r' ', match.group('profiles')):
|
||||
if i:
|
||||
if i[0] == '!':
|
||||
ret.neg.add(i[1:])
|
||||
else:
|
||||
ret.pos.add(i)
|
||||
return ret
|
||||
|
||||
def __eq__(self, other: object, /) -> bool:
|
||||
if not isinstance(other, PackageBuildprofileEntry):
|
||||
return NotImplemented
|
||||
return self.pos == other.pos and self.neg == other.neg
|
||||
|
||||
def isdisjoint(self, other: Self, /) -> bool:
|
||||
return not (self.issubset(other)) and not (self.issuperset(other))
|
||||
|
||||
def issubset(self, other: Self, /) -> bool:
|
||||
'''
|
||||
Test wether this build profile would select a subset of packages.
|
||||
|
||||
For positive profile matches: Ading profiles will select a subset.
|
||||
For negative profile matches: Removing profiles will select a subset.
|
||||
'''
|
||||
return self.pos >= other.pos and self.neg <= other.neg
|
||||
__le__ = issubset
|
||||
|
||||
def issuperset(self, other: Self, /) -> bool:
|
||||
'''
|
||||
Test wether this build profile would select a superset of packages.
|
||||
|
||||
For positive profile matches: Removing profiles will select a superset.
|
||||
For negative profile matches: Adding profiles will select a superset.
|
||||
'''
|
||||
return self.pos <= other.pos and self.neg >= other.neg
|
||||
__ge__ = issuperset
|
||||
|
||||
def update(self, other: Self, /) -> None:
|
||||
'''
|
||||
Update the build profiles, adding entries from other, merging if possible.
|
||||
|
||||
Negating entries (profile vs !profile) are completely removed.
|
||||
All others remain if they are used on both sides.
|
||||
'''
|
||||
diff = (self.pos & other.neg) | (self.neg & other.pos)
|
||||
self.pos &= other.pos - diff
|
||||
self.neg &= other.neg - diff
|
||||
__ior__ = update
|
||||
|
||||
def __str__(self) -> str:
|
||||
s = ' '.join(itertools.chain(
|
||||
sorted(self.pos),
|
||||
(f'!{i}' for i in sorted(self.neg)),
|
||||
))
|
||||
return f'<{s}>'
|
||||
|
||||
|
||||
class PackageBuildprofile(list[PackageBuildprofileEntry]):
    """A sequence of build-profile restriction groups."""

    __re = re.compile(r' *(<[^>]+>)(?: +|$)')

    def copy(self) -> Self:
        """Deep-copy the list and its entries."""
        return self.__class__(entry.copy() for entry in self)

    @classmethod
    def parse(cls, v: str, /) -> Self:
        """Parse zero or more "<...>" groups from a string."""
        ret = cls()
        for match in cls.__re.finditer(v):
            ret.append(PackageBuildprofileEntry.parse(match.group(1)))
        return ret

    def update(self, v: Self, /) -> None:
        """Merge each entry of *v* into a non-disjoint entry, or append it."""
        for theirs in v:
            for mine in self:
                if not mine.isdisjoint(theirs):
                    mine.update(theirs)
                    break
            else:
                self.append(theirs)
    __ior__ = update

    def __str__(self) -> str:
        return ' '.join(str(entry) for entry in self)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class _BasePackage:
    # Control-file fields shared by source and binary stanzas.
    name: Optional[str]
    architecture: PackageArchitecture = field_deb822(
        'Architecture',
        default_factory=PackageArchitecture,
    )
    section: Optional[str] = field_deb822(
        'Section',
        default=None,
    )
    priority: Optional[str] = field_deb822(
        'Priority',
        default=None,
    )
|
||||
|
||||
|
||||
@dataclasses.dataclass
class SourcePackage(_BasePackage):
    # The "Source" stanza of debian/control; each field maps to the
    # deb822 key given as first argument of field_deb822().
    name: Optional[str] = field_deb822(
        'Source',
        default=None,
    )
    maintainer: Optional[str] = field_deb822(
        'Maintainer',
        default=None,
    )
    uploaders: Optional[str] = field_deb822(
        'Uploaders',
        default=None,
    )
    standards_version: Optional[str] = field_deb822(
        'Standards-Version',
        default=None,
    )
    build_depends: PackageRelation = field_deb822(
        'Build-Depends',
        default_factory=PackageRelation,
    )
    build_depends_arch: PackageRelation = field_deb822(
        'Build-Depends-Arch',
        default_factory=PackageRelation,
    )
    build_depends_indep: PackageRelation = field_deb822(
        'Build-Depends-Indep',
        default_factory=PackageRelation,
    )
    rules_requires_root: Optional[str] = field_deb822(
        'Rules-Requires-Root',
        default=None,
    )
    homepage: Optional[str] = field_deb822(
        'Homepage',
        default=None,
    )
    vcs_browser: Optional[str] = field_deb822(
        'Vcs-Browser',
        default=None,
    )
    vcs_git: Optional[str] = field_deb822(
        'Vcs-Git',
        default=None,
    )
    autobuild: Optional[str] = field_deb822(
        'XS-Autobuild',
        default=None,
    )
|
||||
|
||||
|
||||
@dataclasses.dataclass
class BinaryPackage(_BasePackage):
    # A "Package" stanza of debian/control.  Fields with deb822_dump=None
    # are read from templates but never written to the real control file;
    # "meta_*" fields are internal to the gencontrol machinery.
    name: str = field_deb822('Package')
    # Build-Depends* fields aren't allowed for binary packages in
    # the real control file, but we move them to the source
    # package
    build_depends: PackageRelation = field_deb822(
        'Build-Depends',
        default_factory=PackageRelation,
        deb822_dump=None,
    )
    package_type: Optional[str] = field_deb822(
        'Package-Type',
        default=None,
    )  # for udeb only
    build_profiles: PackageBuildprofile = field_deb822(
        'Build-Profiles',
        deb822_load=PackageBuildprofile.parse,
        default_factory=PackageBuildprofile,
    )
    built_using: PackageRelation = field_deb822(
        'Built-Using',
        default_factory=PackageRelation,
    )
    provides: PackageRelation = field_deb822(
        'Provides',
        default_factory=PackageRelation,
    )
    pre_depends: PackageRelation = field_deb822(
        'Pre-Depends',
        default_factory=PackageRelation,
    )
    depends: PackageRelation = field_deb822(
        'Depends',
        default_factory=PackageRelation,
    )
    recommends: PackageRelation = field_deb822(
        'Recommends',
        default_factory=PackageRelation,
    )
    suggests: PackageRelation = field_deb822(
        'Suggests',
        default_factory=PackageRelation,
    )
    replaces: PackageRelation = field_deb822(
        'Replaces',
        default_factory=PackageRelation,
    )
    breaks: PackageRelation = field_deb822(
        'Breaks',
        default_factory=PackageRelation,
    )
    conflicts: PackageRelation = field_deb822(
        'Conflicts',
        default_factory=PackageRelation,
    )
    multi_arch: Optional[str] = field_deb822(
        'Multi-Arch',
        default=None,
    )
    udeb_kernel_version: Optional[str] = field_deb822(
        'Kernel-Version',
        default=None,
    )  # for udeb only
    description: PackageDescription = field_deb822(
        'Description',
        default_factory=PackageDescription,
    )
    # Internal field, not part of the deb822 stanza
    meta_architectures: PackageArchitecture = dataclasses.field(
        default_factory=PackageArchitecture,
    )
    meta_rules_check_packages: bool = False
    # Parsed as "KEY=VALUE ..." shell words into a dict
    meta_rules_makeflags: dict = field_deb822(
        'Meta-Rules-Makeflags',
        default_factory=dict,
        deb822_load=lambda v: dict(i.split('=', 1) for i in shlex.split(v)),
        deb822_dump=None,
    )
    meta_rules_ruleids: dict = dataclasses.field(default_factory=dict)
    meta_rules_target: Optional[str] = field_deb822(
        'Meta-Rules-Target',
        default=None,
        deb822_dump=None,
    )
    meta_sign_package: Optional[str] = field_deb822(
        'Meta-Sign-Package',
        default=None,
        deb822_dump=None,
    )
    meta_sign_files: list[str] = field_deb822(
        'Meta-Sign-Files',
        default_factory=list,
        deb822_load=lambda v: v.split(),
        deb822_dump=None,
    )
|
||||
|
||||
|
||||
@dataclasses.dataclass
class TestsControl:
    # One stanza of debian/tests/control (autopkgtest test definitions).
    tests: Optional[str] = field_deb822(
        'Tests',
        default=None,
    )
    test_command: Optional[str] = field_deb822(
        'Test-Command',
        default=None,
    )
    architecture: PackageArchitecture = field_deb822(
        'Architecture',
        default_factory=PackageArchitecture,
    )
    restrictions: Optional[str] = field_deb822(
        'Restrictions',
        default=None,
    )
    features: Optional[str] = field_deb822(
        'Features',
        default=None,
    )
    depends: PackageRelation = field_deb822(
        'Depends',
        default_factory=PackageRelation,
    )
    tests_directory: Optional[str] = field_deb822(
        'Tests-Directory',
        default=None,
    )
    classes: Optional[str] = field_deb822(
        'Classes',
        default=None,
    )
|
102
debian/lib/python/debian_linux/firmware.py
vendored
Normal file
102
debian/lib/python/debian_linux/firmware.py
vendored
Normal file
|
@ -0,0 +1,102 @@
|
|||
import re
|
||||
|
||||
|
||||
class FirmwareFile(object):
    """Metadata for one firmware binary listed in the WHENCE file."""

    def __init__(self, binary, desc=None, source=None, version=None) -> None:
        self.binary = binary    # file name of the firmware blob
        self.desc = desc        # free-form description, may be None
        self.source = source    # associated source files, may be None
        self.version = version  # version string, may be None
|
||||
|
||||
|
||||
class FirmwareGroup(object):
    """A driver's firmware files together with their licence text."""

    def __init__(self, driver, files, licence) -> None:
        self.driver, self.files, self.licence = driver, files, licence
|
||||
|
||||
|
||||
class FirmwareWhence(list):
    # Parses the linux-firmware WHENCE file into FirmwareGroup entries.
    def __init__(self, file) -> None:
        self.read(file)

    @staticmethod
    def _unquote(name):
        # Strip one pair of surrounding double quotes, if present
        if len(name) >= 3 and name[0] == '"' and name[-1] == '"':
            name = name[1:-1]
        return name

    def read(self, file) -> None:
        # Scanner state: the current driver, its files keyed by binary
        # name, the accumulated licence text, and the fields of the
        # file entry currently being collected.
        in_header = True
        driver = None
        files = {}
        licence = None
        binary = []
        desc = None
        source = []
        version = None

        for line in file:
            if line.startswith('----------'):
                # Separator line: leaves the header or ends a group
                if in_header:
                    in_header = False
                else:
                    # Finish old group
                    if driver and files:
                        self.append(FirmwareGroup(driver, files, licence))
                    driver = None
                    files = {}
                    licence = None
                continue

            if in_header:
                continue

            if line == '\n':
                # End of field; end of file fields
                for b in binary:
                    # XXX The WHENCE file isn't yet consistent in its
                    # association of binaries and their sources and
                    # metadata. This associates all sources and
                    # metadata in a group with each binary.
                    files[b] = FirmwareFile(b, desc, source, version)
                binary = []
                desc = None
                source = []
                version = None
                continue

            match = re.match(
                r'(Driver|(?:Raw)?File|Info|Licen[cs]e|Source|Version'
                r'|Original licen[cs]e info(?:rmation)?):\s*(.*)\n',
                line)
            if match:
                # If we've seen a license for the previous group,
                # start a new group
                if licence:
                    self.append(FirmwareGroup(driver, files, licence))
                    files = {}
                    licence = None
                keyword, value = match.group(1, 2)
                if keyword == 'Driver':
                    driver = value.split(' ')[0].lower()
                elif keyword in ['File', 'RawFile']:
                    # Optional " -- description" after the file name
                    match = re.match(r'("[^"\n]+"|\S+)(?:\s+--\s+(.*))?', value)
                    binary.append(self._unquote(match.group(1)))
                    desc = match.group(2)
                elif keyword in ['Info', 'Version']:
                    version = value
                elif keyword == 'Source':
                    source.append(self._unquote(value))
                else:
                    licence = value
            elif licence is not None:
                # Continuation of a licence block; strip C-comment framing
                licence = (licence + '\n'
                           + re.sub(r'^(?:[/ ]\*| \*/)?\s*(.*?)\s*$', r'\1',
                                    line))

        # Finish last group if non-empty
        for b in binary:
            files[b] = FirmwareFile(b, desc, source, version)
        if driver:
            self.append(FirmwareGroup(driver, files, licence))
|
670
debian/lib/python/debian_linux/gencontrol.py
vendored
Normal file
670
debian/lib/python/debian_linux/gencontrol.py
vendored
Normal file
|
@ -0,0 +1,670 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import itertools
|
||||
import pathlib
|
||||
import re
|
||||
from collections.abc import (
|
||||
Generator,
|
||||
)
|
||||
from typing import (
|
||||
Any,
|
||||
Iterable,
|
||||
Iterator,
|
||||
IO,
|
||||
)
|
||||
|
||||
from .config_v2 import (
|
||||
ConfigMerged,
|
||||
ConfigMergedDebianarch,
|
||||
ConfigMergedFeatureset,
|
||||
ConfigMergedFlavour,
|
||||
)
|
||||
from .dataclasses_deb822 import write_deb822
|
||||
from .debian import Changelog, PackageArchitecture, \
|
||||
Version, SourcePackage, BinaryPackage
|
||||
from .utils import Templates
|
||||
|
||||
|
||||
class Makefile:
    """Collects named make rules and emits a debian/rules.gen-style makefile."""

    rules: dict[str, MakefileRule]

    def __init__(self) -> None:
        self.rules = {}

    def add_cmds(self, name: str, cmds) -> None:
        """Append literal command lines to the rule *name*, creating it if needed."""
        rule = self.rules.setdefault(name, MakefileRule(name))
        rule.add_cmds(MakefileRuleCmdsSimple(cmds))

    def add_deps(self, name: str, deps) -> None:
        """Add prerequisites to rule *name*; every dependency also gets an (empty) rule."""
        rule = self.rules.setdefault(name, MakefileRule(name))
        rule.add_deps(deps)

        # Ensure each dependency exists as a target so make never sees an
        # undefined prerequisite.
        for i in deps:
            self.rules.setdefault(i, MakefileRule(i))

    # NOTE: the original used mutable `set()` defaults, which is the classic
    # shared-mutable-default pitfall.  frozenset() is immutable and supports
    # everything the values are used for (truthiness, |, sorted), so this is
    # backward compatible.
    def add_rules(self, name: str, target, makeflags,
                  packages=frozenset(), packages_extra=frozenset()) -> None:
        """Append a debian/rules.real invocation for *target* to the rule *name*."""
        rule = self.rules.setdefault(name, MakefileRule(name))
        rule.add_cmds(MakefileRuleCmdsRules(target, makeflags, packages, packages_extra))

    def write(self, out) -> None:
        """Write the fixed preamble followed by all rules, sorted by name."""
        out.write('''\
.NOTPARALLEL:
.PHONY:
packages_enabled := $(shell dh_listpackages)
define if_package
$(if $(filter $(1),$(packages_enabled)),$(2))
endef
''')
        # The key equals rule.name; only the rule itself is needed here.
        for _, rule in sorted(self.rules.items()):
            rule.write(out)
|
||||
|
||||
|
||||
class MakefileRule:
    """A single makefile rule: target name, prerequisites and command blocks."""

    name: str
    cmds: list[MakefileRuleCmds]
    deps: set[str]

    def __init__(self, name: str) -> None:
        self.name = name
        self.cmds = []
        self.deps = set()

    def add_cmds(self, cmds: MakefileRuleCmds) -> None:
        """Append one command block object to this rule."""
        self.cmds.append(cmds)

    def add_deps(self, deps: Iterable[str]) -> None:
        """Add prerequisites (duplicates are collapsed by the set)."""
        self.deps.update(deps)

    def write(self, out: IO) -> None:
        """Emit ``name:dep dep ...`` followed by any command blocks.

        The original had an if/else whose two branches both emitted the
        identical header line; the header is written once unconditionally,
        which is behaviourally the same.
        """
        out.write(f'{self.name}:{" ".join(sorted(self.deps))}\n')
        for c in self.cmds:
            c.write(out)
|
||||
|
||||
|
||||
class MakefileRuleCmds:
    """Abstract base for a block of commands inside a makefile rule."""

    def write(self, out: IO) -> None:
        """Emit the commands to *out*; concrete subclasses must override this."""
        raise NotImplementedError
|
||||
|
||||
|
||||
class MakefileRuleCmdsRules(MakefileRuleCmds):
    """A ``$(MAKE) -f debian/rules.real`` invocation, optionally guarded by
    ``if_package`` so it only runs when one of its packages is enabled."""

    def __init__(self, target, makeflags, packages, packages_extra) -> None:
        self.target = target
        self.makeflags = makeflags.copy()
        self.packages = packages
        self.packages_extra = packages_extra

        everything = packages | packages_extra
        if everything:
            if len(everything) == 1:
                # Exactly one package: install straight into its package dir.
                only, = everything
                self.makeflags['PACKAGE_NAME'] = only
                self.makeflags['DESTDIR'] = f'$(CURDIR)/debian/{only}'
            else:
                # Several packages share one build: stage via debian/tmp.
                self.makeflags['DESTDIR'] = '$(CURDIR)/debian/tmp'

            self.makeflags['DH_OPTIONS'] = ' '.join(
                f'-p{pkg}' for pkg in sorted(everything))

    def write(self, out: IO) -> None:
        """Emit the (possibly guarded) recursive make command line."""
        cmd = f'$(MAKE) -f debian/rules.real {self.target} {self.makeflags}'
        if not self.packages:
            out.write(f'\t{cmd}\n')
        else:
            guard = ' '.join(sorted(self.packages))
            out.write(f'\t$(call if_package, {guard}, {cmd})\n')
|
||||
|
||||
|
||||
class MakefileRuleCmdsSimple(MakefileRuleCmds):
    """Literal shell commands, emitted one per line with a leading tab."""

    cmds: list[str]

    def __init__(self, cmds: list[str]) -> None:
        self.cmds = cmds

    def write(self, out: IO) -> None:
        out.writelines(f'\t{cmd}\n' for cmd in self.cmds)
|
||||
|
||||
|
||||
class MakeFlags(dict):
    """A dict of make variables rendered as sorted ``VAR='value'`` assignments."""

    def __str__(self) -> str:
        return ' '.join(f"{key}='{value}'"
                        for key, value in sorted(self.items()))

    def copy(self) -> MakeFlags:
        # Preserve the subclass type; plain dict.copy() would return a dict.
        return self.__class__(dict.copy(self))
|
||||
|
||||
|
||||
class PackagesBundle:
    """One bundle of generated packaging files: a source stanza, its binary
    packages, per-package maintainer scripts and a generated rules makefile.

    A bundle with a name writes under ``debian/generated.<name>/``; the
    unnamed bundle writes directly under ``debian/``.
    """

    class BinaryPackages(dict[str, BinaryPackage]):
        def add(self, package: BinaryPackage) -> BinaryPackage:
            # First definition of a name wins; the stored package is returned
            # so callers always mutate the canonical instance.
            return super().setdefault(package.name, package)

    name: str | None
    templates: Templates
    base: pathlib.Path
    makefile: Makefile
    source: SourcePackage
    packages: BinaryPackages

    def __init__(
            self,
            name: str | None,
            source_template: str,
            replace: dict[str, str],
            templates: Templates,
            base: pathlib.Path = pathlib.Path('debian'),
            override_name: str | None = None,
    ) -> None:
        self.name = name
        self.templates = templates
        self.base = base
        self.makefile = Makefile()
        # The source template yields (at least) one source stanza; only the
        # first is used.
        self.source = list(self.templates.get_source_control(source_template, replace))[0]
        self.packages = self.BinaryPackages()

        if not self.source.name:
            self.source.name = override_name

    def add(
            self,
            pkgid: str,
            ruleid: Iterable[str],
            makeflags: MakeFlags,
            replace: dict[str, str],
            *,
            arch: str | None = None,
            check_packages: bool = True,
    ) -> list[Any]:
        """Instantiate the ``<pkgid>.control`` template, register the resulting
        packages under *ruleid* with *makeflags*, and write any per-package
        auxiliary templates (maintscripts etc.).  Returns the packages added.
        """
        ret = []
        for raw_package in self.templates.get_control(f'{pkgid}.control', replace):
            package = self.packages.add(raw_package)
            package_name = package.name
            ret.append(package)

            # NOTE(review): ruleid is used as a dict key, so in practice it
            # must be hashable (a tuple) — TODO confirm against callers.
            package.meta_rules_ruleids[ruleid] = makeflags
            if arch:
                package.meta_architectures.add(arch)
            package.meta_rules_check_packages = check_packages

            # Optional per-package companion templates; absence is normal.
            for name in (
                'NEWS',
                'bug-presubj',
                'lintian-overrides',
                'maintscript',
                'postinst',
                'postrm',
                'preinst',
                'prerm',
            ):
                try:
                    template = self.templates.get(f'{pkgid}.{name}',
                                                  replace | {'package': package_name})
                except KeyError:
                    pass
                else:
                    with self.open(f'{package_name}.{name}') as f:
                        f.write(template)

        return ret

    def add_packages(
            self,
            packages: Iterable[BinaryPackage],
            ruleid: Iterable[str],
            makeflags: MakeFlags,
            *,
            arch: str | None = None,
            check_packages: bool = True,
    ) -> None:
        """Register already-constructed packages under *ruleid*, analogous to
        add() but without template expansion."""
        for package in packages:
            package = self.packages.add(package)
            package.meta_rules_ruleids[ruleid] = makeflags
            if arch:
                package.meta_architectures.add(arch)
            package.meta_rules_check_packages = check_packages

    def path(self, name) -> pathlib.Path:
        """Path of an output file, namespaced per bundle when it is named."""
        if self.name:
            return self.base / f'generated.{self.name}/{name}'
        return self.base / name

    @staticmethod
    def __ruleid_deps(ruleid: tuple[str], name: str) -> Iterator[tuple[str, str]]:
        """
        Generate all the rules dependencies.
        ```
        build: build_a
        build_a: build_a_b
        build_a_b: build_a_b_image
        ```
        """
        r = ruleid + (name, )
        # First pair links the bare target ('' suffix) to its first component.
        yield (
            '',
            '_' + '_'.join(r[:1]),
        )
        for i in range(1, len(r)):
            yield (
                '_' + '_'.join(r[:i]),
                '_' + '_'.join(r[:i + 1]),
            )

    @contextlib.contextmanager
    def open(self, name: str, mode: str = 'w') -> Generator[IO, None, None]:
        """Open an output file under the bundle directory, creating parents."""
        path = self.path(name)
        path.parent.mkdir(parents=True, exist_ok=True)
        with path.open(mode=mode, encoding='utf-8') as f:
            yield f

    def extract_makefile(self) -> None:
        """Derive setup/build/binary rules from the registered packages.

        Packages sharing a target name, ruleid and makeflags are grouped into
        one invocation; arch-all-only groups become ``indep`` targets.
        """
        targets: dict[frozenset[str], dict] = {}

        for package_name, package in self.packages.items():
            if not isinstance(package, BinaryPackage):
                continue

            target_name = package.meta_rules_target
            ruleids = package.meta_rules_ruleids
            makeflags = MakeFlags(package.meta_rules_makeflags)

            if ruleids:
                arches = package.meta_architectures
                if arches:
                    # Explicitly registered architectures override the
                    # template's Architecture field.
                    package.architecture = arches
                else:
                    arches = package.architecture

                if target_name:
                    for ruleid, makeflags_package in ruleids.items():
                        # Key groups packages that can share one make target.
                        target_key = frozenset(
                            [target_name, ruleid]
                            + [f'{k}_{v}' for (k, v) in makeflags.items()]
                        )
                        target = targets.setdefault(
                            target_key,
                            {
                                'name': target_name,
                                'ruleid': ruleid,
                            },
                        )

                        if package.meta_rules_check_packages:
                            target.setdefault('packages', set()).add(package_name)
                        else:
                            target.setdefault('packages_extra', set()).add(package_name)
                        # Per-ruleid flags first, then package flags on top.
                        makeflags_package = makeflags_package.copy()
                        makeflags_package.update(makeflags)
                        target['makeflags'] = makeflags_package

                        if arches == set(['all']):
                            target['type'] = 'indep'
                        else:
                            target['type'] = 'arch'

        for target in targets.values():
            name = target['name']
            ruleid = target['ruleid']
            packages = target.get('packages', set())
            packages_extra = target.get('packages_extra', set())
            makeflags = target['makeflags']
            ttype = target['type']

            rule = '_'.join(ruleid + (name, ))
            self.makefile.add_rules(f'setup_{rule}',
                                    f'setup_{name}', makeflags, packages, packages_extra)
            self.makefile.add_rules(f'build-{ttype}_{rule}',
                                    f'build_{name}', makeflags, packages, packages_extra)
            self.makefile.add_rules(f'binary-{ttype}_{rule}',
                                    f'binary_{name}', makeflags, packages, packages_extra)

            # Chain each rule to its parents so e.g. `build` pulls in
            # `build_a`, which pulls in `build_a_b`, and so on.
            for i, j in self.__ruleid_deps(ruleid, name):
                self.makefile.add_deps(f'setup{i}',
                                       [f'setup{j}'])
                self.makefile.add_deps(f'build-{ttype}{i}',
                                       [f'build-{ttype}{j}'])
                self.makefile.add_deps(f'binary-{ttype}{i}',
                                       [f'binary-{ttype}{j}'])

    def merge_build_depends(self) -> None:
        # Merge Build-Depends pseudo-fields from binary packages into the
        # source package
        arch_all = PackageArchitecture("all")
        for name, package in self.packages.items():
            dep = package.build_depends
            if not dep:
                continue
            if package.architecture == arch_all:
                build_dep = self.source.build_depends_indep
            else:
                build_dep = self.source.build_depends_arch
            for group in dep:
                for item in group:
                    # Restrict each dependency to the package's architectures
                    # and build profiles unless it already carries its own.
                    if package.architecture != arch_all and not item.arches:
                        item.arches = package.architecture
                    if package.build_profiles and not item.restrictions:
                        item.restrictions = package.build_profiles
                build_dep.merge(group)

    def write(self) -> None:
        """Write all generated outputs (control and rules.gen)."""
        self.write_control()
        self.write_makefile()

    def write_control(self) -> None:
        p = [self.source] + sorted(
            self.packages.values(),
            # Sort deb before udeb and then according to name
            key=lambda i: (i.package_type or '', i.name),
        )
        with self.open('control') as f:
            write_deb822(p, f)

    def write_makefile(self) -> None:
        with self.open('rules.gen') as f:
            self.makefile.write(f)
|
||||
|
||||
|
||||
class Gencontrol(object):
    """Template-method driver that walks the merged configuration tree
    (featuresets -> architectures -> featuresets -> flavours) and fills the
    packaging bundles.  Subclasses override the ``do_*_setup/makefile/
    packages`` hooks; the recursion structure lives here.
    """

    config: ConfigMerged
    vars: dict[str, str]
    bundles: dict[str, PackagesBundle]

    def __init__(self, config: ConfigMerged, templates, version=Version) -> None:
        self.config, self.templates = config, templates
        self.changelog = Changelog(version=version)
        self.vars = {}
        self.bundles = {}

    @property
    def bundle(self) -> PackagesBundle:
        # The unnamed, default bundle created by do_source().
        return self.bundles['']

    def __call__(self) -> None:
        """Run the whole generation pipeline and write the results."""
        self.do_source()
        self.do_main()
        self.do_extra()

        self.write()

    def do_source(self) -> None:
        """Create the default bundle from the source.control template."""
        self.bundles[''] = PackagesBundle(
            None, 'source.control', self.vars, self.templates,
            override_name=self.changelog[0].source,
        )

    def do_main(self) -> None:
        # Work on copies so hooks can mutate freely per subtree.
        vars = self.vars.copy()

        makeflags = MakeFlags()

        self.do_main_setup(self.config, vars, makeflags)
        self.do_main_makefile(self.config, vars, makeflags)
        self.do_main_packages(self.config, vars, makeflags)
        self.do_main_recurse(self.config, vars, makeflags)

    def do_main_setup(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: top-level setup; default does nothing."""
        pass

    def do_main_makefile(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: top-level makefile rules; default does nothing."""
        pass

    def do_main_packages(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: top-level package generation; default does nothing."""
        pass

    def do_main_recurse(
            self,
            config: ConfigMerged,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Recurse into enabled root featuresets, then enabled architectures."""
        for featureset in config.root_featuresets:
            if featureset.enable:
                self.do_indep_featureset(featureset, vars.copy(), makeflags.copy())

        # Sort the output the same way as before
        for arch in sorted(
                itertools.chain.from_iterable(
                    i.debianarchs for i in config.kernelarchs
                ),
                key=lambda i: i.name
        ):
            if arch.enable:
                self.do_arch(arch, vars.copy(), makeflags.copy())

    def do_extra(self) -> None:
        """Register extra (meta) packages from debian/templates/extra.control,
        if that template exists."""
        try:
            packages_extra = self.templates.get_control("extra.control", self.vars)
        except KeyError:
            return

        for package in packages_extra:
            package.meta_rules_target = 'meta'
            if not package.architecture:
                raise RuntimeError('Require Architecture in debian/templates/extra.control')
            for arch in package.architecture:
                self.bundle.add_packages([package], (arch, ),
                                         MakeFlags(), arch=arch, check_packages=False)

    def do_indep_featureset(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Process one architecture-independent featureset."""
        vars['localversion'] = ''
        if config.name_featureset != 'none':
            vars['localversion'] = '-' + config.name_featureset

        self.do_indep_featureset_setup(config, vars, makeflags)
        self.do_indep_featureset_makefile(config, vars, makeflags)
        self.do_indep_featureset_packages(config, vars, makeflags)

    def do_indep_featureset_setup(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: indep-featureset setup; default does nothing."""
        pass

    def do_indep_featureset_makefile(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        makeflags['FEATURESET'] = config.name

    def do_indep_featureset_packages(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: indep-featureset package generation; default does nothing."""
        pass

    def do_arch(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Process one Debian architecture and recurse into its featuresets."""
        vars['arch'] = config.name

        self.do_arch_setup(config, vars, makeflags)
        self.do_arch_makefile(config, vars, makeflags)
        self.do_arch_packages(config, vars, makeflags)
        self.do_arch_recurse(config, vars, makeflags)

    def do_arch_setup(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: per-arch setup; default does nothing."""
        pass

    def do_arch_makefile(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        makeflags['ARCH'] = config.name

    def do_arch_packages(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: per-arch package generation; default does nothing."""
        pass

    def do_arch_recurse(
            self,
            config: ConfigMergedDebianarch,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        for featureset in config.featuresets:
            if featureset.enable:
                self.do_featureset(featureset, vars.copy(), makeflags.copy())

    def do_featureset(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Process one per-arch featureset and recurse into its flavours."""
        vars['localversion'] = ''
        if config.name_featureset != 'none':
            vars['localversion'] = '-' + config.name_featureset

        self.do_featureset_setup(config, vars, makeflags)
        self.do_featureset_makefile(config, vars, makeflags)
        self.do_featureset_packages(config, vars, makeflags)
        self.do_featureset_recurse(config, vars, makeflags)

    def do_featureset_setup(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: per-featureset setup; default does nothing."""
        pass

    def do_featureset_makefile(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        makeflags['FEATURESET'] = config.name

    def do_featureset_packages(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: per-featureset package generation; default does nothing."""
        pass

    def do_featureset_recurse(
            self,
            config: ConfigMergedFeatureset,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        for flavour in config.flavours:
            if flavour.enable:
                self.do_flavour(flavour, vars.copy(), makeflags.copy())

    def do_flavour(
            self,
            config: ConfigMergedFlavour,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Process one flavour (the leaf of the configuration tree)."""
        vars['localversion'] += '-' + config.name_flavour

        self.do_flavour_setup(config, vars, makeflags)
        self.do_flavour_makefile(config, vars, makeflags)
        self.do_flavour_packages(config, vars, makeflags)

    def do_flavour_setup(
            self,
            config: ConfigMergedFlavour,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        # Promote selected template variables into make variables.
        for i in (
            ('kernel-arch', 'KERNEL_ARCH'),
            ('localversion', 'LOCALVERSION'),
        ):
            if i[0] in vars:
                makeflags[i[1]] = vars[i[0]]

    def do_flavour_makefile(
            self,
            config: ConfigMergedFlavour,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        makeflags['FLAVOUR'] = config.name

    def do_flavour_packages(
            self,
            config: ConfigMergedFlavour,
            vars: dict[str, str],
            makeflags: MakeFlags,
    ) -> None:
        """Hook: per-flavour package generation; default does nothing."""
        pass

    def substitute(self, s: str, vars) -> str:
        """Replace ``@name@`` placeholders in *s* with values from *vars*."""
        def subst(match) -> str:
            return vars[match.group(1)]

        return re.sub(r'@([-_a-z0-9]+)@', subst, str(s))

    def write(self) -> None:
        """Finalise and write every bundle to disk."""
        for bundle in self.bundles.values():
            bundle.extract_makefile()
            bundle.merge_build_depends()
            bundle.write()
|
||||
|
||||
|
||||
def merge_packages(packages, new, arch) -> None:
    """Fold the package stanzas in *new* into *packages* for *arch*.

    A package seen for the first time is tagged with the architecture and
    appended; an already-known package gains the architecture plus any
    relation fields the new stanza carries.
    """
    relation_fields = ('Depends', 'Provides', 'Suggests', 'Recommends',
                       'Conflicts')

    for incoming in new:
        name = incoming['Package']

        if name not in packages:
            # First occurrence: take the stanza as-is for this architecture.
            incoming['Architecture'] = arch
            packages.append(incoming)
            continue

        existing = packages.get(name)
        existing['Architecture'].add(arch)

        for field in relation_fields:
            if field not in incoming:
                continue
            if field in existing:
                existing[field].extend(incoming[field])
            else:
                existing[field] = incoming[field]
|
93
debian/lib/python/debian_linux/kconfig.py
vendored
Normal file
93
debian/lib/python/debian_linux/kconfig.py
vendored
Normal file
|
@ -0,0 +1,93 @@
|
|||
from typing import Iterable
|
||||
from collections import OrderedDict
|
||||
|
||||
__all__ = (
|
||||
"KconfigFile",
|
||||
)
|
||||
|
||||
|
||||
class KConfigEntry(object):
    """A single ``CONFIG_<name>=<value>`` line of a kernel configuration,
    with optional preceding ``#.`` comment lines."""

    __slots__ = 'name', 'value', 'comments'

    def __init__(self, name, value, comments=None) -> None:
        self.name, self.value = name, value
        self.comments = comments or []

    def __eq__(self, other) -> bool:
        # Comments are deliberately ignored: two entries are equal when they
        # set the same option to the same value.
        return self.name == other.name and self.value == other.value

    def __hash__(self) -> int:
        # Fixed: the original used `hash(name) | hash(value)`, and bitwise OR
        # saturates bits towards 1, destroying entropy and causing needless
        # collisions.  Hashing the tuple mixes both components properly and
        # stays consistent with __eq__.
        return hash((self.name, self.value))

    def __repr__(self) -> str:
        return ('<{}({!r}, {!r}, {!r})>'
                .format(self.__class__.__name__, self.name, self.value,
                        self.comments))

    def __str__(self) -> str:
        return 'CONFIG_{}={}'.format(self.name, self.value)

    def write(self) -> Iterable[str]:
        """Yield the ``#.`` comment lines followed by the config line."""
        for comment in self.comments:
            yield '#. ' + comment
        yield str(self)
|
||||
|
||||
|
||||
class KConfigEntryTristate(KConfigEntry):
    """Tristate (y/m/n) config entry; normalises the textual value to the
    sentinel constants on construction."""

    __slots__ = ()

    VALUE_NO = False
    VALUE_YES = True
    VALUE_MOD = object()

    def __init__(self, name, value, comments=None) -> None:
        if value == 'y':
            normalised = self.VALUE_YES
        elif value == 'm':
            normalised = self.VALUE_MOD
        elif value == 'n' or value is None:
            normalised = self.VALUE_NO
        else:
            raise NotImplementedError
        super(KConfigEntryTristate, self).__init__(name, normalised, comments)

    def __str__(self) -> str:
        if self.value is self.VALUE_MOD:
            return 'CONFIG_{}=m'.format(self.name)
        if self.value:
            return 'CONFIG_{}=y'.format(self.name)
        return '# CONFIG_{} is not set'.format(self.name)
|
||||
|
||||
|
||||
class KconfigFile(OrderedDict[str, KConfigEntry]):
    """An ordered mapping of option name -> KConfigEntry, readable from and
    serialisable to kernel ``.config`` syntax."""

    def __str__(self) -> str:
        # Join directly instead of the original's manual append loop.
        return '\n'.join(self.str_iter()) + '\n'

    def read(self, f) -> None:
        """Parse a ``.config``-style file object into this mapping.

        Raises RuntimeError on a line that is neither a CONFIG assignment,
        an ``is not set`` marker, a comment, nor blank.
        """
        # Iterate the file object lazily; the original materialised every
        # line up front with iter(f.readlines()).
        for line in f:
            line = line.strip()
            if line.startswith("CONFIG_"):
                i = line.find('=')
                option = line[7:i]      # strip the "CONFIG_" prefix
                value = line[i + 1:]
                self.set(option, value)
            elif line.startswith("# CONFIG_"):
                # "# CONFIG_FOO is not set": drop 9-char prefix and the
                # 11-char " is not set" suffix.
                option = line[9:-11]
                self.set(option, 'n')
            elif line.startswith("#") or not line:
                pass
            else:
                raise RuntimeError("Can't recognize %s" % line)

    def set(self, key, value) -> None:
        """Store *value* under *key*, using a tristate entry for y/m/n."""
        if value in ('y', 'm', 'n'):
            self[key] = KConfigEntryTristate(key, value)
        else:
            self[key] = KConfigEntry(key, value)

    def str_iter(self) -> Iterable[str]:
        # Only the entries are needed; don't unpack keys that are ignored.
        for value in self.values():
            yield str(value)
|
339
debian/lib/python/debian_linux/test_debian.py
vendored
Normal file
339
debian/lib/python/debian_linux/test_debian.py
vendored
Normal file
|
@ -0,0 +1,339 @@
|
|||
import pytest
|
||||
|
||||
from .debian import (
|
||||
Version,
|
||||
VersionLinux,
|
||||
PackageArchitecture,
|
||||
PackageDescription,
|
||||
PackageRelationEntry,
|
||||
PackageRelationGroup,
|
||||
PackageRelation,
|
||||
PackageBuildprofileEntry,
|
||||
PackageBuildprofile,
|
||||
)
|
||||
|
||||
|
||||
class TestVersion:
    """Tests for debian.Version parsing of Debian version strings
    ([epoch:]upstream[-revision])."""

    def test_native(self) -> None:
        # Native: no hyphen, so the whole string is the upstream version.
        v = Version('1.2+c~4')
        assert v.epoch is None
        assert v.upstream == '1.2+c~4'
        assert v.revision is None
        assert v.complete == '1.2+c~4'
        assert v.complete_noepoch == '1.2+c~4'

    def test_nonnative(self) -> None:
        v = Version('1-2+d~3')
        assert v.epoch is None
        assert v.upstream == '1'
        assert v.revision == '2+d~3'
        assert v.complete == '1-2+d~3'
        assert v.complete_noepoch == '1-2+d~3'

    def test_native_epoch(self) -> None:
        v = Version('5:1.2.3')
        assert v.epoch == 5
        assert v.upstream == '1.2.3'
        assert v.revision is None
        assert v.complete == '5:1.2.3'
        assert v.complete_noepoch == '1.2.3'

    def test_nonnative_epoch(self) -> None:
        v = Version('5:1.2.3-4')
        assert v.epoch == 5
        assert v.upstream == '1.2.3'
        assert v.revision == '4'
        assert v.complete == '5:1.2.3-4'
        assert v.complete_noepoch == '1.2.3-4'

    def test_multi_hyphen(self) -> None:
        # Only the last hyphen separates upstream from revision.
        v = Version('1-2-3')
        assert v.epoch is None
        assert v.upstream == '1-2'
        assert v.revision == '3'
        assert v.complete == '1-2-3'

    def test_multi_colon(self) -> None:
        # Only the first colon separates the epoch.
        v = Version('1:2:3')
        assert v.epoch == 1
        assert v.upstream == '2:3'
        assert v.revision is None

    def test_invalid_epoch(self) -> None:
        with pytest.raises(RuntimeError):
            Version('a:1')
        with pytest.raises(RuntimeError):
            Version('-1:1')
        with pytest.raises(RuntimeError):
            Version('1a:1')

    def test_invalid_upstream(self) -> None:
        with pytest.raises(RuntimeError):
            Version('1_2')
        with pytest.raises(RuntimeError):
            Version('1/2')
        with pytest.raises(RuntimeError):
            Version('a1')
        with pytest.raises(RuntimeError):
            Version('1 2')

    def test_invalid_revision(self) -> None:
        with pytest.raises(RuntimeError):
            Version('1-2_3')
        with pytest.raises(RuntimeError):
            Version('1-2/3')
        with pytest.raises(RuntimeError):
            Version('1-2:3')
|
||||
|
||||
|
||||
class TestVersionLinux:
    """Tests for VersionLinux's decomposition of kernel package versions."""

    def test_stable(self) -> None:
        v = VersionLinux('1.2.3-4')
        assert v.linux_version == '1.2'
        assert v.linux_upstream == '1.2'
        assert v.linux_upstream_full == '1.2.3'
        assert v.linux_modifier is None
        assert v.linux_dfsg is None

    def test_rc(self) -> None:
        # '~rcN' in the Debian version maps back to upstream '-rcN'.
        v = VersionLinux('1.2~rc3-4')
        assert v.linux_version == '1.2'
        assert v.linux_upstream == '1.2-rc3'
        assert v.linux_upstream_full == '1.2-rc3'
        assert v.linux_modifier == 'rc3'
        assert v.linux_dfsg is None

    def test_dfsg(self) -> None:
        v = VersionLinux('1.2~rc3.dfsg.1-4')
        assert v.linux_version == '1.2'
        assert v.linux_upstream == '1.2-rc3'
        assert v.linux_upstream_full == '1.2-rc3'
        assert v.linux_modifier == 'rc3'
        assert v.linux_dfsg == '1'
|
||||
|
||||
|
||||
class TestPackageArchitecture:
    """Tests for PackageArchitecture, a set-like of architecture names."""

    def test_init(self) -> None:
        a = PackageArchitecture()
        assert a == set()

    def test_init_str(self) -> None:
        # Whitespace-separated string input is split on any whitespace.
        a = PackageArchitecture(' foo bar\tbaz ')
        assert a == {'foo', 'bar', 'baz'}

    def test_init_iter(self) -> None:
        a = PackageArchitecture(('foo', 'bar'))
        assert a == {'foo', 'bar'}

    def test_init_self(self) -> None:
        a = PackageArchitecture(PackageArchitecture(('foo', 'bar')))
        assert a == {'foo', 'bar'}

    def test_str(self) -> None:
        # Rendering is sorted and space-separated.
        a = PackageArchitecture(('foo', 'bar'))
        assert str(a) == 'bar foo'
|
||||
|
||||
|
||||
class TestPackageDescription:
    """Tests for PackageDescription (short line + '.'-separated long parts)."""

    def test_init(self) -> None:
        a = PackageDescription()
        assert a.short == []
        assert a.long == []

    def test_init_str(self) -> None:
        a = PackageDescription('Short\nLong1\n.\nLong2')
        assert a.short == ['Short']
        assert a.long == ['Long1', 'Long2']

    def test_init_self(self) -> None:
        a = PackageDescription(PackageDescription('Short\nLong1\n.\nLong2'))
        assert a.short == ['Short']
        assert a.long == ['Long1', 'Long2']

    def test_str(self) -> None:
        # Round-trips back to the control-file representation.
        a = PackageDescription('Short\nLong1\n.\nLong2')
        assert str(a) == 'Short\nLong1\n.\nLong2'
|
||||
|
||||
|
||||
class TestPackageRelationEntry:
    """Tests for a single package relation entry
    (name, version constraint, architectures, build profiles)."""

    def test_init_str(self) -> None:
        a = PackageRelationEntry('package (>=version) [arch2 arch1] <profile1 >')
        assert a.name == 'package'
        assert a.version == 'version'
        assert a.arches == {'arch1', 'arch2'}
        # TODO: assert a.profiles
        assert str(a) == 'package (>= version) [arch1 arch2] <profile1>'

    def test_init_self(self) -> None:
        a = PackageRelationEntry(PackageRelationEntry('package [arch2 arch1]'))
        assert a.name == 'package'
        assert a.arches == {'arch1', 'arch2'}
        assert str(a) == 'package [arch1 arch2]'
|
||||
|
||||
|
||||
class TestPackageRelationGroup:
    """Tests for PackageRelationGroup, a '|'-separated list of alternatives."""

    def test_init(self) -> None:
        a = PackageRelationGroup()
        assert a == []

    def test_init_str(self) -> None:
        a = PackageRelationGroup('foo | bar')
        assert len(a) == 2
        assert a[0].name == 'foo'
        assert a[1].name == 'bar'

    def test_init_iter_entry(self) -> None:
        a = PackageRelationGroup((PackageRelationEntry('foo'), PackageRelationEntry('bar')))
        assert len(a) == 2
        assert a[0].name == 'foo'
        assert a[1].name == 'bar'

    def test_init_iter_str(self) -> None:
        a = PackageRelationGroup(('foo', 'bar'))
        assert len(a) == 2
        assert a[0].name == 'foo'
        assert a[1].name == 'bar'

    def test_init_self(self) -> None:
        a = PackageRelationGroup(PackageRelationGroup(['foo', 'bar']))
        assert len(a) == 2
        assert a[0].name == 'foo'
        assert a[1].name == 'bar'

    def test_str(self) -> None:
        # Rendering normalises spacing around '|'.
        a = PackageRelationGroup('foo| bar')
        assert str(a) == 'foo | bar'
|
||||
|
||||
|
||||
class TestPackageRelation:
    """Tests for PackageRelation, a ','-separated list of alternative groups."""

    def test_init(self) -> None:
        a = PackageRelation()
        assert a == []

    def test_init_str(self) -> None:
        # Commas separate groups; '|' separates alternatives inside a group.
        a = PackageRelation('foo1 | foo2, bar')
        assert len(a) == 2
        assert len(a[0]) == 2
        assert a[0][0].name == 'foo1'
        assert a[0][1].name == 'foo2'
        assert len(a[1]) == 1
        assert a[1][0].name == 'bar'

    def test_init_iter_entry(self) -> None:
        a = PackageRelation([[PackageRelationEntry('foo')], [PackageRelationEntry('bar')]])
        assert len(a) == 2
        assert len(a[0]) == 1
        assert a[0][0].name == 'foo'
        assert len(a[1]) == 1
        assert a[1][0].name == 'bar'

    def test_init_iter_str(self) -> None:
        a = PackageRelation(('foo', 'bar'))
        assert len(a) == 2
        assert len(a[0]) == 1
        assert a[0][0].name == 'foo'
        assert len(a[1]) == 1
        assert a[1][0].name == 'bar'

    def test_init_self(self) -> None:
        a = PackageRelation(PackageRelation(('foo', 'bar')))
        assert len(a) == 2
        assert len(a[0]) == 1
        assert a[0][0].name == 'foo'
        assert len(a[1]) == 1
        assert a[1][0].name == 'bar'

    def test_str(self) -> None:
        # Rendering normalises spacing around ','.
        a = PackageRelation('foo ,bar')
        assert str(a) == 'foo, bar'
|
||||
|
||||
|
||||
class TestPackageBuildprofileEntry:
    """Tests for a single <...> build-profile restriction group."""

    def test_parse(self) -> None:
        # Positive and negated profiles are split into pos/neg sets;
        # str() emits all positives first, then all negatives.
        entry = PackageBuildprofileEntry.parse('<profile1 !profile2 profile3 !profile4>')
        assert entry.pos == {'profile1', 'profile3'}
        assert entry.neg == {'profile2', 'profile4'}
        assert str(entry) == '<profile1 profile3 !profile2 !profile4>'

    def test_eq(self) -> None:
        # Parsing and direct construction produce equal entries.
        parsed = PackageBuildprofileEntry.parse('<profile1 !profile2>')
        built = PackageBuildprofileEntry(pos={'profile1'}, neg={'profile2'})
        assert parsed == built

    def test_isdisjoint(self) -> None:
        # Entries differing in at least one profile are disjoint.
        left = PackageBuildprofileEntry.parse('<profile1 profile2>')
        right = PackageBuildprofileEntry.parse('<profile1 profile3>')
        assert left.isdisjoint(right)

    def test_issubset_empty(self) -> None:
        # Every entry is a subset of the unrestricted (empty) entry.
        narrow = PackageBuildprofileEntry.parse('<profile1 profile2>')
        wide = PackageBuildprofileEntry()
        assert narrow.issubset(wide)

    def test_issubset_pos(self) -> None:
        # More positive restrictions -> subset of fewer.
        narrow = PackageBuildprofileEntry.parse('<profile1 profile2>')
        wide = PackageBuildprofileEntry.parse('<profile1>')
        assert narrow.issubset(wide)

    def test_issubset_neg(self) -> None:
        # Fewer negative restrictions -> subset of more.
        narrow = PackageBuildprofileEntry.parse('<!profile1>')
        wide = PackageBuildprofileEntry.parse('<!profile1 !profile2>')
        assert narrow.issubset(wide)

    def test_issubset_both(self) -> None:
        # Mixed positive/negative comparison.
        narrow = PackageBuildprofileEntry.parse('<!profile1 !profile2 profile3>')
        wide = PackageBuildprofileEntry.parse('<!profile1 !profile2 !profile3>')
        assert narrow.issubset(wide)

    def test_issuperset_empty(self) -> None:
        # The unrestricted entry is a superset of every entry.
        narrow = PackageBuildprofileEntry.parse('<profile1 profile2>')
        wide = PackageBuildprofileEntry()
        assert wide.issuperset(narrow)

    def test_issuperset_pos(self) -> None:
        # Fewer positive restrictions -> superset of more.
        narrow = PackageBuildprofileEntry.parse('<profile1 profile2>')
        wide = PackageBuildprofileEntry.parse('<profile1>')
        assert wide.issuperset(narrow)

    def test_issuperset_neg(self) -> None:
        # More negative restrictions -> superset of fewer.
        narrow = PackageBuildprofileEntry.parse('<!profile1>')
        wide = PackageBuildprofileEntry.parse('<!profile1 !profile2>')
        assert wide.issuperset(narrow)

    def test_issuperset_both(self) -> None:
        # Mixed positive/negative comparison, superset direction.
        narrow = PackageBuildprofileEntry.parse('<!profile1 !profile2 profile3>')
        wide = PackageBuildprofileEntry.parse('<!profile1 !profile2 !profile3>')
        assert wide.issuperset(narrow)

    def test_update_pos(self) -> None:
        # update() keeps only the positive profiles common to both sides.
        entry = PackageBuildprofileEntry.parse('<profile1 profile2>')
        other = PackageBuildprofileEntry.parse('<profile1>')
        entry.update(other)
        assert entry.pos == {'profile1'}
        assert entry.neg == set()

    def test_update_neg(self) -> None:
        # update() keeps only the negative profiles common to both sides.
        entry = PackageBuildprofileEntry.parse('<!profile1 !profile2>')
        other = PackageBuildprofileEntry.parse('<!profile1>')
        entry.update(other)
        assert entry.pos == set()
        assert entry.neg == {'profile1'}

    def test_update_both(self) -> None:
        # Mixed case: profile3 appears positively on one side and
        # negatively on the other, so it is dropped entirely.
        entry = PackageBuildprofileEntry.parse('<profile1 !profile2 profile3>')
        other = PackageBuildprofileEntry.parse('<profile1 !profile2 !profile3>')
        entry.update(other)
        assert entry.pos == {'profile1'}
        assert entry.neg == {'profile2'}
class TestPackageBuildprofile:
    """Tests for a full build-profile annotation (list of <...> groups)."""

    def test_parse(self) -> None:
        # Round-trip: parsing and re-stringifying preserves the groups.
        profile = PackageBuildprofile.parse('<profile1> <!profile2> <profile3> <!profile4>')
        assert str(profile) == '<profile1> <!profile2> <profile3> <!profile4>'

    def test_update(self) -> None:
        # update() merges overlapping groups and appends unmatched ones.
        profile = PackageBuildprofile.parse('<profile1 profile2> <profile2>')
        other = PackageBuildprofile.parse('<profile1> <profile2 !profile3> <profile3>')
        profile.update(other)
        assert str(profile) == '<profile1> <profile2> <profile3>'
89
debian/lib/python/debian_linux/utils.py
vendored
Normal file
89
debian/lib/python/debian_linux/utils.py
vendored
Normal file
|
@ -0,0 +1,89 @@
|
|||
import io
|
||||
import os
|
||||
import re
|
||||
import textwrap
|
||||
import typing
|
||||
|
||||
import jinja2
|
||||
|
||||
from .dataclasses_deb822 import read_deb822
|
||||
from .debian import SourcePackage, BinaryPackage, TestsControl
|
||||
|
||||
|
||||
class Templates(object):
    """Load and render templates from a list of directories.

    Templates are looked up as ``<dir>/<pkgid>.<name><suffix>`` where the
    suffix selects the rendering engine: ``.j2`` (jinja2), ``.in``
    (``@var@`` substitution) or ``''`` (verbatim).  File contents are
    cached after the first read.
    """

    dirs: list[str]
    # Maps template key -> (file contents, suffix) as produced by _read().
    _cache: dict[str, tuple[str, str]]
    _jinja2: jinja2.Environment

    def __init__(self, dirs: typing.Optional[list[str]] = None) -> None:
        # None instead of a mutable list default; the historical default
        # of ["debian/templates"] is preserved for callers passing nothing.
        self.dirs = dirs if dirs is not None else ["debian/templates"]

        self._cache = {}
        self._jinja2 = jinja2.Environment(
            # autoescape uses HTML safe escaping, which does not help us
            autoescape=False,
            keep_trailing_newline=True,
            trim_blocks=True,
            undefined=jinja2.StrictUndefined,
        )

    def _read(self, name: str) -> tuple[str, str]:
        """Return (contents, suffix) of the first matching template file.

        Raises KeyError if no file exists in any configured directory.
        """
        pkgid, name = name.rsplit('.', 1)

        # Rendered templates (.j2, .in) take precedence over verbatim files.
        for suffix in ['.j2', '.in', '']:
            for directory in self.dirs:  # renamed: don't shadow builtin dir()
                filename = "%s/%s.%s%s" % (directory, pkgid, name, suffix)
                if os.path.exists(filename):
                    with open(filename, 'r', encoding='utf-8') as f:
                        return (f.read(), suffix)

        raise KeyError(name)

    def _get(self, key: str) -> tuple[str, str]:
        """Return the cached (contents, suffix) for key, reading on miss."""
        try:
            return self._cache[key]
        except KeyError:
            self._cache[key] = value = self._read(key)
            return value

    def get(self, key: str, context: typing.Optional[dict[str, str]] = None) -> str:
        """Render the template for key with context and return the text.

        Raises RuntimeError when the template references a variable that
        is missing from context.
        """
        value = self._get(key)
        suffix = value[1]

        # An empty/None context returns the raw template text unchanged.
        if context:
            if suffix == '.in':
                try:
                    def subst(match):
                        return context[match.group(1)]
                    return re.sub(r'@([-_a-z0-9]+)@', subst, str(value[0]))
                except KeyError as e:
                    raise RuntimeError(f'templates/{key}.in: {e} is undefined') from None

            elif suffix == '.j2':
                try:
                    return self._jinja2.from_string(value[0]).render(context)
                except jinja2.exceptions.UndefinedError as e:
                    raise RuntimeError(f'templates/{key}.j2: {e}') from None

        return value[0]

    def get_control(
            self, key: str, context: typing.Optional[dict[str, str]] = None,
    ) -> typing.Iterable[BinaryPackage]:
        """Render key and parse it as deb822 binary-package stanzas."""
        return read_deb822(BinaryPackage, io.StringIO(self.get(key, context)))

    def get_source_control(
            self, key: str, context: typing.Optional[dict[str, str]] = None,
    ) -> typing.Iterable[SourcePackage]:
        """Render key and parse it as deb822 source-package stanzas."""
        return read_deb822(SourcePackage, io.StringIO(self.get(key, context)))

    def get_tests_control(
            self, key: str, context: typing.Optional[dict[str, str]] = None,
    ) -> typing.Iterable[TestsControl]:
        """Render key and parse it as deb822 autopkgtest stanzas."""
        return read_deb822(TestsControl, io.StringIO(self.get(key, context)))
|
||||
|
||||
class TextWrapper(textwrap.TextWrapper):
|
||||
wordsep_re = re.compile(
|
||||
r'(\s+|' # any whitespace
|
||||
r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash
|
Loading…
Add table
Add a link
Reference in a new issue