Coverage for src/debputy/deb_packaging_support.py: 13%
799 statements
« prev ^ index » next coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
« prev ^ index » next coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1import collections
2import contextlib
3import dataclasses
4import datetime
5import functools
6import hashlib
7import itertools
8import operator
9import os
10import re
11import subprocess
12import tempfile
13import textwrap
14from contextlib import ExitStack
15from tempfile import mkstemp
16from typing import (
17 Iterable,
18 List,
19 Optional,
20 Set,
21 Dict,
22 Sequence,
23 Tuple,
24 Iterator,
25 Literal,
26 TypeVar,
27 FrozenSet,
28 cast,
29 Any,
30 Union,
31 Mapping,
32)
34import debian.deb822
35from debian.changelog import Changelog
36from debian.deb822 import Deb822
38from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
39from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
40from debputy.debhelper_emulation import (
41 dhe_install_pkg_file_as_ctrl_file_if_present,
42 dhe_dbgsym_root_dir,
43)
44from debputy.elf_util import find_all_elf_files, ELF_MAGIC
45from debputy.exceptions import DebputyDpkgGensymbolsError
46from debputy.filesystem_scan import FSPath, FSROOverlay
47from debputy.highlevel_manifest import (
48 HighLevelManifest,
49 PackageTransformationDefinition,
50 BinaryPackageData,
51)
52from debputy.maintscript_snippet import (
53 ALL_CONTROL_SCRIPTS,
54 MaintscriptSnippetContainer,
55 STD_CONTROL_SCRIPTS,
56)
57from debputy.packages import BinaryPackage, SourcePackage
58from debputy.packaging.alternatives import process_alternatives
59from debputy.packaging.debconf_templates import process_debconf_templates
60from debputy.packaging.makeshlibs import (
61 compute_shlibs,
62 ShlibsContent,
63 generate_shlib_dirs,
64)
65from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
66from debputy.plugin.api.impl import ServiceRegistryImpl
67from debputy.plugin.api.impl_types import (
68 MetadataOrMaintscriptDetector,
69 PackageDataTable,
70 ServiceManagerDetails,
71)
72from debputy.plugin.api.spec import (
73 FlushableSubstvars,
74 VirtualPath,
75 PackageProcessingContext,
76 ServiceDefinition,
77)
78from debputy.plugin.debputy.binary_package_rules import ServiceRule
79from debputy.util import (
80 _error,
81 ensure_dir,
82 assume_not_none,
83 perl_module_dirs,
84 perlxs_api_dependency,
85 detect_fakeroot,
86 grouper,
87 _info,
88 xargs,
89 escape_shell,
90 generated_content_dir,
91 print_command,
92 _warn,
93)
# Covariant TypeVar for helpers that accept any VirtualPath subtype.
VP = TypeVar("VP", bound=VirtualPath, covariant=True)

# Matches library package names renamed for the 64-bit time_t transition
# (e.g. "libfoo2t64", "libfoot64-nss").
_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
# Substvar name used to carry Provides for the pre-t64 package name.
_T64_PROVIDES = "t64:Provides"
def generate_md5sums_file(control_output_dir: str, fs_root: VirtualPath) -> None:
    """Generate the DEBIAN/md5sums control file for a binary package.

    Hashes every regular file in *fs_root* except conffiles (dpkg tracks
    those separately via the ``conffiles`` control file) and writes one
    ``<md5>  <path>`` line per file.  If no entry was written at all, the
    md5sums file is removed again so the package does not ship an empty
    control file.

    :param control_output_dir: Directory holding the package's control
      files; ``conffiles`` is read from and ``md5sums`` written to it.
    :param fs_root: The package's file system root to be hashed.
    """
    conffiles = os.path.join(control_output_dir, "conffiles")
    md5sums = os.path.join(control_output_dir, "md5sums")
    exclude = set()
    if os.path.isfile(conffiles):
        with open(conffiles, "rt") as fd:
            for line in fd:
                if not line.startswith("/"):
                    continue
                # Conffile entries are absolute; fs_root paths are "./"-relative.
                exclude.add("." + line.rstrip("\n"))
    files = sorted(
        (
            path
            for path in fs_root.all_paths()
            if path.is_file and path.path not in exclude
        ),
        # Sort in the same order as dh_md5sums, which is not quite the same as dpkg/`all_paths()`
        # Compare `.../doc/...` vs `.../doc-base/...` if you want to see the difference between
        # the two approaches.
        key=lambda p: p.path,
    )
    had_content = False
    with open(md5sums, "wt") as md5fd:
        for member in files:
            path = member.path
            assert path.startswith("./")
            path = path[2:]
            with member.open(byte_io=True) as f:
                file_hash = hashlib.md5()
                while chunk := f.read(8192):
                    file_hash.update(chunk)
            # Bug fix: flag content per *entry written*, not per chunk read.
            # Previously a package containing only zero-length files would
            # write md5sums lines and then delete the file anyway.
            had_content = True
            # md5sums uses the md5sum(1) format: hash, two spaces, path.
            md5fd.write(f"{file_hash.hexdigest()}  {path}\n")
    if not had_content:
        os.unlink(md5sums)
def install_or_generate_conffiles(
    binary_package: BinaryPackage,
    root_dir: str,
    fs_root: VirtualPath,
    debian_dir: VirtualPath,
) -> None:
    """Produce the ``conffiles`` control file for *binary_package*.

    Installs a packager-provided debian/conffiles file verbatim if one
    exists, then registers every regular file under /etc in the package
    root.  The resulting control file (if any) is normalized to mode 0644.
    """
    destination = os.path.join(root_dir, "conffiles")
    dhe_install_pkg_file_as_ctrl_file_if_present(
        debian_dir,
        binary_package,
        "conffiles",
        root_dir,
        0o0644,
    )
    etc = fs_root.lookup("etc")
    if etc:
        regular_files = (p for p in etc.all_paths() if p.is_file)
        _add_conffiles(destination, regular_files)
    if os.path.isfile(destination):
        os.chmod(destination, 0o0644)
# Bit flags describing what kind of perl code was detected in a package.
PERL_DEP_PROGRAM = 1  # scripts executed via the perl interpreter
PERL_DEP_INDEP_PM_MODULE = 2  # arch-independent *.pm modules (vendorlib)
PERL_DEP_XS_MODULE = 4  # compiled XS modules (*.so)
PERL_DEP_ARCH_PM_MODULE = 8  # arch-dependent *.pm modules (vendorarch)
# Anything beyond plain programs / indep modules cannot use a
# Multi-Arch "perl:any" dependency.
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)
@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    """Look up the installed version of *package* via ``dpkg -s``.

    :param package: Name of an installed package ("perl" or "perl-base").
    :return: The value of the package's ``Version:`` field.
    :raises AssertionError: if dpkg's output has no Version field (which
      should not happen for an installed package).
    """
    dpkg_version = None
    lines = (
        subprocess.check_output(["dpkg", "-s", package])
        .decode("utf-8")
        .splitlines(keepends=False)
    )
    for line in lines:
        if line.startswith("Version: "):
            # Fix: the previous hard-coded slice `line[8:]` was off by one
            # and only worked because of the trailing `.strip()`.
            dpkg_version = line.removeprefix("Version: ").strip()
            break
    assert dpkg_version is not None
    return dpkg_version
def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
) -> None:
    """Detect perl code in the package and emit ``perl:Depends`` substvars.

    Mirrors dh_perl: prunes empty MakeMaker-created perl include dirs,
    scans the vendor include dirs for *.pm / *.so modules and the rest of
    the tree for perl scripts, then derives the dependency on perl
    (adding ":any" and/or a version constraint as appropriate).
    """
    known_perl_inc_dirs = perl_module_dirs(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in known_perl_inc_dirs:
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    for d, pm_mode in [
        (known_perl_inc_dirs.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (known_perl_inc_dirs.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                detected_dep_requirements |= pm_mode

    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            # Skip documentation (e.g. example scripts) entirely.
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        # Only plain scripts / arch-indep modules detected: any
        # architecture's perl interpreter will do.
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        # XS modules are tied to (at least) the perl version they were built against.
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())
def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    """Reject packages that ship anything beneath /usr/local.

    debputy does not (yet) implement the debhelper-style maintscript
    replacement for such paths, so a non-empty /usr/local aborts the build.
    """
    usr_local = fs_root.lookup("./usr/local")
    if usr_local is None or not any(usr_local.iterdir):
        return
    # There are two key issues with supporting this:
    #  1) Getting the generated maintscript carried on to the final maintscript
    #  2) Making sure that manifest created directories do not trigger the "unused error".
    _error(
        f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
    )
def _find_and_analyze_systemd_service_files(
    fs_root: VirtualPath,
    systemd_service_dir: Literal["system", "user"],
) -> Iterable[VirtualPath]:
    """Collect systemd unit files of the given scope from the package.

    Scans both the merged-/usr and legacy unit directories.  Symlinked
    units are recorded as aliases of their target; regular files are
    collected with the first occurrence winning when both directories
    provide the same basename.

    NOTE(review): the ``aliases`` mapping is built but never returned or
    used in this function as shown here — confirm whether it is
    intentionally dead or was meant to be part of the return value.
    """
    service_dirs = [
        f"./usr/lib/systemd/{systemd_service_dir}",
        f"./lib/systemd/{systemd_service_dir}",
    ]
    aliases: Dict[str, List[str]] = collections.defaultdict(list)
    seen = set()
    all_files = []

    for d in service_dirs:
        system_dir = fs_root.lookup(d)
        if not system_dir:
            continue
        for child in system_dir.iterdir:
            if child.is_symlink:
                # "alias.service -> real.service": record alias by target basename.
                dest = os.path.basename(child.readlink())
                aliases[dest].append(child.name)
            elif child.is_file and child.name not in seen:
                seen.add(child.name)
                all_files.append(child)

    return all_files
def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    """Abort the build if the package ships systemd *user* service files.

    debputy has no integration for user-scoped units yet, so shipping one
    is a hard error rather than a silently unregistered service.
    """
    user_units = _find_and_analyze_systemd_service_files(fs_root, "user")
    for service_file in user_units:
        _error(
            f'Sorry, systemd user services files are not supported at the moment (saw "{service_file.path}"'
            f" in {dctrl.name})"
        )
# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
# Never trim the changelog below this many (non-binNMU) entries.
_DCH_MIN_NUM_OF_ENTRIES = 4
def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: Optional[Set[str]],
    *,
    trim: bool = True,
) -> Tuple[bool, Optional[Set[str]]]:
    """Trim an installed Debian changelog or NEWS file.

    For changelogs: entries older than ``_DCH_PRUNE_CUT_OFF_DATE`` are
    dropped once ``_DCH_MIN_NUM_OF_ENTRIES`` entries have been kept, and
    binNMU entries are split out into a per-architecture changelog file.
    For NEWS files: only entries whose version appears in *keep_versions*
    survive; a NEWS file emptied this way is deleted (see #1021607).

    :param package: The binary package the file belongs to.
    :param path: The changelog/NEWS file inside the package tree.
    :param is_changelog: True for a changelog, False for a NEWS file.
    :param keep_versions: For NEWS files, the versions kept in the
      matching changelog; must be None for changelogs.
    :param trim: When False, old entries are retained (binNMU splitting
      still happens).
    :return: ``(was_shortened, kept_versions)``; *kept_versions* is None
      when nothing was shortened or the file was deleted.
    """
    # TODO: Process `d/changelog` once
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error(f"The Debian changelog was missing date in sign off line")
            entry_date = datetime.datetime.strptime(
                block_date, "%a, %d %b %Y %H:%M:%S %z"
            ).date()
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        # Nothing to rewrite at all.
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    # Rewrite the file in place with only the kept entries.
    with path.replace_fs_path_content() as fs_path, open(
        fs_path, "wt", encoding="utf-8"
    ) as fd:
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(
                f"""\
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """
            )
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        # binNMU entries go into a separate, architecture-qualified changelog file.
        with parent_dir.add_file(
            f"{path.name}.{package.resolved_architecture}"
        ) as binnmu_changelog, open(
            binnmu_changelog.fs_path,
            "wt",
            encoding="utf-8",
        ) as binnmu_fd:
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}
def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    """Normalize and trim the installed Debian changelog and NEWS file.

    For native packages, ``changelog.Debian`` is renamed to ``changelog``
    (or a pre-existing ``changelog`` is used).  Old changelog entries are
    pruned unless the ``notrimdch`` build option is set, and NEWS.Debian
    is pruned to match the versions kept in the changelog.
    """
    doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not doc_dir:
        return
    changelog = doc_dir.get("changelog.Debian")
    if changelog and is_native:
        changelog.name = "changelog"
    elif is_native:
        changelog = doc_dir.get("changelog")

    # Idiom fix: direct membership test instead of `False if ... else True`.
    trim = "notrimdch" not in build_env.deb_build_options

    kept_entries = None
    pruned_changelog = False
    if changelog and changelog.has_fs_path:
        pruned_changelog, kept_entries = _prune_dch_file(
            dctrl, changelog, True, None, trim=trim
        )

    if not trim:
        # binNMU splitting may still have happened above, but no entries
        # are removed, so the NEWS file needs no matching prune.
        return

    news_file = doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and pruned_changelog:
        _prune_dch_file(dctrl, news_file, False, kept_entries)
428_UPSTREAM_CHANGELOG_SOURCE_DIRS = [
429 ".",
430 "doc",
431 "docs",
432]
433_UPSTREAM_CHANGELOG_NAMES = {
434 # The value is a priority to match the debhelper order.
435 # - The suffix weights heavier than the basename (because that is what debhelper did)
436 #
437 # We list the name/suffix in order of priority in the code. That makes it easier to
438 # see the priority directly, but it gives the "lowest" value to the most important items
439 f"{n}{s}": (sw, nw)
440 for (nw, n), (sw, s) in itertools.product(
441 enumerate(["changelog", "changes", "history"], start=1),
442 enumerate(["", ".txt", ".md", ".rst"], start=1),
443 )
444}
445_NONE_TUPLE = (None, (0, 0))
448def _detect_upstream_changelog(names: Iterable[str]) -> Optional[str]:
449 matches = []
450 for name in names:
451 match_priority = _UPSTREAM_CHANGELOG_NAMES.get(name.lower())
452 if match_priority is not None:
453 matches.append((name, match_priority))
454 return min(matches, default=_NONE_TUPLE, key=operator.itemgetter(1))[0]
def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: FSPath,
    source_fs_root: VirtualPath,
) -> None:
    """Install the upstream changelog as ``usr/share/doc/<pkg>/changelog``.

    Prefers a changelog already installed into the package's doc dir
    (renaming the best candidate to "changelog"); otherwise searches the
    usual source-tree locations and copies the best candidate in.

    Fix: a coverage-report annotation had been fused into the
    ``if bdir is None:`` line, making it invalid Python; it was removed.
    """
    doc_dir = f"./usr/share/doc/{dctrl_bin.name}"
    bdir = fs_root.lookup(doc_dir)
    if bdir and not bdir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if bdir:
        if bdir.get("changelog") or bdir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        upstream_changelog = _detect_upstream_changelog(
            p.name for p in bdir.iterdir if p.is_file and p.has_fs_path and p.size > 0
        )
        if upstream_changelog:
            p = bdir.lookup(upstream_changelog)
            assert p is not None  # Mostly as a typing hint
            p.name = "changelog"
            return
    for dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        dir_path = source_fs_root.lookup(dirname)
        if not dir_path or not dir_path.is_dir:
            continue
        changelog_name = _detect_upstream_changelog(
            p.name
            for p in dir_path.iterdir
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if changelog_name:
            if bdir is None:
                bdir = fs_root.mkdirs(doc_dir)
            bdir.insert_file_from_fs_path(
                "changelog",
                dir_path[changelog_name].fs_path,
            )
            break
@dataclasses.dataclass(slots=True)
class _ElfInfo:
    """Book-keeping for one ELF binary during stripping/dbgsym processing."""

    # The path inside the package's fs tree.
    path: VirtualPath
    # Materialized (writable) path on the real file system.
    fs_path: str
    # Whether file(1) reported the binary as stripped (None until resolved).
    is_stripped: Optional[bool] = None
    # GNU build-id (hex) extracted from file(1) output, if any.
    build_id: Optional[str] = None
    # The generated .debug file placed in the dbgsym package tree.
    dbgsym: Optional[FSPath] = None
def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: List[VP],
) -> bool:
    """Walk filter deciding which paths are subject to stripping.

    Mutates *children* in place to prune files that must not be stripped
    and returns False to skip a subtree entirely.
    """
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        # Already-detached debug symbols (…/debug/.build-id); never strip.
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # NOTE(review): `fs_path.name` is a basename, which should never
        # contain "/", so the endswith("debug/") half looks unreachable —
        # confirm whether a path-based check was intended.
        # FIXME: We need a way to opt out of this per #468333/#1016122
        for so_file in (f for f in list(children) if f.name.endswith(".so")):
            children.remove(so_file)
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        # Exclude guile's compiled *.go files from stripping.
        for go_file in (f for f in list(children) if f.name.endswith(".go")):
            children.remove(go_file)
    return True
@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[Dict[str, _ElfInfo]]:
    """Context manager yielding all strippable ELF files keyed by fs path.

    Each file's content is materialized for in-place modification (via
    ``replace_fs_path_content()``) for the duration of the context, and
    the build-id / stripped-ness of every file is resolved before
    yielding.  Yields an empty dict when no ELF files are found.
    """
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        # Enter one replace-content context per file; they all remain open
        # until the caller has finished mutating the binaries.
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info
def _find_all_static_libs(
    fs_root: FSPath,
) -> Iterator[FSPath]:
    """Yield all static library archives shipped in the package.

    Uses the same walk pruning as ELF stripping and identifies archives
    by their ar(1) magic header plus a heuristic check that the content
    looks like it contains ELF members.
    """
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file:
            continue
        if path.name.startswith("lib") and path.name.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        if not path.has_fs_path:
            continue
        with path.open(byte_io=True) as fd:
            magic = fd.read(8)
            if magic not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            data = fd.read(1024 * 1024)
            if b"\0" not in data and ELF_MAGIC not in data:
                continue
        yield path
@contextlib.contextmanager
def _all_static_libs(fs_root: FSPath) -> Iterator[List[str]]:
    """Context manager yielding writable fs paths of all static libraries.

    Each detected archive is materialized for in-place modification for
    the duration of the context; yields an empty list when the package
    ships no static libraries.
    """
    static_libs = list(_find_all_static_libs(fs_root))
    if not static_libs:
        yield []
        return
    with ExitStack() as stack:
        writable_paths: List[str] = []
        for lib in static_libs:
            writable_paths.append(stack.enter_context(lib.replace_fs_path_content()))
        yield writable_paths
# Extracts the hex build-id from file(1) output, e.g. "BuildID[sha1]=abcdef…".
_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")
def _resolve_build_ids(elf_info: Dict[str, _ElfInfo]) -> None:
    """Fill in ``is_stripped``/``build_id`` for each ELF file via file(1).

    Runs ``file -00 -N`` in xargs-style batches; the NUL-delimited output
    alternates between file name and verdict.
    """
    static_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        # file(1)'s sandbox does not play well with fakeroot's LD_PRELOAD
        # interception; disable it when running under fakeroot.
        static_cmd.append("--no-sandbox")

    for cmd in xargs(static_cmd, (i.fs_path for i in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        lines = output.rstrip(b"\0").split(b"\0")

        for fs_path_b, verdict in grouper(lines, 2, incomplete="strict"):
            fs_path = fs_path_b.decode("utf-8")
            info = elf_info[fs_path]
            info.is_stripped = b"not stripped" not in verdict
            m = _FILE_BUILD_ID_RE.search(verdict)
            if m:
                info.build_id = m.group(1).decode("utf-8")
def _make_debug_file(
    objcopy: str, fs_path: str, build_id: str, dbgsym_fs_root: FSPath
) -> FSPath:
    """Extract debug symbols from *fs_path* into the dbgsym tree.

    The debug file is placed at ``usr/lib/debug/.build-id/<xx>/<rest>.debug``
    (keyed by the GNU build-id); if a file for that build-id already
    exists it is reused.  Aborts the build if objcopy fails.

    :return: The (possibly pre-existing) .debug file in the dbgsym tree.
    """
    dbgsym_dirname = f"./usr/lib/debug/.build-id/{build_id[0:2]}/"
    dbgsym_basename = f"{build_id[2:]}.debug"
    dbgsym_dir = dbgsym_fs_root.mkdirs(dbgsym_dirname)
    if dbgsym_basename in dbgsym_dir:
        # Same build-id seen before (e.g. hardlinked binaries): reuse it.
        return dbgsym_dir[dbgsym_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with dbgsym_dir.add_file(
        dbgsym_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as dbgsym:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    dbgsym.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, dbgsym.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return dbgsym
def _strip_binary(strip: str, options: List[str], paths: Iterable[str]) -> None:
    """Run the strip command with *options* over all *paths*.

    No-op when *paths* is empty.  Invocations are batched xargs-style; a
    failing strip aborts the build with an error.

    :param strip: The (possibly cross) strip executable to use.
    :param options: Extra command line options for strip.
    :param paths: Writable file system paths to strip.
    """
    # We assume the paths are obtained via `p.replace_fs_path_content()`,
    # which is the case at the time of written and should remain so forever.
    it = iter(paths)
    first = next(it, None)
    if first is None:
        return
    static_cmd = [strip]
    static_cmd.extend(options)

    # (Cleanup: dropped a redundant `(f for f in it)` wrapper around `it`.)
    for cmd in xargs(static_cmd, itertools.chain((first,), it)):
        _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}")
        try:
            subprocess.check_call(
                cmd,
                stdin=subprocess.DEVNULL,
                restore_signals=True,
            )
        except subprocess.CalledProcessError:
            # Message fix: was missing "to" ("above understand").
            _error(
                f"Attempting to remove ELF debug info failed. Please review the error from {strip} above"
                f" to understand what went wrong."
            )
def _attach_debug(objcopy: str, elf_binary: VirtualPath, dbgsym: FSPath) -> None:
    """Embed a .gnu_debuglink to *dbgsym* into *elf_binary* via objcopy.

    The dbgsym file is materialized via ``replace_fs_path_content()`` so
    objcopy receives a concrete, writable on-disk path for the link
    target.  Aborts the build if objcopy fails.
    """
    dbgsym_fs_path: str
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            # Message fix: was missing "to" ("above understand").
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above to understand what went wrong."
            )
def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: FSPath,
    unstripped_elf_info: List[_ElfInfo],
) -> None:
    """Deduplicate DWARF debug info across the package's ELF files via dwz.

    When more than one ELF file is present, a dwz multifile is generated
    and, if non-empty, installed into the dbgsym tree under
    ``usr/lib/debug/.dwz/``.  Skipped for udebs and empty input.
    """
    if not unstripped_elf_info or dctrl.is_udeb:
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        # -m: where dwz writes the multifile now; -M: the path it will
        # have once installed in the dbgsym package.
        fs_content_dir = generated_content_dir()
        fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    # Only install the multifile if dwz actually produced content.
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )
def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: FSPath,
    dbgsym_fs_root: VirtualPath,
) -> List[str]:
    """Strip the package's binaries and move debug info to the dbgsym tree.

    Static libraries are stripped of debug info outright.  ELF binaries
    get their DWARF data deduplicated (dwz), extracted into build-id
    keyed .debug files in *dbgsym_fs_root*, stripped, and then linked to
    their debug files via ``--add-gnu-debuglink``.

    :return: Sorted, de-duplicated build-ids of all relocated debug files.
    """
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        # dwz must run before the debug info is extracted/stripped.
        _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When run strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

    # Set for uniqueness
    all_debug_info = sorted(
        {assume_not_none(i.build_id) for i in unstripped_elf_info}
    )

    dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
    dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
    return all_debug_info
def run_package_processors(
    manifest: HighLevelManifest,
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    """Run every applicable plugin-provided package processor on the package."""
    binary_package = package_metadata_context.binary_package
    all_processors = manifest.plugin_provided_feature_set.package_processors_in_order()
    for processor in all_processors:
        if processor.applies_to(binary_package):
            processor.run_package_processor(fs_root, None, package_metadata_context)
def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: HighLevelManifest,
) -> None:
    """Compute shlibs files and dpkg-shlibdeps inputs across all packages.

    Seeds the combined shlibs content from ``debian/shlibs.local`` (if
    present), runs ``compute_shlibs`` for every acted-on arch-dependent
    package, and finally records the generated shlibs.local plus the
    materialized shlib directories on each package's control-file creator
    for later use by dpkg-shlibdeps.
    """
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: List[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        control_output_dir = assume_not_none(binary_package_data.control_output_dir)
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                control_output_dir,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            # Collect errors so every package gets reported before aborting.
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "wt", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    # Second pass: record the shlibs inputs on every acted-on package.
    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )
917def _relevant_service_definitions(
918 service_rule: ServiceRule,
919 service_managers: Union[List[str], FrozenSet[str]],
920 by_service_manager_key: Mapping[
921 Tuple[str, str, str, str], Tuple[ServiceManagerDetails, ServiceDefinition[Any]]
922 ],
923 aliases: Mapping[str, Sequence[Tuple[str, str, str, str]]],
924) -> Iterable[Tuple[Tuple[str, str, str, str], ServiceDefinition[Any]]]:
925 as_keys = (key for key in aliases[service_rule.service])
927 pending_queue = {
928 key
929 for key in as_keys
930 if key in by_service_manager_key
931 and service_rule.applies_to_service_manager(key[-1])
932 }
933 relevant_names = {}
934 seen_keys = set()
936 if not pending_queue:
937 service_manager_names = ", ".join(sorted(service_managers))
938 _error(
939 f"No none of the service managers ({service_manager_names}) detected a service named"
940 f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
941 f" but the manifest definition at {service_rule.definition_source} requested that."
942 )
944 while pending_queue:
945 next_key = pending_queue.pop()
946 seen_keys.add(next_key)
947 _, definition = by_service_manager_key[next_key]
948 yield next_key, definition
949 for name in definition.names:
950 for target_key in aliases[name]:
951 if (
952 target_key not in seen_keys
953 and service_rule.applies_to_service_manager(target_key[-1])
954 ):
955 pending_queue.add(target_key)
957 return relevant_names
def handle_service_management(
    binary_package_data: BinaryPackageData,
    manifest: HighLevelManifest,
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:
    """Detect services in the package and run service manager integrations.

    Every registered service manager's detector is run over *fs_root*;
    manifest-provided service rules are applied to the detected
    definitions (validating that explicitly named service managers
    actually matched something), and finally each service manager's
    integrator is invoked with its final set of definitions.
    """
    by_service_manager_key = {}
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    # Up-front validation that every explicitly requested service manager exists.
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    for service_manager_details in feature_set.service_managers.values():
        service_registry = ServiceRegistryImpl(service_manager_details)
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        # Index detected definitions by (name, type, scope, manager) and
        # record every alias name pointing at that key.
        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            # Replace the detected definition with the rule-customized one.
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition match the relevant services."
            )

    # Regroup the final definitions per service manager for the integrators.
    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )
def setup_control_files(
    binary_package_data: BinaryPackageData,
    manifest: HighLevelManifest,
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: List[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    """Assemble the DEBIAN/ control information for a binary package.

    Depending on the integration mode, this either generates maintscripts,
    triggers, conffiles, etc. from scratch (``allow_ctrl_file_management=True``)
    or reuses the control files already staged by dh_installdeb
    (``allow_ctrl_file_management=False``).  In both modes the function ends by
    invoking dpkg-gencontrol (via `_generate_control_files`) and, for debs,
    generating the md5sums file.

    :param binary_package_data: Per-package build state (fs root, control
      output directory, substvars, control member creator, ...).
    :param manifest: The high-level manifest (provides per-package state and
      the plugin-provided feature set).
    :param dbgsym_fs_root: Virtual fs root containing the detached debug symbols.
    :param dbgsym_ids: Build-ids for the Build-Ids field of the dbgsym package.
    :param package_metadata_context: Context handed to plugin detectors.
    :param allow_ctrl_file_management: Whether debputy may generate
      maintscripts/triggers/conffiles itself.  When False, any manifest or
      plugin attempt to add such content is a hard error.
    """
    binary_package = package_metadata_context.binary_package
    control_output_dir = assume_not_none(binary_package_data.control_output_dir)
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        # Full management mode: debputy generates alternatives, debconf
        # templates, service integration and maintscript snippets itself.
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_dir,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        # Run every applicable plugin-provided metadata/maintscript detector.
        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        # Materialize the collected snippets as maintainer scripts.
        for script in snippets:
            _generate_snippet(
                control_output_dir,
                script,
                package_state.maintscript_snippets,
            )

    else:
        # dh integration mode: dh_installdeb has already produced the control
        # files; reject anything that would require generating more of them.
        # (Presumably the same object as `package_state` above - same lookup
        # key; verify before consolidating.)
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )

        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )

        # Even in this mode, debputy still runs its own dpkg-shlibdeps detector.
        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        # Re-use the control files dh_installdeb staged, except "control" and
        # "md5sums", which debputy regenerates itself below.
        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            # No staged control dir at all; nothing to copy over.
            existing_control_files = []

        if existing_control_files:
            # `cp -a` preserves permissions/timestamps of the staged files.
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_dir)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data.source_package,
            binary_package,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        # NOTE(review): this assert looks inverted - in the non-management
        # branch above, any generated trigger already calls _error(), so
        # reaching this point with triggers would seem to imply
        # allow_ctrl_file_management is True.  Confirm before relying on it.
        assert not allow_ctrl_file_management
        dest_file = os.path.join(control_output_dir, "triggers")
        with open(dest_file, "at", encoding="utf-8") as fd:
            fd.writelines(
                textwrap.dedent(
                    f"""\
                    # Added by {t.provider_source_id} from {t.provider.plugin_name}
                    {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                    """
                )
                for t in generated_triggers
            )
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            binary_package,
            control_output_dir,
            fs_root,
            manifest.debian_dir,
        )

    _generate_control_files(
        binary_package_data.source_package,
        binary_package,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )
def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
) -> None:
    """Write the maintainer script `script` if any snippets were registered for it.

    Produces a `#!/bin/sh` script (mode 0755) in `control_output_dir`; does
    nothing when there is no (non-empty) snippet content for the script.
    """
    container = maintscript_snippets.get(script)
    if container is None:
        return
    # Removal-time scripts run their snippets in the opposite order.
    is_removal_script = script in ("prerm", "postrm")
    parts = [
        container.generate_snippet(reverse=is_removal_script),
        container.generate_snippet(snippet_order="service", reverse=is_removal_script),
    ]
    if is_removal_script:
        parts.reverse()
    body = "".join(f"{part}\n" for part in parts if part)
    if not body:
        return
    script_path = os.path.join(control_output_dir, script)
    with open(script_path, "wt") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        fd.write(body)
        os.chmod(fd.fileno(), 0o755)
def _add_conffiles(
    conffiles_dest: str,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    """Append the given paths to the conffiles control file.

    The file is removed again if it ends up empty (no matches and no
    pre-existing content).
    """
    with open(conffiles_dest, "at") as fd:
        for match in conffile_matches:
            abs_path = match.absolute
            assert match.is_file
            fd.write(f"{abs_path}\n")
    if not os.stat(conffiles_dest).st_size:
        os.unlink(conffiles_dest)
def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    """Ensure misc:Depends and misc:Pre-Depends exist (possibly empty) in substvars."""
    for required_substvar in ("misc:Depends", "misc:Pre-Depends"):
        if required_substvar not in substvars:
            substvars[required_substvar] = ""
def _compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size"""
    total_kb = 0
    seen_hard_links: Set[Tuple[int, int]] = set()
    for entry in fs_root.all_paths():
        if not entry.is_dir and entry.has_fs_path:
            stat_result = entry.stat()
            if stat_result.st_nlink > 1:
                link_id = (stat_result.st_dev, stat_result.st_ino)
                if link_id in seen_hard_links:
                    # Count each hard-linked inode only once.
                    continue
                seen_hard_links.add(link_id)
            # Round up to whole KiB, as dpkg does.
            entry_kb = (stat_result.st_size + 1023) // 1024
        elif entry.is_symlink:
            entry_kb = (len(entry.readlink()) + 1023) // 1024
        else:
            # Directories (and entries without a backing fs path) count as 1 KiB.
            entry_kb = 1
        total_kb += entry_kb
    return total_kb
def _generate_dbgsym_control_file_if_relevant(
    binary_package: BinaryPackage,
    dbgsym_fs_root: VirtualPath,
    dbgsym_root_dir: str,
    dbgsym_ids: str,
    multi_arch: Optional[str],
    dctrl: str,
    extra_common_params: Sequence[str],
) -> None:
    """Generate the DEBIAN/control file for the -dbgsym companion package.

    Invokes dpkg-gencontrol with the fields of the main package, overriding
    or stripping everything that does not apply to a debug-symbols package.

    :param binary_package: The package the debug symbols were stripped from.
    :param dbgsym_fs_root: Virtual fs root of the dbgsym package contents.
    :param dbgsym_root_dir: On-disk staging directory for the dbgsym package.
    :param dbgsym_ids: Space-separated build-ids (value of the Build-Ids field).
    :param multi_arch: Multi-Arch value of the main package (if any); the
      dbgsym package only inherits "Multi-Arch: same".
    :param dctrl: Path of the control file to pass to dpkg-gencontrol.
    :param extra_common_params: dpkg-gencontrol parameters shared with the
      main package.
    """
    section = binary_package.archive_section
    component = ""
    extra_params = []
    # Debug packages live in "<component>/debug"; derive the component prefix
    # from the main package's section ("main" is implicit and gets no prefix).
    if section is not None and "/" in section and not section.startswith("main/"):
        component = section.split("/", 1)[1] + "/"
    if multi_arch != "same":
        extra_params.append("-UMulti-Arch")
    extra_params.append("-UReplaces")
    extra_params.append("-UBreaks")
    dbgsym_control_dir = os.path.join(dbgsym_root_dir, "DEBIAN")
    ensure_dir(dbgsym_control_dir)
    # Pass it via cmd-line to make it more visible that we are providing the
    # value. It also prevents the dbgsym package from picking up this value.
    ctrl_fs_root = FSROOverlay.create_root_dir("DEBIAN", dbgsym_control_dir)
    total_size = _compute_installed_size(dbgsym_fs_root) + _compute_installed_size(
        ctrl_fs_root
    )
    extra_params.append(f"-VInstalled-Size={total_size}")
    extra_params.extend(extra_common_params)

    package = binary_package.name
    # With a debhelper-generated control file, dpkg-gencontrol can find the
    # -dbgsym stanza directly; otherwise select the main package's stanza.
    package_selector = (
        binary_package.name
        if dctrl == "debian/control"
        else f"{binary_package.name}-dbgsym"
    )
    dpkg_cmd = [
        "dpkg-gencontrol",
        f"-p{package_selector}",
        # FIXME: Support d/<pkg>.changelog at some point.
        "-ldebian/changelog",
        "-T/dev/null",
        f"-c{dctrl}",
        f"-P{dbgsym_root_dir}",
        f"-DPackage={package}-dbgsym",
        "-DDepends=" + package + " (= ${binary:Version})",
        f"-DDescription=debug symbols for {package}",
        f"-DSection={component}debug",
        f"-DBuild-Ids={dbgsym_ids}",
        "-UPre-Depends",
        "-URecommends",
        "-USuggests",
        "-UEnhances",
        "-UProvides",
        "-UEssential",
        "-UConflicts",
        "-DPriority=optional",
        "-UHomepage",
        "-UImportant",
        "-UBuilt-Using",
        "-UStatic-Built-Using",
        "-DAuto-Built-Package=debug-symbols",
        "-UProtected",
        *extra_params,
    ]
    print_command(*dpkg_cmd)
    try:
        subprocess.check_call(dpkg_cmd)
    except subprocess.CalledProcessError:
        # Fixed: the original implicit string concatenation rendered
        # "output from  dpkg-gencontrol" (double space).
        _error(
            f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output"
            " from dpkg-gencontrol above to understand what went wrong."
        )
    os.chmod(os.path.join(dbgsym_root_dir, "DEBIAN", "control"), 0o644)
1391def _all_parent_directories_of(directories: Iterable[str]) -> Set[str]:
1392 result = {"."}
1393 for path in directories:
1394 current = os.path.dirname(path)
1395 while current and current not in result:
1396 result.add(current)
1397 current = os.path.dirname(current)
1398 return result
def _auto_compute_multi_arch(
    binary_package: BinaryPackage,
    control_output_dir: str,
    fs_root: FSPath,
) -> Optional[str]:
    """Determine whether the package can safely default to "Multi-Arch: same".

    Returns "same" when the package is architecture-dependent, ships no
    maintainer scripts, and only contains paths known to be co-installable
    across architectures; otherwise returns None (no auto-detected value).
    """
    resolved_arch = binary_package.resolved_architecture
    if resolved_arch == "all":
        return None
    # Any maintainer script disqualifies the package from "M-A: same".
    if any(
        os.path.isfile(os.path.join(control_output_dir, script))
        for script in ALL_CONTROL_SCRIPTS
    ):
        return None

    multiarch_tuple = binary_package.deb_multiarch
    # Arch-qualified directories are co-installable by construction; their
    # contents need not be inspected.
    acceptable_no_descend_paths = {
        f"./usr/lib/{multiarch_tuple}",
        f"./usr/include/{multiarch_tuple}",
    }
    acceptable_files = {
        f"./usr/share/doc/{binary_package.name}/{basename}"
        for basename in (
            "copyright",
            "changelog.gz",
            "changelog.Debian.gz",
            f"changelog.Debian.{resolved_arch}.gz",
            "NEWS.Debian",
            "NEWS.Debian.gz",
            "README.Debian",
            "README.Debian.gz",
        )
    }
    acceptable_intermediate_dirs = _all_parent_directories_of(
        itertools.chain(acceptable_no_descend_paths, acceptable_files)
    )

    for fs_path, children in fs_root.walk():
        path = fs_path.path
        if path in acceptable_no_descend_paths:
            # Prune the walk; everything below is acceptable.
            children.clear()
        elif path not in acceptable_intermediate_dirs and path not in acceptable_files:
            # Found a path that could collide across architectures.
            return None

    return "same"
@functools.lru_cache()
def _has_t64_enabled() -> bool:
    """Check whether dpkg-buildflags reports the time64 ABI feature as enabled."""
    try:
        abi_info = subprocess.check_output(
            ["dpkg-buildflags", "--query-features", "abi"]
        ).decode()
    except (subprocess.CalledProcessError, FileNotFoundError):
        # No dpkg-buildflags (or no ABI feature support); treat time64 as off.
        return False
    return any(
        stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes"
        for stanza in Deb822.iter_paragraphs(abi_info)
    )
def _t64_migration_substvar(
    binary_package: BinaryPackage,
    control_output_dir: str,
    substvars: FlushableSubstvars,
) -> None:
    """Populate the time64-transition Provides substvar for library packages.

    On architectures where the time64 ABI change is a no-op, the renamed
    "t64" package provides its pre-rename name at the same version.
    """
    name = binary_package.name
    compat_name = binary_package.fields.get("X-Time64-Compat")
    if compat_name is None and not _T64_REGEX.match(name):
        # Not a t64-renamed package and no explicit compat name: nothing to do.
        return

    # Only shared library packages (those shipping symbols/shlibs control
    # files) take part in the transition.
    if not any(
        os.path.isfile(os.path.join(control_output_dir, ctrl_name))
        for ctrl_name in ["symbols", "shlibs"]
    ):
        return

    if compat_name is None:
        compat_name = name.replace("t64", "", 1)
        if compat_name == name:
            raise AssertionError(
                f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
                " As a work around, you can explicitly provide a X-Time64-Compat header in debian/control"
                " where you specify the desired compat name."
            )

    arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")

    if arch_bits != "32" or not _has_t64_enabled():
        # ABI unchanged here: the t64 package can provide the old name at
        # the exact same version.
        substvars.add_dependency(
            _T64_PROVIDES,
            f"{compat_name} (= ${{binary:Version}})",
        )
    elif _T64_PROVIDES not in substvars:
        # Keep the substvar defined (empty) to avoid dpkg-gencontrol warnings.
        substvars[_T64_PROVIDES] = ""
@functools.lru_cache()
def dpkg_field_list_pkg_dep() -> Sequence[str]:
    """Return the package relationship field names known to dpkg (cached)."""
    # Ask dpkg's own perl API so the list stays in sync with the installed dpkg.
    cmd = [
        "perl",
        "-MDpkg::Control::Fields",
        "-e",
        r'print "$_\n" for field_list_pkg_dep',
    ]
    try:
        raw_output = subprocess.check_output(cmd)
    except (FileNotFoundError, subprocess.CalledProcessError):
        _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
    return raw_output.decode("utf-8").splitlines(keepends=False)
def _handle_relationship_substvars(
    source: SourcePackage,
    dctrl_file: BinaryPackage,
    substvars: FlushableSubstvars,
    has_dbgsym: bool,
) -> Optional[str]:
    """Merge relationship substvars into a synthesized debian/control file.

    Substvars whose name targets a dpkg relationship field (e.g.
    ``foo:Depends``) are appended to the corresponding field of the binary
    stanza, and a temporary control file with the merged stanza(s) is written
    for dpkg-gencontrol to consume.

    :param source: The source package (its stanza is copied verbatim).
    :param dctrl_file: The binary package whose stanza gets the merged fields.
    :param substvars: The package's substvars to scan for relationship fields.
    :param has_dbgsym: Whether a -dbgsym package will be built; if so, a
      minimal extra stanza is emitted for it.
    :return: The path of the generated control file, or None when the plain
      debian/control can be used as-is.
    """
    relationship_fields = dpkg_field_list_pkg_dep()
    relationship_fields_lc = frozenset(x.lower() for x in relationship_fields)
    # field name (lowercase) -> list of "${substvar}" references to append.
    substvar_fields = collections.defaultdict(list)
    needs_dbgsym_stanza = False
    for substvar_name, substvar in substvars.as_substvar.items():
        # Only namespaced substvars ("<prefix>:<Field>") can target a field.
        if ":" not in substvar_name:
            continue
        if substvar.assignment_operator in ("$=", "!="):
            # Will create incorrect results if there is a dbgsym and we do nothing
            needs_dbgsym_stanza = True

        if substvar.assignment_operator == "$=":
            # Automatically handled; no need for manual merging.
            continue
        _, field = substvar_name.rsplit(":", 1)
        field_lc = field.lower()
        if field_lc not in relationship_fields_lc:
            continue
        substvar_fields[field_lc].append("${" + substvar_name + "}")

    if not has_dbgsym:
        needs_dbgsym_stanza = False

    # Nothing to merge and no dbgsym stanza needed: keep debian/control.
    if not substvar_fields and not needs_dbgsym_stanza:
        return None

    replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)

    for field_name in relationship_fields:
        field_name_lc = field_name.lower()
        addendum = substvar_fields.get(field_name_lc)
        if addendum is None:
            # No merging required
            continue
        substvars_part = ", ".join(addendum)
        existing_value = replacement_stanza.get(field_name)

        if existing_value is None or existing_value.isspace():
            final_value = substvars_part
        else:
            # Strip a trailing comma before appending to avoid ", ,".
            existing_value = existing_value.rstrip().rstrip(",")
            final_value = f"{existing_value}, {substvars_part}"
        replacement_stanza[field_name] = final_value

    tmpdir = generated_content_dir(package=dctrl_file)
    # delete=False: the file must outlive this function (dpkg-gencontrol reads it).
    with tempfile.NamedTemporaryFile(
        mode="wb",
        dir=tmpdir,
        suffix="__DEBIAN_control",
        delete=False,
    ) as fd:
        try:
            # Prefer the native dump if `fields` supports it directly.
            cast("Any", source.fields).dump(fd)
        except AttributeError:
            debian.deb822.Deb822(source.fields).dump(fd)
        fd.write(b"\n")
        replacement_stanza.dump(fd)

        if has_dbgsym:
            # Minimal stanza to avoid substvars warnings. Most fields are still set
            # via -D.
            dbgsym_stanza = Deb822()
            dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
            dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
            dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
            fd.write(b"\n")
            dbgsym_stanza.dump(fd)

    return fd.name
def _generate_control_files(
    source_package: SourcePackage,
    binary_package: BinaryPackage,
    package_state: PackageTransformationDefinition,
    control_output_dir: str,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
    dbgsym_root_fs: Optional[VirtualPath],
    dbgsym_build_ids: Optional[List[str]],
) -> None:
    """Run dpkg-gencontrol for the package (and its -dbgsym, if present).

    Computes Installed-Size, auto-detects "Multi-Arch: same" where safe,
    handles the time64 Provides substvar, merges relationship substvars, and
    finally generates DEBIAN/control (plus md5sums for non-udebs).

    :param source_package: The source package (for the synthesized control file).
    :param binary_package: The binary package being assembled.
    :param package_state: Manifest state (e.g. a manifest-set binary version).
    :param control_output_dir: Where DEBIAN/control and friends are written.
    :param fs_root: Virtual fs root of the package contents.
    :param substvars: The package's substvars (flushed for dpkg-gencontrol).
    :param dbgsym_root_fs: Virtual fs root of the dbgsym package, if any.
    :param dbgsym_build_ids: Build-ids for the Build-Ids field, if any.
    """
    package = binary_package.name
    extra_common_params = []
    extra_params_specific = []
    _ensure_base_substvars_defined(substvars)
    if "Installed-Size" not in substvars:
        # Pass it via cmd-line to make it more visible that we are providing the
        # value. It also prevents the dbgsym package from picking up this value.
        ctrl_fs_root = FSROOverlay.create_root_dir("DEBIAN", control_output_dir)
        total_size = _compute_installed_size(fs_root) + _compute_installed_size(
            ctrl_fs_root
        )
        extra_params_specific.append(f"-VInstalled-Size={total_size}")

    ma_value = binary_package.fields.get("Multi-Arch")
    if not binary_package.is_udeb and ma_value is None:
        # No explicit Multi-Arch field; see if "same" can be auto-detected.
        ma_value = _auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
        if ma_value is not None:
            _info(
                f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
                ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
                ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
                ' relevant part of "debian/control" to disable this feature.'
            )
            # We want this to apply to the `-dbgsym` package as well to avoid
            # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
            extra_common_params.append(f"-DMulti-Arch={ma_value}")
    elif ma_value == "no":
        # An explicit "no" is the default; strip the field from the output.
        extra_common_params.append("-UMulti-Arch")

    dbgsym_root_dir = dhe_dbgsym_root_dir(binary_package)
    dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
    if package_state.binary_version is not None:
        # Manifest-provided binary version overrides the changelog version.
        extra_common_params.append(f"-v{package_state.binary_version}")

    _t64_migration_substvar(binary_package, control_output_dir, substvars)

    with substvars.flush() as flushed_substvars:
        has_dbgsym = dbgsym_root_fs is not None and any(
            f for f in dbgsym_root_fs.all_paths() if f.is_file
        )
        dctrl_file = _handle_relationship_substvars(
            source_package,
            binary_package,
            substvars,
            has_dbgsym,
        )
        if dctrl_file is None:
            # No merged substvars; the plain control file is sufficient.
            dctrl_file = "debian/control"

        if has_dbgsym:
            _generate_dbgsym_control_file_if_relevant(
                binary_package,
                dbgsym_root_fs,
                dbgsym_root_dir,
                dbgsym_ids,
                ma_value,
                dctrl_file,
                extra_common_params,
            )
            generate_md5sums_file(
                os.path.join(dbgsym_root_dir, "DEBIAN"),
                dbgsym_root_fs,
            )
        elif dbgsym_ids:
            # No dbgsym package, but the build-ids still go on the main package.
            extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")

        ctrl_file = os.path.join(control_output_dir, "control")
        dpkg_cmd = [
            "dpkg-gencontrol",
            f"-p{package}",
            # FIXME: Support d/<pkg>.changelog at some point.
            "-ldebian/changelog",
            f"-c{dctrl_file}",
            f"-T{flushed_substvars}",
            f"-O{ctrl_file}",
            f"-P{control_output_dir}",
            *extra_common_params,
            *extra_params_specific,
        ]
        print_command(*dpkg_cmd)
        try:
            subprocess.check_call(dpkg_cmd)
        except subprocess.CalledProcessError:
            # NOTE(review): the implicit string concatenation below renders a
            # double space ("output from  dpkg-gencontrol").
            _error(
                f"Attempting to generate DEBIAN/control file for {package} failed. Please review the output from "
                " dpkg-gencontrol above to understand what went wrong."
            )
        os.chmod(ctrl_file, 0o644)

    if not binary_package.is_udeb:
        generate_md5sums_file(control_output_dir, fs_root)