author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-08-26 10:22:39 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-08-26 10:22:39 +0000
commit    a7a8dcc3f3e7ffa12ac734a1ce0a6f4ef88ed6c9 (patch)
tree      28525063835d0d1b64a06217746b0c1c9b87baeb /src
parent    Releasing progress-linux version 0.1.44-0.0~progress7.99u1. (diff)
Merging upstream version 0.1.45.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src')
-rw-r--r--  src/debputy/_manifest_constants.py | 2
-rw-r--r--  src/debputy/build_support/__init__.py | 7
-rw-r--r--  src/debputy/build_support/build_context.py | 100
-rw-r--r--  src/debputy/build_support/build_logic.py | 193
-rw-r--r--  src/debputy/build_support/buildsystem_detection.py | 112
-rw-r--r--  src/debputy/build_support/clean_logic.py | 233
-rw-r--r--  src/debputy/builtin_manifest_rules.py | 8
-rw-r--r--  src/debputy/commands/debputy_cmd/__main__.py | 126
-rw-r--r--  src/debputy/commands/debputy_cmd/context.py | 78
-rw-r--r--  src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py | 1
-rw-r--r--  src/debputy/commands/debputy_cmd/plugin_cmds.py | 20
-rw-r--r--  src/debputy/deb_packaging_support.py | 10
-rw-r--r--  src/debputy/dh_migration/migration.py | 60
-rw-r--r--  src/debputy/dh_migration/migrators.py | 62
-rw-r--r--  src/debputy/dh_migration/migrators_impl.py | 73
-rw-r--r--  src/debputy/exceptions.py | 8
-rw-r--r--  src/debputy/highlevel_manifest.py | 54
-rw-r--r--  src/debputy/highlevel_manifest_parser.py | 101
-rw-r--r--  src/debputy/installations.py | 8
-rw-r--r--  src/debputy/integration_detection.py | 10
-rw-r--r--  src/debputy/linting/lint_impl.py | 6
-rw-r--r--  src/debputy/linting/lint_util.py | 22
-rw-r--r--  src/debputy/lsp/lsp_debian_control.py | 6
-rw-r--r--  src/debputy/lsp/lsp_debian_control_reference_data.py | 150
-rw-r--r--  src/debputy/lsp/lsp_debian_debputy_manifest.py | 36
-rw-r--r--  src/debputy/lsp/lsp_generic_yaml.py | 2
-rw-r--r--  src/debputy/lsp/maint_prefs.py | 218
-rw-r--r--  src/debputy/maintscript_snippet.py | 2
-rw-r--r--  src/debputy/manifest_conditions.py | 28
-rw-r--r--  src/debputy/manifest_parser/base_types.py | 107
-rw-r--r--  src/debputy/manifest_parser/declarative_parser.py | 420
-rw-r--r--  src/debputy/manifest_parser/exceptions.py | 10
-rw-r--r--  src/debputy/manifest_parser/mapper_code.py | 21
-rw-r--r--  src/debputy/manifest_parser/parse_hints.py | 259
-rw-r--r--  src/debputy/manifest_parser/parser_data.py | 14
-rw-r--r--  src/debputy/manifest_parser/tagging_types.py | 36
-rw-r--r--  src/debputy/manifest_parser/util.py | 27
-rw-r--r--  src/debputy/package_build/assemble_deb.py | 4
-rw-r--r--  src/debputy/packager_provided_files.py | 2
-rw-r--r--  src/debputy/packages.py | 10
-rw-r--r--  src/debputy/path_matcher.py | 2
-rw-r--r--  src/debputy/plugin/api/feature_set.py | 22
-rw-r--r--  src/debputy/plugin/api/impl.py | 184
-rw-r--r--  src/debputy/plugin/api/impl_types.py | 144
-rw-r--r--  src/debputy/plugin/api/parser_tables.py | 67
-rw-r--r--  src/debputy/plugin/api/plugin_parser.py | 4
-rw-r--r--  src/debputy/plugin/api/spec.py | 45
-rw-r--r--  src/debputy/plugin/api/std_docs.py | 142
-rw-r--r--  src/debputy/plugin/debputy/binary_package_rules.py | 14
-rw-r--r--  src/debputy/plugin/debputy/build_system_rules.py | 2319
-rw-r--r--  src/debputy/plugin/debputy/manifest_root_rules.py | 12
-rw-r--r--  src/debputy/plugin/debputy/private_api.py | 86
-rw-r--r--  src/debputy/plugin/debputy/to_be_api_types.py | 1039
-rw-r--r--  src/debputy/plugin/plugin_state.py | 113
-rw-r--r--  src/debputy/transformation_rules.py | 36
-rw-r--r--  src/debputy/types.py | 135
-rw-r--r--  src/debputy/util.py | 212
57 files changed, 6395 insertions, 827 deletions
diff --git a/src/debputy/_manifest_constants.py b/src/debputy/_manifest_constants.py
index 3ed992b..974ef7b 100644
--- a/src/debputy/_manifest_constants.py
+++ b/src/debputy/_manifest_constants.py
@@ -8,6 +8,8 @@ assert DEFAULT_MANIFEST_VERSION in SUPPORTED_MANIFEST_VERSIONS
MK_MANIFEST_VERSION = "manifest-version"
MK_PACKAGES = "packages"
+MK_BUILDS = "builds"
+
MK_INSTALLATIONS = "installations"
MK_INSTALLATIONS_INSTALL = "install"
MK_INSTALLATIONS_MULTI_DEST_INSTALL = "multi-dest-install"
diff --git a/src/debputy/build_support/__init__.py b/src/debputy/build_support/__init__.py
new file mode 100644
index 0000000..8123659
--- /dev/null
+++ b/src/debputy/build_support/__init__.py
@@ -0,0 +1,7 @@
+from debputy.build_support.build_logic import perform_builds
+from debputy.build_support.clean_logic import perform_clean
+
+__all__ = [
+ "perform_clean",
+ "perform_builds",
+]
diff --git a/src/debputy/build_support/build_context.py b/src/debputy/build_support/build_context.py
new file mode 100644
index 0000000..2eeef66
--- /dev/null
+++ b/src/debputy/build_support/build_context.py
@@ -0,0 +1,100 @@
+from typing import Mapping, Optional
+
+from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
+from debputy.commands.debputy_cmd.context import CommandContext
+from debputy.highlevel_manifest import HighLevelManifest
+from debputy.manifest_conditions import _run_build_time_tests
+
+
+class BuildContext:
+ @staticmethod
+ def from_command_context(
+ cmd_context: CommandContext,
+ ) -> "BuildContext":
+ return BuildContextImpl(cmd_context)
+
+ @property
+ def deb_build_options(self) -> Mapping[str, Optional[str]]:
+ raise NotImplementedError
+
+ def parallelization_limit(self, *, support_zero_as_unlimited: bool = False) -> int:
+ """Parallelization limit of the build
+
+ This is an accessor that reads the `parallel` option from `DEB_BUILD_OPTIONS` with relevant
+ fallback behavior.
+
+ :param support_zero_as_unlimited: The debhelper framework allowed `0` to mean unlimited
+ in some build systems. If the build system supports this, it should set this option
+ to True, which will allow `0` as a possible return value. When this option is False
+ (which is the default), `0` will be remapped to a high number to preserve the effect
+ in spirit (said fallback number is also from `debhelper`).
+ """
+ limit = self.deb_build_options.get("parallel")
+ if limit is None:
+ return 1
+ try:
+ v = int(limit)
+ except ValueError:
+ return 1
+ if v == 0 and not support_zero_as_unlimited:
+ # debhelper allowed "0" to be used as unlimited in some cases. Preserve that feature
+ # for callers that are prepared for it. For everyone else, remap 0 to an obscene number
+ # that de facto has the same behaviour
+ #
+ # The number is taken out of `cmake.pm` from `debhelper` to be "Bug compatible" with
+ # debhelper on the fallback as well.
+ return 999
+ return v
+
+ @property
+ def is_terse_build(self) -> bool:
+ """Whether the build is terse
+
+ This is a shorthand for testing for `terse` in DEB_BUILD_OPTIONS
+ """
+ return "terse" in self.deb_build_options
+
+ @property
+ def is_cross_compiling(self) -> bool:
+ """Whether the build is considered a cross build
+
+ Note: Do **not** use this as an indicator for whether tests should run. Use `should_run_tests` instead.
+ To the naive eye, they seem like they overlap in functionality, but they do not. There are cross
+ builds where tests can be run. Additionally, there are non-cross-builds where tests should be
+ skipped.
+ """
+ return self.dpkg_architecture_variables.is_cross_compiling
+
+ def cross_tool(self, command: str) -> str:
+ if not self.is_cross_compiling:
+ return command
+ cross_prefix = self.dpkg_architecture_variables["DEB_HOST_GNU_TYPE"]
+ return f"{cross_prefix}-{command}"
+
+ @property
+ def dpkg_architecture_variables(self) -> DpkgArchitectureBuildProcessValuesTable:
+ raise NotImplementedError
+
+ @property
+ def should_run_tests(self) -> bool:
+ return _run_build_time_tests(self.deb_build_options)
+
+
+class BuildContextImpl(BuildContext):
+ def __init__(
+ self,
+ cmd_context: CommandContext,
+ ) -> None:
+ self._cmd_context = cmd_context
+
+ @property
+ def deb_build_options(self) -> Mapping[str, Optional[str]]:
+ return self._cmd_context.deb_build_options
+
+ @property
+ def dpkg_architecture_variables(self) -> DpkgArchitectureBuildProcessValuesTable:
+ return self._cmd_context.dpkg_architecture_variables()
+
+ @property
+ def manifest(self) -> HighLevelManifest:
+ return self._manifest
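
The `parallelization_limit` accessor above parses the `parallel` option from `DEB_BUILD_OPTIONS`, falling back to 1 on missing or malformed values and remapping `0` to 999 unless the caller opts into "zero means unlimited". A minimal standalone sketch of that rule (the helper name is invented; this is not part of the commit):

# Sketch restating the parallel-limit rule documented above
from typing import Mapping, Optional

def parse_parallel_limit(
    deb_build_options: Mapping[str, Optional[str]],
    *,
    support_zero_as_unlimited: bool = False,
) -> int:
    raw = deb_build_options.get("parallel")
    if raw is None:
        return 1
    try:
        value = int(raw)
    except ValueError:
        return 1
    if value == 0 and not support_zero_as_unlimited:
        # debhelper-compatible fallback for "parallel=0"
        return 999
    return value

assert parse_parallel_limit({"parallel": "4"}) == 4
assert parse_parallel_limit({"parallel": "0"}) == 999
assert parse_parallel_limit({"parallel": "0"}, support_zero_as_unlimited=True) == 0
assert parse_parallel_limit({}) == 1
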
diff --git a/src/debputy/build_support/build_logic.py b/src/debputy/build_support/build_logic.py
new file mode 100644
index 0000000..ee247e7
--- /dev/null
+++ b/src/debputy/build_support/build_logic.py
@@ -0,0 +1,193 @@
+import collections
+import contextlib
+import os
+from typing import (
+ Iterator,
+ Mapping,
+ List,
+ Dict,
+ Optional,
+)
+
+from debputy.build_support.build_context import BuildContext
+from debputy.build_support.buildsystem_detection import (
+ auto_detect_buildsystem,
+)
+from debputy.commands.debputy_cmd.context import CommandContext
+from debputy.highlevel_manifest import HighLevelManifest
+from debputy.manifest_parser.base_types import BuildEnvironmentDefinition
+from debputy.plugin.debputy.to_be_api_types import BuildRule
+from debputy.util import (
+ _error,
+ _info,
+ _non_verbose_info,
+)
+
+
+@contextlib.contextmanager
+def in_build_env(build_env: BuildEnvironmentDefinition):
+ remove_unnecessary_env()
+ # Should possibly be per build
+ with _setup_build_env(build_env):
+ yield
+
+
+def _set_stem_if_absent(stems: List[Optional[str]], idx: int, stem: str) -> None:
+ if stems[idx] is None:
+ stems[idx] = stem
+
+
+def assign_stems(
+ build_rules: List[BuildRule],
+ manifest: HighLevelManifest,
+) -> None:
+ if not build_rules:
+ return
+ if len(build_rules) == 1:
+ build_rules[0].auto_generated_stem = ""
+ return
+
+ debs = {p.name for p in manifest.all_packages if p.package_type == "deb"}
+ udebs = {p.name for p in manifest.all_packages if p.package_type == "udeb"}
+ deb_only_builds: List[int] = []
+ udeb_only_builds: List[int] = []
+ by_name_only_builds: Dict[str, List[int]] = collections.defaultdict(list)
+ stems = [rule.name for rule in build_rules]
+ reserved_stems = set(n for n in stems if n is not None)
+
+ for idx, rule in enumerate(build_rules):
+ stem = stems[idx]
+ if stem is not None:
+ continue
+ pkg_names = {p.name for p in rule.for_packages}
+ if pkg_names == debs:
+ deb_only_builds.append(idx)
+ elif pkg_names == udebs:
+ udeb_only_builds.append(idx)
+
+ if len(pkg_names) == 1:
+ pkg_name = next(iter(pkg_names))
+ by_name_only_builds[pkg_name].append(idx)
+
+ if "deb" not in reserved_stems and len(deb_only_builds) == 1:
+ _set_stem_if_absent(stems, deb_only_builds[0], "deb")
+
+ if "udeb" not in reserved_stems and len(udeb_only_builds) == 1:
+ _set_stem_if_absent(stems, udeb_only_builds[0], "udeb")
+
+ for pkg, idxs in by_name_only_builds.items():
+ if len(idxs) != 1 or pkg in reserved_stems:
+ continue
+ _set_stem_if_absent(stems, idxs[0], pkg)
+
+ for idx, rule in enumerate(build_rules):
+ stem = stems[idx]
+ if stem is None:
+ stem = f"bno_{idx}"
+ rule.auto_generated_stem = stem
+ _info(f"Assigned {rule.auto_generated_stem} [{stem}] to step {idx}")
+
+
+def perform_builds(
+ context: CommandContext,
+ manifest: HighLevelManifest,
+) -> None:
+ build_rules = manifest.build_rules
+ if build_rules is not None:
+ if not build_rules:
+ # Defined but empty disables the auto-detected build system
+ return
+ active_packages = frozenset(manifest.active_packages)
+ condition_context = manifest.source_condition_context
+ build_context = BuildContext.from_command_context(context)
+ assign_stems(build_rules, manifest)
+ for step_no, build_rule in enumerate(build_rules):
+ step_ref = (
+ f"step {step_no} [{build_rule.auto_generated_stem}]"
+ if build_rule.name is None
+ else f"step {step_no} [{build_rule.name}]"
+ )
+ if build_rule.for_packages.isdisjoint(active_packages):
+ _info(
+ f"Skipping build for {step_ref}: None of the relevant packages are being built"
+ )
+ continue
+ manifest_condition = build_rule.manifest_condition
+ if manifest_condition is not None and not manifest_condition.evaluate(
+ condition_context
+ ):
+ _info(
+ f"Skipping build for {step_ref}: The condition clause evaluated to false"
+ )
+ continue
+ _info(f"Starting build for {step_ref}.")
+ with in_build_env(build_rule.environment):
+ try:
+ build_rule.run_build(build_context, manifest)
+ except (RuntimeError, AttributeError) as e:
+ if context.parsed_args.debug_mode:
+ raise e
+ _error(
+ f"An error occurred during build/install at {step_ref} (defined at {build_rule.attribute_path.path}): {str(e)}"
+ )
+ _info(f"Completed build for {step_ref}.")
+
+ else:
+ build_system = auto_detect_buildsystem(manifest)
+ if build_system:
+ _info(f"Auto-detected build system: {build_system.__class__.__name__}")
+ build_context = BuildContext.from_command_context(context)
+ with in_build_env(build_system.environment):
+ build_system.run_build(
+ build_context,
+ manifest,
+ )
+
+ _non_verbose_info("Upstream builds completed successfully")
+ else:
+ _info("No build system was detected from the current plugin set.")
+
+
+def remove_unnecessary_env() -> None:
+ vs = [
+ "XDG_CACHE_HOME",
+ "XDG_CONFIG_DIRS",
+ "XDG_CONFIG_HOME",
+ "XDG_DATA_HOME",
+ "XDG_DATA_DIRS",
+ "XDG_RUNTIME_DIR",
+ ]
+ for v in vs:
+ if v in os.environ:
+ del os.environ[v]
+
+ # FIXME: Add custom HOME + XDG_RUNTIME_DIR
+
+
+@contextlib.contextmanager
+def _setup_build_env(build_env: BuildEnvironmentDefinition) -> Iterator[None]:
+ env_backup = dict(os.environ)
+ env = dict(env_backup)
+ had_delta = False
+ build_env.update_env(env)
+ if env != env_backup:
+ _set_env(env)
+ had_delta = True
+ _info("Updated environment to match build")
+ yield
+ if had_delta or env != env_backup:
+ _set_env(env_backup)
+
+
+def _set_env(desired_env: Mapping[str, str]) -> None:
+ os_env = os.environ
+ for key in os_env.keys() | desired_env.keys():
+ desired_value = desired_env.get(key)
+ if desired_value is None:
+ try:
+ del os_env[key]
+ except KeyError:
+ pass
+ else:
+ os_env[key] = desired_value
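
The `in_build_env`/`_setup_build_env` helpers above snapshot `os.environ`, let the `BuildEnvironmentDefinition` mutate a copy, apply the delta, and restore the snapshot after the build via `_set_env`. A self-contained sketch of the same save/apply/restore pattern (invented names, no debputy imports; the sketch adds a try/finally that the diff expresses differently):

# Sketch of the environment save/apply/restore pattern
import contextlib
import os
from typing import Iterator, Mapping

def _restore_env(desired: Mapping[str, str]) -> None:
    # Drop keys that should not exist and (re)set the rest, mirroring _set_env above.
    for key in os.environ.keys() | desired.keys():
        value = desired.get(key)
        if value is None:
            os.environ.pop(key, None)
        else:
            os.environ[key] = value

@contextlib.contextmanager
def temporary_env(overrides: Mapping[str, str]) -> Iterator[None]:
    backup = dict(os.environ)
    os.environ.update(overrides)
    try:
        yield
    finally:
        _restore_env(backup)

with temporary_env({"CFLAGS": "-O2 -g"}):
    pass  # run the build commands here
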
diff --git a/src/debputy/build_support/buildsystem_detection.py b/src/debputy/build_support/buildsystem_detection.py
new file mode 100644
index 0000000..47415fd
--- /dev/null
+++ b/src/debputy/build_support/buildsystem_detection.py
@@ -0,0 +1,112 @@
+from typing import (
+ Optional,
+)
+
+from debputy.exceptions import (
+ DebputyPluginRuntimeError,
+ PluginBaseError,
+)
+from debputy.filesystem_scan import FSRootDir, FSROOverlay
+from debputy.highlevel_manifest import HighLevelManifest
+from debputy.manifest_parser.base_types import BuildEnvironmentDefinition
+from debputy.manifest_parser.util import AttributePath
+from debputy.plugin.debputy.to_be_api_types import (
+ BuildSystemRule,
+)
+from debputy.plugin.plugin_state import run_in_context_of_plugin_wrap_errors
+from debputy.util import (
+ _error,
+ _debug_log,
+)
+
+
+def default_build_environment_only(
+ manifest: HighLevelManifest,
+) -> BuildEnvironmentDefinition:
+ build_envs = manifest.build_environments
+ if build_envs.environments:
+ _error(
+ 'When automatic build system detection is used, the manifest cannot use "build-environments"'
+ )
+ build_env = build_envs.default_environment
+ assert build_env is not None
+ return build_env
+
+
+def auto_detect_buildsystem(
+ manifest: HighLevelManifest,
+) -> Optional[BuildSystemRule]:
+ auto_detectable_build_systems = (
+ manifest.plugin_provided_feature_set.auto_detectable_build_systems
+ )
+ excludes = set()
+ options = []
+ _debug_log("Auto-detecting build systems.")
+ source_root = FSROOverlay.create_root_dir("", ".")
+ for ppadbs in auto_detectable_build_systems.values():
+ detected = ppadbs.detector(source_root)
+ if not isinstance(detected, bool):
+ _error(
+ f'The auto-detector for the build system {ppadbs.manifest_keyword} returned a "non-bool"'
+ f" ({detected!r}), which could be a bug in the plugin or the plugin relying on a newer"
+ " version of `debputy` that changed the auto-detection protocol."
+ )
+ if not detected:
+ _debug_log(
+ f"Skipping build system {ppadbs.manifest_keyword}: Detector returned False!"
+ )
+ continue
+ _debug_log(
+ f"Considering build system {ppadbs.manifest_keyword} as its Detector returned True!"
+ )
+ if ppadbs.auto_detection_shadow_build_systems:
+ names = ", ".join(
+ sorted(x for x in ppadbs.auto_detection_shadow_build_systems)
+ )
+ _debug_log(f"Build system {ppadbs.manifest_keyword} excludes: {names}!")
+ excludes.update(ppadbs.auto_detection_shadow_build_systems)
+ options.append(ppadbs)
+
+ if not options:
+ _debug_log("Zero candidates; continuing without a build system")
+ return None
+
+ if excludes:
+ names = ", ".join(sorted(x for x in excludes))
+ _debug_log(f"The following build systems have been excluded: {names}!")
+ remaining_options = [o for o in options if o.manifest_keyword not in excludes]
+ else:
+ remaining_options = options
+
+ if len(remaining_options) > 1:
+ names = ", ".join(o.manifest_keyword for o in remaining_options)
+ # TODO: This means adding an auto-detectable build system to an existing plugin causes FTBFS
+ # We need a better way of handling this. Probably the build systems should include
+ # a grace timer based on d/changelog. Anything before the changelog date is in
+ # "grace mode" and will not conflict with a build system that is. If all choices
+ # are in "grace mode", "oldest one" wins.
+ _error(
+ f"Multiple build systems match, please pick one explicitly (under `builds:`): {names}"
+ )
+
+ if not remaining_options:
+ names = ", ".join(o.build_system_rule_type.__name__ for o in options)
+ # TODO: Detect at registration time
+ _error(
+ f"Multiple build systems matched but they all shadowed each other: {names}."
+ f" There is a bug in at least one of them!"
+ )
+
+ chosen_build_system = remaining_options[0]
+ environment = default_build_environment_only(manifest)
+ bs = run_in_context_of_plugin_wrap_errors(
+ chosen_build_system.plugin_metadata.plugin_name,
+ chosen_build_system.constructor,
+ {
+ "environment": environment,
+ },
+ AttributePath.builtin_path(),
+ manifest,
+ )
+ bs.auto_generated_stem = ""
+ return bs
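
The auto-detection above keeps every plugin-provided build system whose detector returns True and then drops any candidate named in another candidate's `auto_detection_shadow_build_systems`; exactly one candidate must remain, otherwise the run is aborted. A small sketch of just the shadowing step (invented dataclass; the cmake/autoconf pairing is purely illustrative):

# Sketch of the shadow/exclusion resolution among detected build systems
from dataclasses import dataclass, field
from typing import FrozenSet, List

@dataclass(frozen=True)
class DetectedBuildSystem:  # stand-in for the plugin-provided registration
    manifest_keyword: str
    shadows: FrozenSet[str] = field(default_factory=frozenset)

def resolve_candidates(candidates: List[DetectedBuildSystem]) -> List[DetectedBuildSystem]:
    excludes = set()
    for candidate in candidates:
        excludes.update(candidate.shadows)
    return [c for c in candidates if c.manifest_keyword not in excludes]

remaining = resolve_candidates(
    [
        DetectedBuildSystem("cmake", shadows=frozenset({"autoconf"})),
        DetectedBuildSystem("autoconf"),
    ]
)
assert [c.manifest_keyword for c in remaining] == ["cmake"]
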
diff --git a/src/debputy/build_support/clean_logic.py b/src/debputy/build_support/clean_logic.py
new file mode 100644
index 0000000..13347b0
--- /dev/null
+++ b/src/debputy/build_support/clean_logic.py
@@ -0,0 +1,233 @@
+import os.path
+from typing import (
+ Set,
+ cast,
+ List,
+)
+
+from debputy.build_support.build_context import BuildContext
+from debputy.build_support.build_logic import (
+ in_build_env,
+ assign_stems,
+)
+from debputy.build_support.buildsystem_detection import auto_detect_buildsystem
+from debputy.commands.debputy_cmd.context import CommandContext
+from debputy.highlevel_manifest import HighLevelManifest
+from debputy.plugin.debputy.to_be_api_types import BuildSystemRule, CleanHelper
+from debputy.util import _info, print_command, _error, _debug_log, _warn
+from debputy.util import (
+ run_build_system_command,
+)
+
+_REMOVE_DIRS = frozenset(
+ [
+ "__pycache__",
+ "autom4te.cache",
+ ]
+)
+_IGNORE_DIRS = frozenset(
+ [
+ ".git",
+ ".svn",
+ ".bzr",
+ ".hg",
+ "CVS",
+ ".pc",
+ "_darcs",
+ ]
+)
+DELETE_FILE_EXT = (
+ "~",
+ ".orig",
+ ".rej",
+ ".bak",
+)
+DELETE_FILE_BASENAMES = {
+ "DEADJOE",
+ ".SUMS",
+ "TAGS",
+}
+
+
+def _debhelper_left_overs() -> bool:
+ if os.path.lexists("debian/.debhelper") or os.path.lexists(
+ "debian/debhelper-build-stamp"
+ ):
+ return True
+ with os.scandir(".") as root_dir:
+ for child in root_dir:
+ if child.is_file(follow_symlinks=False) and (
+ child.name.endswith(".debhelper.log")
+ or child.name.endswith(".debhelper")
+ ):
+ return True
+ return False
+
+
+class CleanHelperImpl(CleanHelper):
+
+ def __init__(self) -> None:
+ self.files_to_remove: Set[str] = set()
+ self.dirs_to_remove: Set[str] = set()
+
+ def schedule_removal_of_files(self, *args: str) -> None:
+ self.files_to_remove.update(args)
+
+ def schedule_removal_of_directories(self, *args: str) -> None:
+ if any(p == "/" for p in args):
+ raise ValueError("Refusing to delete '/'")
+ self.dirs_to_remove.update(args)
+
+
+def _scan_for_standard_removals(clean_helper: CleanHelperImpl) -> None:
+ remove_files = clean_helper.files_to_remove
+ remove_dirs = clean_helper.dirs_to_remove
+ with os.scandir(".") as root_dir:
+ for child in root_dir:
+ if child.is_file(follow_symlinks=False) and child.name.endswith("-stamp"):
+ remove_files.add(child.path)
+ for current_dir, subdirs, files in os.walk("."):
+ for remove_dir in [d for d in subdirs if d in _REMOVE_DIRS]:
+ path = os.path.join(current_dir, remove_dir)
+ remove_dirs.add(path)
+ subdirs.remove(remove_dir)
+ for skip_dir in [d for d in subdirs if d in _IGNORE_DIRS]:
+ subdirs.remove(skip_dir)
+
+ for basename in files:
+ if (
+ basename.endswith(DELETE_FILE_EXT)
+ or basename in DELETE_FILE_BASENAMES
+ or (basename.startswith("#") and basename.endswith("#"))
+ ):
+ path = os.path.join(current_dir, basename)
+ remove_files.add(path)
+
+
+def perform_clean(
+ context: CommandContext,
+ manifest: HighLevelManifest,
+) -> None:
+ clean_helper = CleanHelperImpl()
+
+ build_rules = manifest.build_rules
+ if build_rules is not None:
+ if not build_rules:
+ # Defined but empty disables the auto-detected build system
+ return
+ active_packages = frozenset(manifest.active_packages)
+ condition_context = manifest.source_condition_context
+ build_context = BuildContext.from_command_context(context)
+ assign_stems(build_rules, manifest)
+ for step_no, build_rule in enumerate(build_rules):
+ step_ref = (
+ f"step {step_no} [{build_rule.auto_generated_stem}]"
+ if build_rule.name is None
+ else f"step {step_no} [{build_rule.name}]"
+ )
+ if not build_rule.is_buildsystem:
+ _debug_log(f"Skipping clean for {step_ref}: Not a build system")
+ continue
+ build_system_rule: BuildSystemRule = cast("BuildSystemRule", build_rule)
+ if build_system_rule.for_packages.isdisjoint(active_packages):
+ _info(
+ f"Skipping build for {step_ref}: None of the relevant packages are being built"
+ )
+ continue
+ manifest_condition = build_system_rule.manifest_condition
+ if manifest_condition is not None and not manifest_condition.evaluate(
+ condition_context
+ ):
+ _info(
+ f"Skipping clean for {step_ref}: The condition clause evaluated to false"
+ )
+ continue
+ _info(f"Starting clean for {step_ref}.")
+ with in_build_env(build_rule.environment):
+ try:
+ build_system_rule.run_clean(
+ build_context,
+ manifest,
+ clean_helper,
+ )
+ except (RuntimeError, AttributeError) as e:
+ if context.parsed_args.debug_mode:
+ raise e
+ _error(
+ f"An error occurred during clean at {step_ref} (defined at {build_rule.attribute_path.path}): {str(e)}"
+ )
+ _info(f"Completed clean for {step_ref}.")
+ else:
+ build_system = auto_detect_buildsystem(manifest)
+ if build_system:
+ _info(f"Auto-detected build system: {build_system.__class__.__name__}")
+ build_context = BuildContext.from_command_context(context)
+ with in_build_env(build_system.environment):
+ build_system.run_clean(
+ build_context,
+ manifest,
+ clean_helper,
+ )
+ else:
+ _info("No build system was detected from the current plugin set.")
+
+ dh_autoreconf_used = os.path.lexists("debian/autoreconf.before")
+ debhelper_used = False
+
+ if dh_autoreconf_used or _debhelper_left_overs():
+ debhelper_used = True
+
+ _scan_for_standard_removals(clean_helper)
+
+ for package in manifest.all_packages:
+ package_staging_dir = os.path.join("debian", package.name)
+ if os.path.lexists(package_staging_dir):
+ clean_helper.schedule_removal_of_directories(package_staging_dir)
+
+ remove_files = clean_helper.files_to_remove
+ remove_dirs = clean_helper.dirs_to_remove
+ if remove_files:
+ print_command("rm", "-f", *remove_files)
+ _remove_files_if_exists(*remove_files)
+ if remove_dirs:
+ run_build_system_command("rm", "-fr", *remove_dirs)
+
+ if debhelper_used:
+ _info(
+ "Noted traces of debhelper commands being used; invoking dh_clean to clean up after them"
+ )
+ if dh_autoreconf_used:
+ run_build_system_command("dh_autoreconf_clean")
+ run_build_system_command("dh_clean")
+
+ try:
+ run_build_system_command("dpkg-buildtree", "clean")
+ except FileNotFoundError:
+ _warn("The dpkg-buildtree command is not present. Emulating it")
+ # This is from the manpage of dpkg-buildtree for 1.22.11.
+ _remove_files_if_exists(
+ "debian/files",
+ "debian/files.new",
+ "debian/substvars",
+ "debian/substvars.new",
+ )
+ run_build_system_command("rm", "-fr", "debian/tmp")
+ # Remove debian/.debputy as a separate step. While `rm -fr` should process things in order,
+ # it will continue on error, which could cause our manifests (the records of what to delete)
+ # to be deleted while leaving things half-removed, unless we do this extra step.
+ run_build_system_command("rm", "-fr", "debian/.debputy")
+
+
+def _remove_files_if_exists(*args: str) -> None:
+ for path in args:
+ try:
+ os.unlink(path)
+ except FileNotFoundError:
+ continue
+ except OSError as e:
+ if os.path.isdir(path):
+ _error(
+ f"Failed to remove {path}: It is a directory, but it should have been a non-directory."
+ " Please verify everything is as expected and, if it is, remove it manually."
+ )
+ _error(f"Failed to remove {path}: {str(e)}")
diff --git a/src/debputy/builtin_manifest_rules.py b/src/debputy/builtin_manifest_rules.py
index e420cda..e31a50e 100644
--- a/src/debputy/builtin_manifest_rules.py
+++ b/src/debputy/builtin_manifest_rules.py
@@ -17,7 +17,7 @@ from debputy.path_matcher import (
)
from debputy.substitution import Substitution
from debputy.types import VP
-from debputy.util import _normalize_path, perl_module_dirs
+from debputy.util import _normalize_path, resolve_perl_config
# Imported from dh_fixperms
_PERMISSION_NORMALIZATION_SOURCE_DEFINITION = "permission normalization"
@@ -218,17 +218,19 @@ def builtin_mode_normalization_rules(
OctalMode(0o0644),
)
+ perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
+
yield from (
(
BasenameGlobMatch(
"*.pm",
- only_when_in_directory=perl_dir,
+ only_when_in_directory=_normalize_path(perl_dir),
path_type=PathType.FILE,
recursive_match=True,
),
_STD_FILE_MODE,
)
- for perl_dir in perl_module_dirs(dpkg_architecture_variables, dctrl_bin)
+ for perl_dir in (perl_config_data.vendorlib, perl_config_data.vendorarch)
)
yield (
diff --git a/src/debputy/commands/debputy_cmd/__main__.py b/src/debputy/commands/debputy_cmd/__main__.py
index 3270737..05bd135 100644
--- a/src/debputy/commands/debputy_cmd/__main__.py
+++ b/src/debputy/commands/debputy_cmd/__main__.py
@@ -24,6 +24,7 @@ from typing import (
from debputy import DEBPUTY_ROOT_DIR, DEBPUTY_PLUGIN_ROOT_DIR
from debputy.analysis import REFERENCE_DATA_TABLE
from debputy.analysis.debian_dir import scan_debian_dir
+from debputy.build_support import perform_clean, perform_builds
from debputy.commands.debputy_cmd.context import (
CommandContext,
add_arg,
@@ -40,10 +41,15 @@ from debputy.exceptions import (
UnhandledOrUnexpectedErrorFromPluginError,
SymlinkLoopError,
)
+from debputy.highlevel_manifest import HighLevelManifest
from debputy.package_build.assemble_deb import (
assemble_debs,
)
-from debputy.plugin.api.spec import INTEGRATION_MODE_DH_DEBPUTY_RRR
+from debputy.plugin.api.spec import (
+ INTEGRATION_MODE_DH_DEBPUTY_RRR,
+ DebputyIntegrationMode,
+ INTEGRATION_MODE_FULL,
+)
try:
from argcomplete import autocomplete
@@ -92,8 +98,9 @@ from debputy.util import (
escape_shell,
program_name,
integrated_with_debhelper,
- change_log_level,
+ PRINT_BUILD_SYSTEM_COMMAND,
PRINT_COMMAND,
+ change_log_level,
)
@@ -255,6 +262,18 @@ def _add_packages_args(parser: argparse.ArgumentParser) -> None:
)
+def _build_subcommand_log_level(context: CommandContext) -> int:
+ parsed_args = context.parsed_args
+ log_level: Optional[int] = None
+ if os.environ.get("DH_VERBOSE", "") != "":
+ log_level = PRINT_COMMAND
+ if parsed_args.debug_mode:
+ log_level = logging.INFO
+ if log_level is not None:
+ change_log_level(log_level)
+ return PRINT_BUILD_SYSTEM_COMMAND
+
+
internal_commands = ROOT_COMMAND.add_dispatching_subcommand(
"internal-command",
dest="internal_command",
@@ -630,10 +649,71 @@ def _run_tests_for_plugin(context: CommandContext) -> None:
@internal_commands.register_subcommand(
+ "dpkg-build-driver-run-task",
+ help_description="[Internal command] Perform a given Dpkg::BuildDriver task (Not stable API)",
+ requested_plugins_only=True,
+ default_log_level=_build_subcommand_log_level,
+ argparser=[
+ add_arg(
+ "task_name",
+ metavar="task-name",
+ choices=[
+ "clean",
+ "build",
+ "build-arch",
+ "build-indep",
+ "binary",
+ "binary-arch",
+ "binary-indep",
+ ],
+ help="The task to run",
+ ),
+ add_arg(
+ "output",
+ nargs="?",
+ default="..",
+ metavar="output",
+ help="Where to place the resulting packages. Should be a directory",
+ ),
+ ],
+)
+def _dpkg_build_driver_integration(context: CommandContext) -> None:
+ parsed_args = context.parsed_args
+ log_level = context.set_log_level_for_build_subcommand()
+ task_name = parsed_args.task_name
+
+ if task_name.endswith("-indep"):
+ context.package_set = "indep"
+ elif task_name.endswith("arch"):
+ context.package_set = "arch"
+
+ manifest = context.parse_manifest()
+
+ plugins = context.load_plugins().plugin_data
+ for plugin in plugins.values():
+ if not plugin.is_bundled:
+ _info(f"Loaded plugin {plugin.plugin_name}")
+ if task_name == "clean":
+ perform_clean(context, manifest)
+ elif task_name in ("build", "build-indep", "build-arch"):
+ perform_builds(context, manifest)
+ elif task_name in ("binary", "binary-indep", "binary-arch"):
+ perform_builds(context, manifest)
+ assemble(
+ context,
+ manifest,
+ INTEGRATION_MODE_FULL,
+ debug_materialization=log_level is not None,
+ )
+ else:
+ _error(f"Unsupported Dpkg::BuildDriver task: {task_name}.")
+
+
+@internal_commands.register_subcommand(
"dh-integration-generate-debs",
help_description="[Internal command] Generate .deb/.udebs packages from debian/<pkg> (Not stable API)",
requested_plugins_only=True,
- default_log_level=logging.WARN,
+ default_log_level=_build_subcommand_log_level,
argparser=[
_add_packages_args,
add_arg(
@@ -653,14 +733,7 @@ def _run_tests_for_plugin(context: CommandContext) -> None:
)
def _dh_integration_generate_debs(context: CommandContext) -> None:
integrated_with_debhelper()
- parsed_args = context.parsed_args
- log_level: Optional[int] = None
- if os.environ.get("DH_VERBOSE", "") != "":
- log_level = PRINT_COMMAND
- if parsed_args.debug_mode:
- log_level = logging.INFO
- if log_level is not None:
- change_log_level(log_level)
+ log_level = context.set_log_level_for_build_subcommand()
integration_mode = context.resolve_integration_mode()
is_dh_rrr_only_mode = integration_mode == INTEGRATION_MODE_DH_DEBPUTY_RRR
if is_dh_rrr_only_mode:
@@ -678,13 +751,28 @@ def _dh_integration_generate_debs(context: CommandContext) -> None:
_info(f"Loaded plugin {plugin.plugin_name}")
manifest = context.parse_manifest()
- package_data_table = manifest.perform_installations(
- enable_manifest_installation_feature=not is_dh_rrr_only_mode
+ assemble(
+ context,
+ manifest,
+ integration_mode,
+ debug_materialization=log_level is not None,
)
+
+
+def assemble(
+ context: CommandContext,
+ manifest: HighLevelManifest,
+ integration_mode: DebputyIntegrationMode,
+ *,
+ debug_materialization: bool = False,
+) -> None:
source_fs = FSROOverlay.create_root_dir("..", ".")
source_version = manifest.source_version()
is_native = "-" not in source_version
-
+ is_dh_rrr_only_mode = integration_mode == INTEGRATION_MODE_DH_DEBPUTY_RRR
+ package_data_table = manifest.perform_installations(
+ enable_manifest_installation_feature=not is_dh_rrr_only_mode
+ )
if not is_dh_rrr_only_mode:
for dctrl_bin in manifest.active_packages:
package = dctrl_bin.name
@@ -702,7 +790,7 @@ def _dh_integration_generate_debs(context: CommandContext) -> None:
fs_root,
dctrl_data.substvars,
)
- if "nostrip" not in manifest.build_env.deb_build_options:
+ if "nostrip" not in manifest.deb_options_and_profiles.deb_build_options:
dbgsym_ids = relocate_dwarves_into_dbgsym_packages(
dctrl_bin,
fs_root,
@@ -714,7 +802,7 @@ def _dh_integration_generate_debs(context: CommandContext) -> None:
dctrl_bin,
fs_root,
is_native,
- manifest.build_env,
+ manifest.deb_options_and_profiles,
)
if not is_native:
install_upstream_changelog(
@@ -739,7 +827,7 @@ def _dh_integration_generate_debs(context: CommandContext) -> None:
manifest,
package_data_table,
is_dh_rrr_only_mode,
- debug_materialization=log_level is not None,
+ debug_materialization=debug_materialization,
)
@@ -901,10 +989,10 @@ def _json_output(data: Any) -> None:
],
)
def _migrate_from_dh(context: CommandContext) -> None:
+ context.must_be_called_in_source_root()
parsed_args = context.parsed_args
-
resolved_migration_target = _check_migration_target(
- context.debian_dir,
+ context,
parsed_args.migration_target,
)
context.debputy_integration_mode = resolved_migration_target
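
The new `dpkg-build-driver-run-task` internal command dispatches on the task name: `clean` runs the clean logic, the `build*` tasks run only the upstream builds, and the `binary*` tasks build and then assemble the packages. A compact sketch of that dispatch, with injected callables standing in for `perform_clean`, `perform_builds` and `assemble`:

# Sketch of the task-name dispatch
def run_task(task_name: str, *, clean, build, assemble) -> None:
    if task_name == "clean":
        clean()
    elif task_name in ("build", "build-indep", "build-arch"):
        build()
    elif task_name in ("binary", "binary-indep", "binary-arch"):
        build()
        assemble()
    else:
        raise ValueError(f"Unsupported Dpkg::BuildDriver task: {task_name}")

calls = []
run_task(
    "binary-arch",
    clean=lambda: calls.append("clean"),
    build=lambda: calls.append("build"),
    assemble=lambda: calls.append("assemble"),
)
assert calls == ["build", "assemble"]
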
diff --git a/src/debputy/commands/debputy_cmd/context.py b/src/debputy/commands/debputy_cmd/context.py
index 0c184c7..a9c0a13 100644
--- a/src/debputy/commands/debputy_cmd/context.py
+++ b/src/debputy/commands/debputy_cmd/context.py
@@ -15,6 +15,7 @@ from typing import (
Callable,
Dict,
TYPE_CHECKING,
+ Literal,
)
from debian.debian_support import DpkgArchTable
@@ -45,7 +46,14 @@ from debputy.substitution import (
SubstitutionImpl,
NULL_SUBSTITUTION,
)
-from debputy.util import _error, PKGNAME_REGEX, resolve_source_date_epoch, setup_logging
+from debputy.util import (
+ _error,
+ PKGNAME_REGEX,
+ resolve_source_date_epoch,
+ setup_logging,
+ PRINT_COMMAND,
+ change_log_level,
+)
if TYPE_CHECKING:
from argparse import _SubParsersAction
@@ -110,6 +118,19 @@ class CommandContext:
Mapping[str, "BinaryPackage"],
]
] = None
+ self._package_set: Literal["both", "arch", "indep"] = "both"
+
+ @property
+ def package_set(self) -> Literal["both", "arch", "indep"]:
+ return self._package_set
+
+ @package_set.setter
+ def package_set(self, new_value: Literal["both", "arch", "indep"]) -> None:
+ if self._dctrl_parser is not None:
+ raise TypeError(
+ "package_set cannot be redefined once the debian/control parser has been initialized"
+ )
+ self._package_set = new_value
@property
def debian_dir(self) -> VirtualPath:
@@ -135,9 +156,10 @@ class CommandContext:
parser = DctrlParser(
packages, # -p/--package
set(), # -N/--no-package
- False, # -i
- False, # -a
- build_env=DebBuildOptionsAndProfiles.instance(),
+ # binary-indep and binary-arch (dpkg BuildDriver integration only)
+ self._package_set == "indep",
+ self._package_set == "arch",
+ deb_options_and_profiles=DebBuildOptionsAndProfiles.instance(),
dpkg_architecture_variables=dpkg_architecture_table(),
dpkg_arch_query_table=DpkgArchTable.load_arch_table(),
)
@@ -152,6 +174,9 @@ class CommandContext:
_, binary_package_table = self._parse_dctrl()
return binary_package_table
+ def dpkg_architecture_variables(self) -> DpkgArchitectureBuildProcessValuesTable:
+ return self.dctrl_parser.dpkg_architecture_variables
+
def requested_plugins(self) -> Sequence[str]:
if self._requested_plugins is None:
self._requested_plugins = self._resolve_requested_plugins()
@@ -162,7 +187,7 @@ class CommandContext:
@property
def deb_build_options_and_profiles(self) -> "DebBuildOptionsAndProfiles":
- return self.dctrl_parser.build_env
+ return self.dctrl_parser.deb_options_and_profiles
@property
def deb_build_options(self) -> Mapping[str, Optional[str]]:
@@ -292,20 +317,37 @@ class CommandContext:
debian_control = self.debian_dir.get("control")
return debian_control is not None
- def resolve_integration_mode(self) -> DebputyIntegrationMode:
+ def resolve_integration_mode(
+ self,
+ require_integration: bool = True,
+ ) -> DebputyIntegrationMode:
integration_mode = self.debputy_integration_mode
if integration_mode is None:
r = read_dh_addon_sequences(self.debian_dir)
bd_sequences, dr_sequences, _ = r
all_sequences = bd_sequences | dr_sequences
- integration_mode = determine_debputy_integration_mode(all_sequences)
- if integration_mode is None:
+ integration_mode = determine_debputy_integration_mode(
+ self.source_package().fields,
+ all_sequences,
+ )
+ if integration_mode is None and require_integration:
_error(
"Cannot resolve the integration mode expected for this package. Is this package using `debputy`?"
)
self.debputy_integration_mode = integration_mode
return integration_mode
+ def set_log_level_for_build_subcommand(self) -> Optional[int]:
+ parsed_args = self.parsed_args
+ log_level: Optional[int] = None
+ if os.environ.get("DH_VERBOSE", "") != "":
+ log_level = PRINT_COMMAND
+ if parsed_args.debug_mode or os.environ.get("DEBPUTY_DEBUG", "") != "":
+ log_level = logging.DEBUG
+ if log_level is not None:
+ change_log_level(log_level)
+ return log_level
+
def manifest_parser(
self,
*,
@@ -320,7 +362,6 @@ class CommandContext:
manifest_path = self.parsed_args.debputy_manifest
if manifest_path is None:
manifest_path = os.path.join(self.debian_dir.fs_path, "debputy.manifest")
- debian_dir = self.debian_dir
return YAMLManifestParser(
manifest_path,
source_package,
@@ -328,7 +369,7 @@ class CommandContext:
substitution,
dctrl_parser.dpkg_architecture_variables,
dctrl_parser.dpkg_arch_query_table,
- dctrl_parser.build_env,
+ dctrl_parser.deb_options_and_profiles,
self.load_plugins(),
self.resolve_integration_mode(),
debian_dir=self.debian_dir,
@@ -420,7 +461,7 @@ class GenericSubCommand(SubcommandBase):
require_substitution: bool = True,
requested_plugins_only: bool = False,
log_only_to_stderr: bool = False,
- default_log_level: int = logging.INFO,
+ default_log_level: Union[int, Callable[[CommandContext], int]] = logging.INFO,
) -> None:
super().__init__(name, aliases=aliases, help_description=help_description)
self._handler = handler
@@ -452,7 +493,18 @@ class GenericSubCommand(SubcommandBase):
)
if self._log_only_to_stderr:
setup_logging(reconfigure_logging=True, log_only_to_stderr=True)
- logging.getLogger().setLevel(self._default_log_level)
+
+ default_log_level = self._default_log_level
+ if isinstance(default_log_level, int):
+ level = default_log_level
+ else:
+ assert callable(default_log_level)
+ level = default_log_level(context)
+ change_log_level(level)
+ if level > logging.DEBUG and (
+ context.parsed_args.debug_mode or os.environ.get("DEBPUTY_DEBUG", "") != ""
+ ):
+ change_log_level(logging.DEBUG)
return self._handler(context)
@@ -494,7 +546,7 @@ class DispatchingCommandMixin(CommandBase):
require_substitution: bool = True,
requested_plugins_only: bool = False,
log_only_to_stderr: bool = False,
- default_log_level: int = logging.INFO,
+ default_log_level: Union[int, Callable[[CommandContext], int]] = logging.INFO,
) -> Callable[[CommandHandler], GenericSubCommand]:
if isinstance(name, str):
cmd_name = name
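
`GenericSubCommand` now accepts `default_log_level` either as an `int` or as a callable taking the `CommandContext`, and escalates to `logging.DEBUG` when `--debug` or `DEBPUTY_DEBUG` is in effect. A small sketch of that resolution using plain types instead of the real context class (invented helper name):

# Sketch of resolving an int-or-callable default log level
import logging
import os
from typing import Callable, Union

def resolve_log_level(
    default_log_level: Union[int, Callable[[object], int]],
    context: object,
    *,
    debug_mode: bool,
) -> int:
    level = (
        default_log_level
        if isinstance(default_log_level, int)
        else default_log_level(context)
    )
    if level > logging.DEBUG and (
        debug_mode or os.environ.get("DEBPUTY_DEBUG", "") != ""
    ):
        level = logging.DEBUG
    return level

assert resolve_log_level(logging.INFO, None, debug_mode=True) == logging.DEBUG
level_for_build = resolve_log_level(lambda _ctx: logging.WARN, None, debug_mode=False)
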
diff --git a/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py b/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py
index eaab750..a03126b 100644
--- a/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py
+++ b/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py
@@ -267,7 +267,6 @@ def lsp_describe_features(context: CommandContext) -> None:
"--spellcheck",
dest="spellcheck",
action="store_true",
- shared=True,
help="Enable spellchecking",
),
add_arg(
diff --git a/src/debputy/commands/debputy_cmd/plugin_cmds.py b/src/debputy/commands/debputy_cmd/plugin_cmds.py
index 83bb88f..9721702 100644
--- a/src/debputy/commands/debputy_cmd/plugin_cmds.py
+++ b/src/debputy/commands/debputy_cmd/plugin_cmds.py
@@ -2,6 +2,7 @@ import argparse
import operator
import os
import sys
+import textwrap
from itertools import chain
from typing import (
Sequence,
@@ -28,7 +29,7 @@ from debputy.commands.debputy_cmd.output import (
)
from debputy.exceptions import DebputySubstitutionError
from debputy.filesystem_scan import build_virtual_fs
-from debputy.manifest_parser.base_types import TypeMapping
+from debputy.manifest_parser.tagging_types import TypeMapping
from debputy.manifest_parser.declarative_parser import (
BASIC_SIMPLE_TYPES,
)
@@ -45,13 +46,15 @@ from debputy.plugin.api.impl_types import (
PackagerProvidedFileClassSpec,
PluginProvidedManifestVariable,
DispatchingParserBase,
- SUPPORTED_DISPATCHABLE_TABLE_PARSERS,
- OPARSER_MANIFEST_ROOT,
PluginProvidedDiscardRule,
AutomaticDiscardRuleExample,
MetadataOrMaintscriptDetector,
PluginProvidedTypeMapping,
)
+from debputy.plugin.api.parser_tables import (
+ SUPPORTED_DISPATCHABLE_TABLE_PARSERS,
+ OPARSER_MANIFEST_ROOT,
+)
from debputy.plugin.api.spec import (
TypeMappingExample,
)
@@ -538,7 +541,16 @@ def _plugin_cmd_show_manifest_variables(context: CommandContext) -> None:
variable_value=None,
is_context_specific_variable=False,
is_documentation_placeholder=True,
- variable_reference_documentation=f'Environment variable "{env_var}"',
+ variable_reference_documentation=textwrap.dedent(
+ f"""\
+ Environment variable "{env_var}"
+
+ Note that uses beneath `builds:` may use the environment variable defined by
+ `build-environment:` (depends on whether the rule uses eager or lazy
+ substitution) while uses outside `builds:` will generally not use a definition
+ from `build-environment:`.
+ """
+ ),
)
else:
variable = variables.get(variable_name)
diff --git a/src/debputy/deb_packaging_support.py b/src/debputy/deb_packaging_support.py
index 875b3b1..92f57a2 100644
--- a/src/debputy/deb_packaging_support.py
+++ b/src/debputy/deb_packaging_support.py
@@ -81,7 +81,7 @@ from debputy.util import (
_error,
ensure_dir,
assume_not_none,
- perl_module_dirs,
+ resolve_perl_config,
perlxs_api_dependency,
detect_fakeroot,
grouper,
@@ -186,11 +186,11 @@ def handle_perl_code(
fs_root: FSPath,
substvars: FlushableSubstvars,
) -> None:
- known_perl_inc_dirs = perl_module_dirs(dpkg_architecture_variables, dctrl_bin)
+ perl_config_data = resolve_perl_config(dpkg_architecture_variables, dctrl_bin)
detected_dep_requirements = 0
# MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
- for perl_inc_dir in known_perl_inc_dirs:
+ for perl_inc_dir in (perl_config_data.vendorarch, perl_config_data.vendorlib):
p = fs_root.lookup(perl_inc_dir)
if p and p.is_dir:
p.prune_if_empty_dir()
@@ -198,8 +198,8 @@ def handle_perl_code(
# FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
# which will not be today.
for d, pm_mode in [
- (known_perl_inc_dirs.vendorlib, PERL_DEP_INDEP_PM_MODULE),
- (known_perl_inc_dirs.vendorarch, PERL_DEP_ARCH_PM_MODULE),
+ (perl_config_data.vendorlib, PERL_DEP_INDEP_PM_MODULE),
+ (perl_config_data.vendorarch, PERL_DEP_ARCH_PM_MODULE),
]:
inc_dir = fs_root.lookup(d)
if not inc_dir:
diff --git a/src/debputy/dh_migration/migration.py b/src/debputy/dh_migration/migration.py
index f7b7d9e..62f739e 100644
--- a/src/debputy/dh_migration/migration.py
+++ b/src/debputy/dh_migration/migration.py
@@ -3,10 +3,11 @@ import os
import re
import subprocess
from itertools import chain
-from typing import Optional, List, Callable, Set, Container
+from typing import Optional, List, Callable, Set, Container, Mapping, FrozenSet
from debian.deb822 import Deb822
+from debputy.commands.debputy_cmd.context import CommandContext
from debputy.dh.debhelper_emulation import CannotEmulateExecutableDHConfigFile
from debputy.dh_migration.migrators import MIGRATORS
from debputy.dh_migration.migrators_impl import (
@@ -24,9 +25,25 @@ from debputy.highlevel_manifest import HighLevelManifest
from debputy.integration_detection import determine_debputy_integration_mode
from debputy.manifest_parser.exceptions import ManifestParseException
from debputy.plugin.api import VirtualPath
-from debputy.plugin.api.spec import DebputyIntegrationMode
+from debputy.plugin.api.spec import DebputyIntegrationMode, INTEGRATION_MODE_FULL
from debputy.util import _error, _warn, _info, escape_shell, assume_not_none
+SUPPORTED_MIGRATIONS: Mapping[
+ DebputyIntegrationMode, FrozenSet[DebputyIntegrationMode]
+] = {
+ INTEGRATION_MODE_FULL: frozenset([INTEGRATION_MODE_FULL]),
+ INTEGRATION_MODE_DH_DEBPUTY: frozenset(
+ [INTEGRATION_MODE_DH_DEBPUTY, INTEGRATION_MODE_FULL]
+ ),
+ INTEGRATION_MODE_DH_DEBPUTY_RRR: frozenset(
+ [
+ INTEGRATION_MODE_DH_DEBPUTY_RRR,
+ INTEGRATION_MODE_DH_DEBPUTY,
+ INTEGRATION_MODE_FULL,
+ ]
+ ),
+}
+
def _print_migration_summary(
migrations: List[FeatureMigration],
@@ -143,22 +160,33 @@ def _requested_debputy_plugins(debian_dir: VirtualPath) -> Optional[Set[str]]:
def _check_migration_target(
- debian_dir: VirtualPath,
+ context: CommandContext,
migration_target: Optional[DebputyIntegrationMode],
) -> DebputyIntegrationMode:
- r = read_dh_addon_sequences(debian_dir)
- if r is None and migration_target is None:
- _error("debian/control is missing and no migration target was provided")
- bd_sequences, dr_sequences, _ = r
- all_sequences = bd_sequences | dr_sequences
-
- detected_migration_target = determine_debputy_integration_mode(all_sequences)
-
- if (
- migration_target == INTEGRATION_MODE_DH_DEBPUTY_RRR
- and detected_migration_target == INTEGRATION_MODE_DH_DEBPUTY
- ):
- _error("Cannot migrate from (zz-)debputy to zz-debputy-rrr")
+ r = read_dh_addon_sequences(context.debian_dir)
+ if r is not None:
+ bd_sequences, dr_sequences, _ = r
+ all_sequences = bd_sequences | dr_sequences
+ detected_migration_target = determine_debputy_integration_mode(
+ context.source_package().fields,
+ all_sequences,
+ )
+ else:
+ detected_migration_target = None
+
+ if migration_target is not None and detected_migration_target is not None:
+ supported_migrations = SUPPORTED_MIGRATIONS.get(
+ detected_migration_target,
+ frozenset([detected_migration_target]),
+ )
+
+ if (
+ migration_target != detected_migration_target
+ and migration_target not in supported_migrations
+ ):
+ _error(
+ f"Cannot migrate from {detected_migration_target} to {migration_target}"
+ )
if migration_target is not None:
resolved_migration_target = migration_target
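
`SUPPORTED_MIGRATIONS` above encodes which migration targets are acceptable for each detected integration mode (rrr can move to dh-debputy or full, dh-debputy can move to full, full stays full). A tiny sketch of the check performed in `_check_migration_target`, with short string literals standing in for the `INTEGRATION_MODE_*` constants:

# Sketch of the migration-target compatibility check
from typing import FrozenSet, Mapping

SUPPORTED: Mapping[str, FrozenSet[str]] = {
    "full": frozenset({"full"}),
    "dh-debputy": frozenset({"dh-debputy", "full"}),
    "dh-debputy-rrr": frozenset({"dh-debputy-rrr", "dh-debputy", "full"}),
}

def migration_allowed(detected: str, requested: str) -> bool:
    allowed = SUPPORTED.get(detected, frozenset({detected}))
    return requested == detected or requested in allowed

assert migration_allowed("dh-debputy-rrr", "full")
assert not migration_allowed("dh-debputy", "dh-debputy-rrr")
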
diff --git a/src/debputy/dh_migration/migrators.py b/src/debputy/dh_migration/migrators.py
index 8eff679..8a057b7 100644
--- a/src/debputy/dh_migration/migrators.py
+++ b/src/debputy/dh_migration/migrators.py
@@ -13,7 +13,7 @@ from debputy.dh_migration.migrators_impl import (
migrate_lintian_overrides_files,
detect_unsupported_zz_debputy_features,
detect_pam_files,
- detect_dh_addons,
+ detect_dh_addons_with_zz_integration,
migrate_not_installed_file,
migrate_installman_file,
migrate_bash_completion,
@@ -21,6 +21,7 @@ from debputy.dh_migration.migrators_impl import (
migrate_dh_installsystemd_files,
detect_obsolete_substvars,
detect_dh_addons_zz_debputy_rrr,
+ detect_dh_addons_with_full_integration,
)
from debputy.dh_migration.models import AcceptableMigrationIssues, FeatureMigration
from debputy.highlevel_manifest import HighLevelManifest
@@ -29,13 +30,42 @@ from debputy.plugin.api.spec import (
DebputyIntegrationMode,
INTEGRATION_MODE_DH_DEBPUTY_RRR,
INTEGRATION_MODE_DH_DEBPUTY,
+ INTEGRATION_MODE_FULL,
)
Migrator = Callable[
- [VirtualPath, HighLevelManifest, AcceptableMigrationIssues, FeatureMigration, str],
+ [
+ VirtualPath,
+ HighLevelManifest,
+ AcceptableMigrationIssues,
+ FeatureMigration,
+ DebputyIntegrationMode,
+ ],
None,
]
+_DH_DEBPUTY_MIGRATORS = [
+ detect_unsupported_zz_debputy_features,
+ detect_pam_files,
+ migrate_dh_hook_targets,
+ migrate_dh_installsystemd_files,
+ migrate_install_file,
+ migrate_installdocs_file,
+ migrate_installexamples_file,
+ migrate_installman_file,
+ migrate_installinfo_file,
+ migrate_misspelled_readme_debian_files,
+ migrate_doc_base_files,
+ migrate_links_files,
+ migrate_maintscript,
+ migrate_tmpfile,
+ migrate_lintian_overrides_files,
+ migrate_bash_completion,
+ detect_obsolete_substvars,
+ # not-installed should go last, so its rules appear after other installations
+ # It is not perfect, but it is a start.
+ migrate_not_installed_file,
+]
MIGRATORS: Mapping[DebputyIntegrationMode, List[Migrator]] = {
INTEGRATION_MODE_DH_DEBPUTY_RRR: [
@@ -45,26 +75,12 @@ MIGRATORS: Mapping[DebputyIntegrationMode, List[Migrator]] = {
detect_obsolete_substvars,
],
INTEGRATION_MODE_DH_DEBPUTY: [
- detect_unsupported_zz_debputy_features,
- detect_pam_files,
- migrate_dh_hook_targets,
- migrate_dh_installsystemd_files,
- migrate_install_file,
- migrate_installdocs_file,
- migrate_installexamples_file,
- migrate_installman_file,
- migrate_installinfo_file,
- migrate_misspelled_readme_debian_files,
- migrate_doc_base_files,
- migrate_links_files,
- migrate_maintscript,
- migrate_tmpfile,
- migrate_lintian_overrides_files,
- migrate_bash_completion,
- detect_dh_addons,
- detect_obsolete_substvars,
- # not-installed should go last, so its rules appear after other installations
- # It is not perfect, but it is a start.
- migrate_not_installed_file,
+ *_DH_DEBPUTY_MIGRATORS,
+ detect_dh_addons_with_zz_integration,
+ ],
+ INTEGRATION_MODE_FULL: [
+ *_DH_DEBPUTY_MIGRATORS,
+ detect_dh_addons_with_full_integration,
],
}
+del _DH_DEBPUTY_MIGRATORS
diff --git a/src/debputy/dh_migration/migrators_impl.py b/src/debputy/dh_migration/migrators_impl.py
index 91ea8cd..97b0fd2 100644
--- a/src/debputy/dh_migration/migrators_impl.py
+++ b/src/debputy/dh_migration/migrators_impl.py
@@ -17,6 +17,7 @@ from typing import (
Callable,
TypeVar,
Dict,
+ Container,
)
from debian.deb822 import Deb822
@@ -51,6 +52,8 @@ from debputy.plugin.api import VirtualPath
from debputy.plugin.api.spec import (
INTEGRATION_MODE_DH_DEBPUTY_RRR,
INTEGRATION_MODE_DH_DEBPUTY,
+ DebputyIntegrationMode,
+ INTEGRATION_MODE_FULL,
)
from debputy.util import (
_error,
@@ -61,8 +64,15 @@ from debputy.util import (
has_glob_magic,
)
+
+class ContainsEverything:
+
+ def __contains__(self, item: str) -> bool:
+ return True
+
+
# Align with debputy.py
-DH_COMMANDS_REPLACED = {
+DH_COMMANDS_REPLACED: Mapping[DebputyIntegrationMode, Container[str]] = {
INTEGRATION_MODE_DH_DEBPUTY_RRR: frozenset(
{
"dh_fixperms",
@@ -124,6 +134,7 @@ DH_COMMANDS_REPLACED = {
"dh_builddeb",
}
),
+ INTEGRATION_MODE_FULL: ContainsEverything(),
}
_GS_DOC = f"{DEBPUTY_DOC_ROOT_DIR}/GETTING-STARTED-WITH-dh-debputy.md"
@@ -375,7 +386,7 @@ def migrate_bash_completion(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_bash-completion files"
is_single_binary = sum(1 for _ in manifest.all_packages) == 1
@@ -466,7 +477,7 @@ def migrate_dh_installsystemd_files(
manifest: HighLevelManifest,
_acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installsystemd files"
for dctrl_bin in manifest.all_packages:
@@ -499,7 +510,7 @@ def migrate_maintscript(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installdeb files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -608,7 +619,7 @@ def migrate_install_file(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_install config files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -799,7 +810,7 @@ def migrate_installdocs_file(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installdocs config files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -846,7 +857,7 @@ def migrate_installexamples_file(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installexamples config files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -900,7 +911,7 @@ def migrate_installinfo_file(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installinfo config files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -975,7 +986,7 @@ def migrate_installman_file(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installman config files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -1095,7 +1106,7 @@ def migrate_not_installed_file(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_missing's not-installed config file"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -1135,7 +1146,7 @@ def detect_pam_files(
manifest: HighLevelManifest,
_acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "detect dh_installpam files (min dh compat)"
for dctrl_bin in manifest.all_packages:
@@ -1150,7 +1161,7 @@ def migrate_tmpfile(
manifest: HighLevelManifest,
_acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_installtmpfiles config files"
for dctrl_bin in manifest.all_packages:
@@ -1174,7 +1185,7 @@ def migrate_lintian_overrides_files(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_lintian config files"
for dctrl_bin in manifest.all_packages:
@@ -1198,7 +1209,7 @@ def migrate_links_files(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh_link files"
mutable_manifest = assume_not_none(manifest.mutable_manifest)
@@ -1272,7 +1283,7 @@ def migrate_misspelled_readme_debian_files(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "misspelled README.Debian files"
for dctrl_bin in manifest.all_packages:
@@ -1304,7 +1315,7 @@ def migrate_doc_base_files(
manifest: HighLevelManifest,
_: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "doc-base files"
# ignore the dh_make ".EX" file if one should still be present. The dh_installdocs tool ignores it too.
@@ -1355,7 +1366,7 @@ def migrate_dh_hook_targets(
_: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- migration_target: str,
+ migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "dh hook targets"
source_root = os.path.dirname(debian_dir.fs_path)
@@ -1407,7 +1418,7 @@ def detect_unsupported_zz_debputy_features(
manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "Known unsupported features"
@@ -1426,7 +1437,7 @@ def detect_obsolete_substvars(
_manifest: HighLevelManifest,
_acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = (
"Check for obsolete ${foo:var} variables in debian/control"
@@ -1507,7 +1518,7 @@ def detect_dh_addons_zz_debputy_rrr(
_manifest: HighLevelManifest,
_acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "Check for dh-sequence-addons"
r = read_dh_addon_sequences(debian_dir)
@@ -1527,12 +1538,28 @@ def detect_dh_addons_zz_debputy_rrr(
feature_migration.warn("Missing Build-Depends on dh-sequence-zz-debputy-rrr")
-def detect_dh_addons(
+def detect_dh_addons_with_full_integration(
+ _debian_dir: VirtualPath,
+ _manifest: HighLevelManifest,
+ _acceptable_migration_issues: AcceptableMigrationIssues,
+ feature_migration: FeatureMigration,
+ _migration_target: DebputyIntegrationMode,
+) -> None:
+ feature_migration.tagline = "Check for dh-sequence-addons and Build-Depends"
+ feature_migration.warn(
+ "TODO: Not implemented: Please remove any dh-sequence Build-Dependency"
+ )
+ feature_migration.warn(
+        "TODO: Not implemented: Please ensure there is a Build-Dependency on `debputy (>= 0.1.45~)`"
+ )
+
+
+def detect_dh_addons_with_zz_integration(
debian_dir: VirtualPath,
_manifest: HighLevelManifest,
acceptable_migration_issues: AcceptableMigrationIssues,
feature_migration: FeatureMigration,
- _migration_target: str,
+ _migration_target: DebputyIntegrationMode,
) -> None:
feature_migration.tagline = "Check for dh-sequence-addons"
r = read_dh_addon_sequences(debian_dir)
@@ -1544,6 +1571,8 @@ def detect_dh_addons(
)
return
+ assert _migration_target != INTEGRATION_MODE_FULL
+
bd_sequences, dr_sequences, _ = r
remaining_sequences = bd_sequences | dr_sequences
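
Every migrator in this hunk now receives a typed `DebputyIntegrationMode` instead of a plain `str`. A minimal sketch of the resulting signature, with illustrative names and assumed import paths (not part of the patch):

```python
from debputy.dh_migration.models import AcceptableMigrationIssues, FeatureMigration
from debputy.highlevel_manifest import HighLevelManifest
from debputy.plugin.api import VirtualPath
from debputy.plugin.api.spec import DebputyIntegrationMode, INTEGRATION_MODE_FULL


def example_migrator(
    debian_dir: VirtualPath,
    manifest: HighLevelManifest,
    acceptable_migration_issues: AcceptableMigrationIssues,
    feature_migration: FeatureMigration,
    migration_target: DebputyIntegrationMode,  # previously annotated as `str`
) -> None:
    # Migrators can now branch on the integration mode via the spec constants.
    feature_migration.tagline = "example check"
    if migration_target == INTEGRATION_MODE_FULL:
        feature_migration.warn("nothing to migrate for the full integration mode")
```
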
diff --git a/src/debputy/exceptions.py b/src/debputy/exceptions.py
index a445997..b3ff7d5 100644
--- a/src/debputy/exceptions.py
+++ b/src/debputy/exceptions.py
@@ -10,6 +10,10 @@ class DebputyRuntimeError(RuntimeError):
return cast("str", self.args[0])
+class DebputyBuildStepError(DebputyRuntimeError):
+ pass
+
+
class DebputySubstitutionError(DebputyRuntimeError):
pass
@@ -64,6 +68,10 @@ class PluginInitializationError(PluginBaseError):
pass
+class PluginIncorrectRegistrationError(PluginInitializationError):
+ pass
+
+
class PluginMetadataError(PluginBaseError):
pass
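
The two new exception classes slot into the existing hierarchy. A small, hedged sketch of how build code might surface failures through `DebputyBuildStepError` (the wrapper function is illustrative):

```python
import subprocess
from typing import List

from debputy.exceptions import DebputyBuildStepError


def run_build_command(cmd: List[str]) -> None:
    # Wrap tool failures in the new build-step error so callers can catch a single,
    # well-defined exception type (which still is-a DebputyRuntimeError).
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError as e:
        raise DebputyBuildStepError(f"{cmd[0]} exited with code {e.returncode}") from e
```
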
diff --git a/src/debputy/highlevel_manifest.py b/src/debputy/highlevel_manifest.py
index 9bdc225..6c910ab 100644
--- a/src/debputy/highlevel_manifest.py
+++ b/src/debputy/highlevel_manifest.py
@@ -49,7 +49,11 @@ from .maintscript_snippet import (
MaintscriptSnippetContainer,
)
from .manifest_conditions import ConditionContext
-from .manifest_parser.base_types import FileSystemMatchRule, FileSystemExactMatchRule
+from .manifest_parser.base_types import (
+ FileSystemMatchRule,
+ FileSystemExactMatchRule,
+ BuildEnvironments,
+)
from .manifest_parser.util import AttributePath
from .packager_provided_files import PackagerProvidedFile
from .packages import BinaryPackage, SourcePackage
@@ -61,6 +65,8 @@ from .plugin.api.impl_types import (
)
from .plugin.api.spec import FlushableSubstvars, VirtualPath
from .plugin.debputy.binary_package_rules import ServiceRule
+from .plugin.debputy.to_be_api_types import BuildRule
+from .plugin.plugin_state import run_in_context_of_plugin
from .substitution import Substitution
from .transformation_rules import (
TransformationRule,
@@ -1036,7 +1042,9 @@ def _install_everything_from_source_dir_if_present(
) -> None:
attribute_path = AttributePath.builtin_path()[f"installing {source_dir.fs_path}"]
pkg_set = frozenset([dctrl_bin])
- install_rule = InstallRule.install_dest(
+ install_rule = run_in_context_of_plugin(
+ "debputy",
+ InstallRule.install_dest,
[FileSystemMatchRule.from_path_match("*", attribute_path, substitution)],
None,
pkg_set,
@@ -1086,6 +1094,8 @@ class HighLevelManifest:
dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
dpkg_arch_query_table: DpkgArchTable,
build_env: DebBuildOptionsAndProfiles,
+ build_environments: BuildEnvironments,
+ build_rules: Optional[List[BuildRule]],
plugin_provided_feature_set: PluginProvidedFeatureSet,
debian_dir: VirtualPath,
) -> None:
@@ -1100,8 +1110,17 @@ class HighLevelManifest:
self._dpkg_arch_query_table = dpkg_arch_query_table
self._build_env = build_env
self._used_for: Set[str] = set()
+ self.build_environments = build_environments
+ self.build_rules = build_rules
self._plugin_provided_feature_set = plugin_provided_feature_set
self._debian_dir = debian_dir
+ self._source_condition_context = ConditionContext(
+ binary_package=None,
+ substitution=self.substitution,
+ deb_options_and_profiles=self._build_env,
+ dpkg_architecture_variables=self._dpkg_architecture_variables,
+ dpkg_arch_query_table=self._dpkg_arch_query_table,
+ )
def source_version(self, include_binnmu_version: bool = True) -> str:
        # TODO: There should be an easier way to determine the source version; really.
@@ -1116,6 +1135,10 @@ class HighLevelManifest:
raise AssertionError(f"Could not resolve {version_var}") from e
@property
+ def source_condition_context(self) -> ConditionContext:
+ return self._source_condition_context
+
+ @property
def debian_dir(self) -> VirtualPath:
return self._debian_dir
@@ -1124,7 +1147,7 @@ class HighLevelManifest:
return self._dpkg_architecture_variables
@property
- def build_env(self) -> DebBuildOptionsAndProfiles:
+ def deb_options_and_profiles(self) -> DebBuildOptionsAndProfiles:
return self._build_env
@property
@@ -1270,13 +1293,7 @@ class HighLevelManifest:
]
path_matcher = SourcePathMatcher(discard_rules)
- source_condition_context = ConditionContext(
- binary_package=None,
- substitution=self.substitution,
- build_env=self._build_env,
- dpkg_architecture_variables=self._dpkg_architecture_variables,
- dpkg_arch_query_table=self._dpkg_arch_query_table,
- )
+ source_condition_context = self._source_condition_context
for dctrl_bin in self.active_packages:
package = dctrl_bin.name
@@ -1416,23 +1433,14 @@ class HighLevelManifest:
self, binary_package: Optional[Union[BinaryPackage, str]]
) -> ConditionContext:
if binary_package is None:
- return ConditionContext(
- binary_package=None,
- substitution=self.substitution,
- build_env=self._build_env,
- dpkg_architecture_variables=self._dpkg_architecture_variables,
- dpkg_arch_query_table=self._dpkg_arch_query_table,
- )
+ return self._source_condition_context
if not isinstance(binary_package, str):
binary_package = binary_package.name
package_transformation = self.package_transformations[binary_package]
- return ConditionContext(
+ return self._source_condition_context.replace(
binary_package=package_transformation.binary_package,
substitution=package_transformation.substitution,
- build_env=self._build_env,
- dpkg_architecture_variables=self._dpkg_architecture_variables,
- dpkg_arch_query_table=self._dpkg_arch_query_table,
)
def apply_fs_transformations(
@@ -1452,7 +1460,7 @@ class HighLevelManifest:
condition_context = ConditionContext(
binary_package=package_transformation.binary_package,
substitution=package_transformation.substitution,
- build_env=self._build_env,
+ deb_options_and_profiles=self._build_env,
dpkg_architecture_variables=self._dpkg_architecture_variables,
dpkg_arch_query_table=self._dpkg_arch_query_table,
)
@@ -1466,7 +1474,7 @@ class HighLevelManifest:
norm_mode_transformation_rule = ModeNormalizationTransformationRule(norm_rules)
norm_mode_transformation_rule.transform_file_system(fs_root, condition_context)
for transformation in package_transformation.transformations:
- transformation.transform_file_system(fs_root, condition_context)
+ transformation.run_transform_file_system(fs_root, condition_context)
interpreter_normalization = NormalizeShebangLineTransformation()
interpreter_normalization.transform_file_system(fs_root, condition_context)
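
The manifest now caches a single source-level `ConditionContext` and derives per-package contexts with the new `replace()` helper instead of rebuilding the dataclass each time. A self-contained sketch of that pattern using a stand-in dataclass (not the real `ConditionContext`):

```python
import dataclasses
from typing import Optional


@dataclasses.dataclass(frozen=True, slots=True)
class DemoConditionContext:  # stand-in for debputy's ConditionContext
    binary_package: Optional[str]
    substitution: str


source_ctx = DemoConditionContext(binary_package=None, substitution="source-level")
# Specialise the cached source context for one binary package; all other fields are shared.
pkg_ctx = dataclasses.replace(source_ctx, binary_package="foo", substitution="foo-level")
assert source_ctx.binary_package is None
assert pkg_ctx.binary_package == "foo"
```
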
diff --git a/src/debputy/highlevel_manifest_parser.py b/src/debputy/highlevel_manifest_parser.py
index b7a4600..18d9fa7 100644
--- a/src/debputy/highlevel_manifest_parser.py
+++ b/src/debputy/highlevel_manifest_parser.py
@@ -43,20 +43,22 @@ from ._deb_options_profiles import DebBuildOptionsAndProfiles
from .architecture_support import DpkgArchitectureBuildProcessValuesTable
from .filesystem_scan import FSROOverlay
from .installations import InstallRule, PPFInstallRule
+from .manifest_parser.base_types import BuildEnvironments, BuildEnvironmentDefinition
from .manifest_parser.exceptions import ManifestParseException
from .manifest_parser.parser_data import ParserContextData
from .manifest_parser.util import AttributePath
from .packager_provided_files import detect_all_packager_provided_files
from .plugin.api import VirtualPath
+from .plugin.api.feature_set import PluginProvidedFeatureSet
from .plugin.api.impl_types import (
TP,
TTP,
DispatchingTableParser,
- OPARSER_MANIFEST_ROOT,
PackageContextData,
)
-from .plugin.api.feature_set import PluginProvidedFeatureSet
+from debputy.plugin.api.parser_tables import OPARSER_MANIFEST_ROOT
from .plugin.api.spec import DebputyIntegrationMode
+from .plugin.debputy.to_be_api_types import BuildRule
from .yaml import YAMLError, MANIFEST_YAML
try:
@@ -131,11 +133,19 @@ class HighLevelManifestParser(ParserContextData):
self._substitution = substitution
self._dpkg_architecture_variables = dpkg_architecture_variables
self._dpkg_arch_query_table = dpkg_arch_query_table
- self._build_env = build_env
+ self._deb_options_and_profiles = build_env
self._package_state_stack: List[PackageTransformationDefinition] = []
self._plugin_provided_feature_set = plugin_provided_feature_set
self._debputy_integration_mode = debputy_integration_mode
self._declared_variables = {}
+ self._used_named_envs = set()
+ self._build_environments: Optional[BuildEnvironments] = BuildEnvironments(
+ {},
+ None,
+ )
+ self._has_set_default_build_environment = False
+ self._read_build_environment = False
+ self._build_rules: Optional[List[BuildRule]] = None
if isinstance(debian_dir, str):
debian_dir = FSROOverlay.create_root_dir("debian", debian_dir)
@@ -202,10 +212,21 @@ class HighLevelManifestParser(ParserContextData):
return self._dpkg_arch_query_table
@property
- def build_env(self) -> DebBuildOptionsAndProfiles:
- return self._build_env
+ def deb_options_and_profiles(self) -> DebBuildOptionsAndProfiles:
+ return self._deb_options_and_profiles
+
+ def _self_check(self) -> None:
+ unused_envs = (
+ self._build_environments.environments.keys() - self._used_named_envs
+ )
+ if unused_envs:
+ unused_env_names = ", ".join(unused_envs)
+ raise ManifestParseException(
+ f"The following named environments were never referenced: {unused_env_names}"
+ )
def build_manifest(self) -> HighLevelManifest:
+ self._self_check()
if self._used:
raise TypeError("build_manifest can only be called once!")
self._used = True
@@ -240,6 +261,8 @@ class HighLevelManifestParser(ParserContextData):
ppf_result.reserved_only
)
self._transform_dpkg_maintscript_helpers_to_snippets()
+ build_environments = self.build_environments()
+ assert build_environments is not None
return HighLevelManifest(
self.manifest_path,
@@ -251,7 +274,9 @@ class HighLevelManifestParser(ParserContextData):
self._package_states,
self._dpkg_architecture_variables,
self._dpkg_arch_query_table,
- self._build_env,
+ self._deb_options_and_profiles,
+ build_environments,
+ self._build_rules,
self._plugin_provided_feature_set,
self._debian_dir,
)
@@ -325,6 +350,69 @@ class HighLevelManifestParser(ParserContextData):
def debputy_integration_mode(self, new_value: DebputyIntegrationMode) -> None:
self._debputy_integration_mode = new_value
+ def _register_build_environment(
+ self,
+ name: Optional[str],
+ build_environment: BuildEnvironmentDefinition,
+ attribute_path: AttributePath,
+ is_default: bool = False,
+ ) -> None:
+ assert not self._read_build_environment
+
+ # TODO: Reference the paths of the original environments for the error messages where that is relevant.
+ if is_default:
+ if self._has_set_default_build_environment:
+ raise ManifestParseException(
+ f"There cannot be multiple default environments and"
+ f" therefore {attribute_path.path} cannot be a default environment"
+ )
+ self._has_set_default_build_environment = True
+ self._build_environments.default_environment = build_environment
+ if name is None:
+ return
+ elif name is None:
+ raise ManifestParseException(
+ f"Useless environment defined at {attribute_path.path}. It is neither the"
+ " default environment nor does it have a name (so no rules can reference it"
+ " explicitly)"
+ )
+
+ if name in self._build_environments.environments:
+ raise ManifestParseException(
+            f'The environment defined at {attribute_path.path} reuses the name "{name}".'
+ " The environment name must be unique."
+ )
+ self._build_environments.environments[name] = build_environment
+
+ def resolve_build_environment(
+ self,
+ name: Optional[str],
+ attribute_path: AttributePath,
+ ) -> BuildEnvironmentDefinition:
+ if name is None:
+ return self.build_environments().default_environment
+ try:
+ env = self.build_environments().environments[name]
+ except KeyError:
+ raise ManifestParseException(
+ f'The environment "{name}" requested at {attribute_path.path} was not'
+ f" defined in the `build-environments`"
+ )
+ else:
+ self._used_named_envs.add(name)
+ return env
+
+ def build_environments(self) -> BuildEnvironments:
+ v = self._build_environments
+ if (
+ not self._read_build_environment
+ and not self._build_environments.environments
+ and self._build_environments.default_environment is None
+ ):
+ self._build_environments.default_environment = BuildEnvironmentDefinition()
+ self._read_build_environment = True
+ return v
+
def _transform_dpkg_maintscript_helpers_to_snippets(self) -> None:
package_state = self.current_binary_package_state
for dmh in package_state.dpkg_maintscript_helper_snippets:
@@ -504,6 +592,7 @@ class YAMLManifestParser(HighLevelManifestParser):
)
if service_rules:
package_state.requested_service_rules.extend(service_rules)
+ self._build_rules = parsed_data.get("builds")
return self.build_manifest()
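
A condensed restatement of the `build-environments` bookkeeping the parser now performs: at most one default environment, unique names, tracked references, and a final check that rejects environments that were never referenced. The class below uses plain dicts and is only a sketch of the logic, not the parser itself:

```python
from typing import Dict, Optional, Set


class DemoEnvRegistry:
    def __init__(self) -> None:
        self.environments: Dict[str, dict] = {}
        self.default_environment: Optional[dict] = None
        self._used_named_envs: Set[str] = set()

    def register(self, name: Optional[str], env: dict, *, is_default: bool = False) -> None:
        if is_default:
            if self.default_environment is not None:
                raise ValueError("There cannot be multiple default environments")
            self.default_environment = env
            if name is None:
                return
        elif name is None:
            raise ValueError("A non-default environment must have a name")
        if name in self.environments:
            raise ValueError(f'The environment name "{name}" must be unique')
        self.environments[name] = env

    def resolve(self, name: Optional[str]) -> Optional[dict]:
        if name is None:
            return self.default_environment
        env = self.environments[name]  # a KeyError becomes a parse error upstream
        self._used_named_envs.add(name)
        return env

    def self_check(self) -> None:
        unused = self.environments.keys() - self._used_named_envs
        if unused:
            raise ValueError(f"Never referenced: {', '.join(sorted(unused))}")


registry = DemoEnvRegistry()
registry.register("cross", {"DEB_BUILD_OPTIONS": "nocheck"})
registry.register(None, {}, is_default=True)
registry.resolve("cross")
registry.self_check()  # would raise if "cross" had never been resolved
```
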
diff --git a/src/debputy/installations.py b/src/debputy/installations.py
index b781757..806a964 100644
--- a/src/debputy/installations.py
+++ b/src/debputy/installations.py
@@ -31,10 +31,11 @@ from debputy.manifest_conditions import (
from debputy.manifest_parser.base_types import (
FileSystemMatchRule,
FileSystemExactMatchRule,
- DebputyDispatchableType,
)
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.packages import BinaryPackage
from debputy.path_matcher import MatchRule, ExactFileSystemPath, MATCH_ANYTHING
+from debputy.plugin.plugin_state import run_in_context_of_plugin
from debputy.substitution import Substitution
from debputy.util import _error, _warn
@@ -585,6 +586,7 @@ class InstallRule(DebputyDispatchableType):
*,
match_filter: Optional[Callable[["VirtualPath"], bool]] = None,
) -> None:
+ super().__init__()
self._condition = condition
self._definition_source = definition_source
self._match_filter = match_filter
@@ -1025,7 +1027,9 @@ class PPFInstallRule(InstallRule):
substitution: Substitution,
ppfs: Sequence["PackagerProvidedFile"],
) -> None:
- super().__init__(
+ run_in_context_of_plugin(
+ "debputy",
+ super().__init__,
None,
"<built-in; PPF install rule>",
)
diff --git a/src/debputy/integration_detection.py b/src/debputy/integration_detection.py
index f412268..cc19057 100644
--- a/src/debputy/integration_detection.py
+++ b/src/debputy/integration_detection.py
@@ -1,16 +1,21 @@
-from typing import Container, Optional
+from typing import Container, Optional, Mapping
from debputy.plugin.api.spec import (
DebputyIntegrationMode,
INTEGRATION_MODE_DH_DEBPUTY_RRR,
INTEGRATION_MODE_DH_DEBPUTY,
+ INTEGRATION_MODE_FULL,
)
def determine_debputy_integration_mode(
+ source_fields: Mapping[str, str],
all_sequences: Container[str],
) -> Optional[DebputyIntegrationMode]:
+ if source_fields.get("Build-Driver", "").lower() == "debputy":
+ return INTEGRATION_MODE_FULL
+
has_zz_debputy = "zz-debputy" in all_sequences or "debputy" in all_sequences
has_zz_debputy_rrr = "zz-debputy-rrr" in all_sequences
has_any_existing = has_zz_debputy or has_zz_debputy_rrr
@@ -18,4 +23,7 @@ def determine_debputy_integration_mode(
return INTEGRATION_MODE_DH_DEBPUTY_RRR
if has_any_existing:
return INTEGRATION_MODE_DH_DEBPUTY
+ if source_fields.get("Source", "") == "debputy":
+        # Self-hosting. We cannot set the Build-Driver field since that would create a circular dependency
+ return INTEGRATION_MODE_FULL
return None
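
A usage sketch of the updated detection (field and sequence values are illustrative): the new `Build-Driver` source field is consulted before any dh add-on sequence, and the sequences keep their previous meaning.

```python
from debputy.integration_detection import determine_debputy_integration_mode
from debputy.plugin.api.spec import INTEGRATION_MODE_DH_DEBPUTY, INTEGRATION_MODE_FULL

# `Build-Driver: debputy` in debian/control now selects the full integration outright.
assert (
    determine_debputy_integration_mode({"Build-Driver": "debputy"}, frozenset())
    == INTEGRATION_MODE_FULL
)
# Without the field, the dh add-on sequences decide as before.
assert (
    determine_debputy_integration_mode({}, frozenset({"zz-debputy"}))
    == INTEGRATION_MODE_DH_DEBPUTY
)
# Neither the field nor a debputy sequence: no debputy integration detected.
assert determine_debputy_integration_mode({}, frozenset()) is None
```
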
diff --git a/src/debputy/linting/lint_impl.py b/src/debputy/linting/lint_impl.py
index ddc7e93..0f37dce 100644
--- a/src/debputy/linting/lint_impl.py
+++ b/src/debputy/linting/lint_impl.py
@@ -35,7 +35,7 @@ from debputy.lsp.lsp_debian_tests_control import (
)
from debputy.lsp.maint_prefs import (
MaintainerPreferenceTable,
- EffectivePreference,
+ EffectiveFormattingPreference,
determine_effective_preference,
)
from debputy.lsp.quickfixes import provide_standard_quickfixes_from_diagnostics
@@ -92,7 +92,7 @@ class LintContext:
parsed_deb822_file_content: Optional[Deb822FileElement] = None
source_package: Optional[SourcePackage] = None
binary_packages: Optional[Mapping[str, BinaryPackage]] = None
- effective_preference: Optional[EffectivePreference] = None
+ effective_preference: Optional[EffectiveFormattingPreference] = None
style_tool: Optional[str] = None
unsupported_preference_reason: Optional[str] = None
salsa_ci: Optional[CommentedMap] = None
@@ -233,8 +233,6 @@ def perform_reformat(
named_style: Optional[str] = None,
) -> None:
parsed_args = context.parsed_args
- if not parsed_args.spellcheck:
- disable_spellchecking()
fo = _output_styling(context.parsed_args, sys.stdout)
lint_context = gather_lint_info(context)
if named_style is not None:
diff --git a/src/debputy/linting/lint_util.py b/src/debputy/linting/lint_util.py
index 6346508..017a1dc 100644
--- a/src/debputy/linting/lint_util.py
+++ b/src/debputy/linting/lint_util.py
@@ -43,7 +43,7 @@ if TYPE_CHECKING:
from debputy.lsp.text_util import LintCapablePositionCodec
from debputy.lsp.maint_prefs import (
MaintainerPreferenceTable,
- EffectivePreference,
+ EffectiveFormattingPreference,
)
@@ -56,9 +56,14 @@ class DebputyMetadata:
debputy_integration_mode: Optional[DebputyIntegrationMode]
@classmethod
- def from_data(cls, dh_sequencer_data: DhSequencerData) -> typing.Self:
+ def from_data(
+ cls,
+ source_fields: Mapping[str, str],
+ dh_sequencer_data: DhSequencerData,
+ ) -> typing.Self:
integration_mode = determine_debputy_integration_mode(
- dh_sequencer_data.sequences
+ source_fields,
+ dh_sequencer_data.sequences,
)
return cls(integration_mode)
@@ -114,12 +119,17 @@ class LintState:
raise NotImplementedError
@property
- def effective_preference(self) -> Optional["EffectivePreference"]:
+ def effective_preference(self) -> Optional["EffectiveFormattingPreference"]:
raise NotImplementedError
@property
def debputy_metadata(self) -> DebputyMetadata:
- return DebputyMetadata.from_data(self.dh_sequencer_data)
+ src_pkg = self.source_package
+ src_fields = src_pkg.fields if src_pkg else {}
+ return DebputyMetadata.from_data(
+ src_fields,
+ self.dh_sequencer_data,
+ )
@property
def dh_sequencer_data(self) -> DhSequencerData:
@@ -137,7 +147,7 @@ class LintStateImpl(LintState):
lines: List[str]
source_package: Optional[SourcePackage] = None
binary_packages: Optional[Mapping[str, BinaryPackage]] = None
- effective_preference: Optional["EffectivePreference"] = None
+ effective_preference: Optional["EffectiveFormattingPreference"] = None
_parsed_cache: Optional[Deb822FileElement] = None
_dh_sequencer_cache: Optional[DhSequencerData] = None
diff --git a/src/debputy/lsp/lsp_debian_control.py b/src/debputy/lsp/lsp_debian_control.py
index 2b8f9b0..ac08266 100644
--- a/src/debputy/lsp/lsp_debian_control.py
+++ b/src/debputy/lsp/lsp_debian_control.py
@@ -677,7 +677,7 @@ def _doc_inlay_hint(
stanza_range = stanza.range_in_parent()
if stanza_no < 1:
continue
- pkg_kvpair = stanza.get_kvpair_element("Package", use_get=True)
+ pkg_kvpair = stanza.get_kvpair_element(("Package", 0), use_get=True)
if pkg_kvpair is None:
continue
@@ -778,7 +778,7 @@ def _binary_package_checks(
) -> None:
package_name = stanza.get("Package", "")
source_section = source_stanza.get("Section")
- section_kvpair = stanza.get_kvpair_element("Section", use_get=True)
+ section_kvpair = stanza.get_kvpair_element(("Section", 0), use_get=True)
section: Optional[str] = None
if section_kvpair is not None:
section, section_range = extract_first_value_and_position(
@@ -1227,7 +1227,7 @@ def _package_range_of_stanza(
binary_stanzas: List[Tuple[Deb822ParagraphElement, TEPosition]],
) -> Iterable[Tuple[str, Optional[str], Range]]:
for stanza, stanza_position in binary_stanzas:
- kvpair = stanza.get_kvpair_element("Package", use_get=True)
+ kvpair = stanza.get_kvpair_element(("Package", 0), use_get=True)
if kvpair is None:
continue
representation_field_range = kvpair.range_in_parent().relative_to(
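
These call sites switch from a bare field name to a `(field-name, index)` tuple, selecting the first occurrence of the field explicitly. A short sketch against python-debian's `deb822_repro` API; treat the exact parsing entry point as an assumption rather than part of the patch:

```python
from debian._deb822_repro import parse_deb822_file

lines = ["Package: foo\n", "Architecture: any\n"]
stanza = next(iter(parse_deb822_file(lines)))
# ("Package", 0) addresses the first occurrence of the Package field explicitly.
kvpair = stanza.get_kvpair_element(("Package", 0), use_get=True)
assert kvpair is not None and kvpair.field_name == "Package"
# use_get=True returns None instead of raising when the field is absent.
assert stanza.get_kvpair_element(("Depends", 0), use_get=True) is None
```
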
diff --git a/src/debputy/lsp/lsp_debian_control_reference_data.py b/src/debputy/lsp/lsp_debian_control_reference_data.py
index 2ec885b..1d24045 100644
--- a/src/debputy/lsp/lsp_debian_control_reference_data.py
+++ b/src/debputy/lsp/lsp_debian_control_reference_data.py
@@ -104,7 +104,7 @@ except ImportError:
if TYPE_CHECKING:
- from debputy.lsp.maint_prefs import EffectivePreference
+ from debputy.lsp.maint_prefs import EffectiveFormattingPreference
F = TypeVar("F", bound="Deb822KnownField")
@@ -519,7 +519,7 @@ def _dctrl_ma_field_validation(
stanza_position: "TEPosition",
lint_state: LintState,
) -> Iterable[Diagnostic]:
- ma_kvpair = stanza.get_kvpair_element("Multi-Arch", use_get=True)
+ ma_kvpair = stanza.get_kvpair_element(("Multi-Arch", 0), use_get=True)
arch = stanza.get("Architecture", "any")
if arch == "all" and ma_kvpair is not None:
ma_value, ma_value_range = extract_first_value_and_position(
@@ -1035,6 +1035,36 @@ def _dctrl_validate_dep(
)
+def _rrr_build_driver_mismatch(
+ _known_field: "F",
+ _deb822_file: Deb822FileElement,
+ _kvpair: Deb822KeyValuePairElement,
+ kvpair_range_te: "TERange",
+ _field_name_range: "TERange",
+ stanza: Deb822ParagraphElement,
+ _stanza_position: "TEPosition",
+ lint_state: LintState,
+) -> Iterable[Diagnostic]:
+ dr = stanza.get("Build-Driver", "debian-rules")
+ if dr != "debian-rules":
+ yield Diagnostic(
+ lint_state.position_codec.range_to_client_units(
+ lint_state.lines,
+ te_range_to_lsp(kvpair_range_te),
+ ),
+ f"The Rules-Requires-Root field is irrelevant for the `Build-Driver` `{dr}`.",
+ DiagnosticSeverity.Warning,
+ source="debputy",
+ data=DiagnosticData(
+ quickfixes=[
+ propose_remove_range_quick_fix(
+ proposed_title="Remove Rules-Requires-Root"
+ )
+ ]
+ ),
+ )
+
+
class Dep5Matcher(BasenameGlobMatch):
def __init__(self, basename_glob: str) -> None:
super().__init__(
@@ -1979,7 +2009,7 @@ class Deb822KnownField:
def reformat_field(
self,
- effective_preference: "EffectivePreference",
+ effective_preference: "EffectiveFormattingPreference",
stanza_range: TERange,
kvpair: Deb822KeyValuePairElement,
formatter: FormatterCallback,
@@ -2002,7 +2032,7 @@ class DctrlLikeKnownField(Deb822KnownField):
def reformat_field(
self,
- effective_preference: "EffectivePreference",
+ effective_preference: "EffectiveFormattingPreference",
stanza_range: TERange,
kvpair: Deb822KeyValuePairElement,
formatter: FormatterCallback,
@@ -2011,7 +2041,7 @@ class DctrlLikeKnownField(Deb822KnownField):
) -> Iterable[TextEdit]:
interpretation = self.field_value_class.interpreter()
if (
- not effective_preference.formatting_deb822_normalize_field_content
+ not effective_preference.deb822_normalize_field_content
or interpretation is None
):
yield from super(DctrlLikeKnownField, self).reformat_field(
@@ -2133,7 +2163,7 @@ class DctrlKnownField(DctrlLikeKnownField):
def reformat_field(
self,
- effective_preference: "EffectivePreference",
+ effective_preference: "EffectiveFormattingPreference",
stanza_range: TERange,
kvpair: Deb822KeyValuePairElement,
formatter: FormatterCallback,
@@ -2142,7 +2172,7 @@ class DctrlKnownField(DctrlLikeKnownField):
) -> Iterable[TextEdit]:
if (
self.name == "Architecture"
- and effective_preference.formatting_deb822_normalize_field_content
+ and effective_preference.deb822_normalize_field_content
):
interpretation = self.field_value_class.interpreter()
assert interpretation is not None
@@ -2317,6 +2347,47 @@ SOURCE_FIELDS = _fields(
),
),
DctrlKnownField(
+ "Build-Driver",
+ FieldValueClass.SINGLE_VALUE,
+ default_value="debian-rules",
+ known_values=allowed_values(
+ Keyword(
+ "debian-rules",
+ synopsis_doc="Build via `debian/rules`",
+ hover_text=textwrap.dedent(
+ """\
+ Use the `debian/rules` interface for building packages.
+
+ This is the historical default and the interface that Debian Packages have used for
+ decades to build debs.
+ """
+ ),
+ ),
+ Keyword(
+ "debputy",
+ synopsis_doc="Build with `debputy`",
+ hover_text=textwrap.dedent(
+ """\
+ Use the `debputy` interface for building the package.
+
+                    This provides the "full" integration mode with `debputy`, where all parts of the
+                    package build are handled by `debputy`.
+
+ This *may* make any `debhelper` build-dependency redundant depending on which build
+                    system is used. Some build systems (such as `autoconf`) still use `debhelper`-based tools.
+ """
+ ),
+ ),
+ ),
+ synopsis_doc="Which implementation dpkg should use for the build",
+ hover_text=textwrap.dedent(
+ """\
+ The name of the build driver that dpkg (`dpkg-buildpackage`) will use for assembling the
+ package.
+ """
+ ),
+ ),
+ DctrlKnownField(
"Vcs-Browser",
FieldValueClass.SINGLE_VALUE,
synopsis_doc="URL for browsers to interact with packaging VCS",
@@ -2580,6 +2651,7 @@ SOURCE_FIELDS = _fields(
"Rules-Requires-Root",
FieldValueClass.SPACE_SEPARATED_LIST,
unknown_value_diagnostic_severity=None,
+ custom_field_check=_rrr_build_driver_mismatch,
known_values=allowed_values(
Keyword(
"no",
@@ -2642,6 +2714,9 @@ SOURCE_FIELDS = _fields(
` Build-Depends` on `dpkg-build-api (>= 1)` or later, the default is `no`. Otherwise,
the default is `binary-target`
+            This field is only relevant when the `Build-Driver` is `debian-rules` (which it is by
+ default).
+
Note it is **not** possible to require running the package as "true root".
"""
),
@@ -3905,6 +3980,57 @@ _DEP5_HEADER_FIELDS = _fields(
"""
),
),
+ Deb822KnownField(
+ "Files-Excluded",
+ FieldValueClass.FREE_TEXT_FIELD,
+ hover_text=textwrap.dedent(
+ """\
+ Remove the listed files from the tarball when repacking (commonly via uscan). This can be useful when the
+ listed files are non-free but not necessary for the Debian package. In this case, the upstream version of
+ the package should generally end with `~dfsg` or `+dfsg` (to mark the content changed due to the
+ Debian Free Software Guidelines). The exclusion can also be useful to remove large files or directories
+ that are not used by Debian or pre-built binaries. In this case, `~ds` or `+ds` should be added to the
+ version instead of `~dfsg` or `+dfsg` for "Debian Source" to mark it as altered by Debian. If both reasons
+ are used, the `~dfsg` or `+dfsg` version is used as that is the more important reason for the repacking.
+
+ Example:
+ ```
+ Files-Excluded: exclude-this
+ exclude-dir
+ */exclude-dir
+ .*
+ */js/jquery.js
+ ```
+
+ The `Files-Included` field can be used to "re-include" files matched by `Files-Excluded`.
+
+ It is also possible to exclude files in specific "upstream components" for source packages with multiple
+ upstream tarballs. This is done by adding a field called `Files-Excluded-<component>`. The `<component>`
+ part should then match the component name exactly (case sensitive).
+
+ Defined by: mk-origtargz (usually used via uscan)
+ """
+ ),
+ ),
+ Deb822KnownField(
+ "Files-Included",
+ FieldValueClass.FREE_TEXT_FIELD,
+ hover_text=textwrap.dedent(
+ """\
+ Re-include files that were marked for exclusion by `Files-Excluded`. This can be useful for "exclude
+ everything except X" style semantics where `Files-Excluded` has a very broad pattern and
+ `Files-Included` then marks a few exceptions.
+
+ It is also possible to re-include files in specific "upstream components" for source packages with multiple
+            upstream tarballs. This is done by adding a field called `Files-Included-<component>` which is then used
+            in tandem with `Files-Excluded-<component>`. The `<component>` part should then match the component name
+ exactly (case sensitive).
+
+
+ Defined by: mk-origtargz (usually used via uscan)
+ """
+ ),
+ ),
)
_DEP5_FILES_FIELDS = _fields(
Deb822KnownField(
@@ -4664,7 +4790,7 @@ class StanzaMetadata(Mapping[str, F], Generic[F], ABC):
def reformat_stanza(
self,
- effective_preference: "EffectivePreference",
+ effective_preference: "EffectiveFormattingPreference",
stanza: Deb822ParagraphElement,
stanza_range: TERange,
formatter: FormatterCallback,
@@ -4672,7 +4798,7 @@ class StanzaMetadata(Mapping[str, F], Generic[F], ABC):
lines: List[str],
) -> Iterable[TextEdit]:
for known_field in self.stanza_fields.values():
- kvpair = stanza.get_kvpair_element(known_field.name, use_get=True)
+ kvpair = stanza.get_kvpair_element((known_field.name, 0), use_get=True)
if kvpair is None:
continue
yield from known_field.reformat_field(
@@ -4744,7 +4870,7 @@ class Deb822FileMetadata(Generic[S]):
def reformat(
self,
- effective_preference: "EffectivePreference",
+ effective_preference: "EffectiveFormattingPreference",
deb822_file: Deb822FileElement,
formatter: FormatterCallback,
_content: str,
@@ -4856,7 +4982,7 @@ class DctrlFileMetadata(Deb822FileMetadata[DctrlStanzaMetadata]):
def reformat(
self,
- effective_preference: "EffectivePreference",
+ effective_preference: "EffectiveFormattingPreference",
deb822_file: Deb822FileElement,
formatter: FormatterCallback,
content: str,
@@ -4875,7 +5001,7 @@ class DctrlFileMetadata(Deb822FileMetadata[DctrlStanzaMetadata]):
)
if (
- not effective_preference.formatting_deb822_normalize_stanza_order
+ not effective_preference.deb822_normalize_stanza_order
or deb822_file.find_first_error_element() is not None
):
return edits
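
The new `_rrr_build_driver_mismatch` check boils down to one rule: `Rules-Requires-Root` only matters while the (defaulted) `Build-Driver` is `debian-rules`. A plain-dict restatement of that rule, with the dict standing in for the stanza object:

```python
from typing import Mapping


def rules_requires_root_is_relevant(stanza: Mapping[str, str]) -> bool:
    # Mirrors the check above: the Build-Driver default is "debian-rules".
    return stanza.get("Build-Driver", "debian-rules") == "debian-rules"


assert rules_requires_root_is_relevant({"Rules-Requires-Root": "no"})
# Combining Rules-Requires-Root with `Build-Driver: debputy` triggers the new warning.
assert not rules_requires_root_is_relevant(
    {"Build-Driver": "debputy", "Rules-Requires-Root": "no"}
)
```
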
diff --git a/src/debputy/lsp/lsp_debian_debputy_manifest.py b/src/debputy/lsp/lsp_debian_debputy_manifest.py
index a8a2fdf..8c7aeac 100644
--- a/src/debputy/lsp/lsp_debian_debputy_manifest.py
+++ b/src/debputy/lsp/lsp_debian_debputy_manifest.py
@@ -47,7 +47,7 @@ from debputy.lsprotocol.types import (
DiagnosticRelatedInformation,
Location,
)
-from debputy.manifest_parser.base_types import DebputyDispatchableType
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.manifest_parser.declarative_parser import (
AttributeDescription,
ParserGenerator,
@@ -57,7 +57,6 @@ from debputy.manifest_parser.declarative_parser import DeclarativeMappingInputPa
from debputy.manifest_parser.util import AttributePath
from debputy.plugin.api.impl import plugin_metadata_for_debputys_own_plugin
from debputy.plugin.api.impl_types import (
- OPARSER_MANIFEST_ROOT,
DeclarativeInputParser,
DispatchingParserBase,
DebputyPluginMetadata,
@@ -65,6 +64,7 @@ from debputy.plugin.api.impl_types import (
InPackageContextParser,
DeclarativeValuelessKeywordInputParser,
)
+from debputy.plugin.api.parser_tables import OPARSER_MANIFEST_ROOT
from debputy.plugin.api.spec import DebputyIntegrationMode
from debputy.plugin.debputy.private_api import Capability, load_libcap
from debputy.util import _info, detect_possible_typo
@@ -742,6 +742,22 @@ def _insert_snippet(lines: List[str], server_position: Position) -> bool:
return True
+def _maybe_quote(v: str) -> str:
+ if v and v[0].isdigit():
+ try:
+ float(v)
+ return f"'{v}'"
+ except ValueError:
+ pass
+ return v
+
+
+def _complete_value(v: Any) -> str:
+ if isinstance(v, str):
+ return _maybe_quote(v)
+ return str(v)
+
+
@lsp_completer(_LANGUAGE_IDS)
def debputy_manifest_completer(
ls: "DebputyLanguageServer",
@@ -820,7 +836,9 @@ def debputy_manifest_completer(
if isinstance(parser, DispatchingParserBase):
if matched_key:
items = [
- CompletionItem(k if has_colon else f"{k}:")
+ CompletionItem(
+ _maybe_quote(k) if has_colon else f"{_maybe_quote(k)}:"
+ )
for k in parser.registered_keywords()
if k not in parent
and not isinstance(
@@ -830,7 +848,7 @@ def debputy_manifest_completer(
]
else:
items = [
- CompletionItem(k)
+ CompletionItem(_maybe_quote(k))
for k in parser.registered_keywords()
if k not in parent
and isinstance(
@@ -842,7 +860,9 @@ def debputy_manifest_completer(
binary_packages = ls.lint_state(doc).binary_packages
if binary_packages is not None:
items = [
- CompletionItem(p if has_colon else f"{p}:")
+ CompletionItem(
+ _maybe_quote(p) if has_colon else f"{_maybe_quote(p)}:"
+ )
for p in binary_packages
if p not in parent
]
@@ -858,7 +878,9 @@ def debputy_manifest_completer(
locked.add(attr_name)
break
items = [
- CompletionItem(k if has_colon else f"{k}:")
+ CompletionItem(
+ _maybe_quote(k) if has_colon else f"{_maybe_quote(k)}:"
+ )
for k in parser.manifest_attributes
if k not in locked
]
@@ -913,7 +935,7 @@ def _completion_from_attr(
_info(f"Already filled: {matched} is one of {valid_values}")
return None
if valid_values:
- return [CompletionItem(x) for x in valid_values]
+ return [CompletionItem(_complete_value(x)) for x in valid_values]
return None
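
The new completion helpers quote values that YAML would otherwise parse as numbers. A standalone restatement of `_maybe_quote` with a few sample inputs:

```python
def maybe_quote(v: str) -> str:
    # Quote anything that starts with a digit and parses as a number,
    # so YAML keeps it as a string ("3.0" -> '3.0').
    if v and v[0].isdigit():
        try:
            float(v)
        except ValueError:
            pass
        else:
            return f"'{v}'"
    return v


assert maybe_quote("3.0") == "'3.0'"
assert maybe_quote("42") == "'42'"
assert maybe_quote("1.2.3") == "1.2.3"  # not a valid float, left unquoted
assert maybe_quote("amd64") == "amd64"
```
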
diff --git a/src/debputy/lsp/lsp_generic_yaml.py b/src/debputy/lsp/lsp_generic_yaml.py
index 94267f7..5e67428 100644
--- a/src/debputy/lsp/lsp_generic_yaml.py
+++ b/src/debputy/lsp/lsp_generic_yaml.py
@@ -1,6 +1,6 @@
from typing import Union, Any, Optional, List, Tuple
-from debputy.manifest_parser.base_types import DebputyDispatchableType
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.manifest_parser.declarative_parser import DeclarativeMappingInputParser
from debputy.manifest_parser.parser_doc import (
render_rule,
diff --git a/src/debputy/lsp/maint_prefs.py b/src/debputy/lsp/maint_prefs.py
index fa6315b..4cc70d5 100644
--- a/src/debputy/lsp/maint_prefs.py
+++ b/src/debputy/lsp/maint_prefs.py
@@ -32,37 +32,37 @@ PT = TypeVar("PT", bool, str, int)
BUILTIN_STYLES = os.path.join(os.path.dirname(__file__), "maint-preferences.yaml")
-_NORMALISE_FIELD_CONTENT_KEY = ["formatting", "deb822", "normalize-field-content"]
+_NORMALISE_FIELD_CONTENT_KEY = ["deb822", "normalize-field-content"]
_UPLOADER_SPLIT_RE = re.compile(r"(?<=>)\s*,")
_WAS_OPTIONS = {
- "-a": ("formatting_deb822_always_wrap", True),
- "--always-wrap": ("formatting_deb822_always_wrap", True),
- "-s": ("formatting_deb822_short_indent", True),
- "--short-indent": ("formatting_deb822_short_indent", True),
- "-t": ("formatting_deb822_trailing_separator", True),
- "--trailing-separator": ("formatting_deb822_trailing_separator", True),
+ "-a": ("deb822_always_wrap", True),
+ "--always-wrap": ("deb822_always_wrap", True),
+ "-s": ("deb822_short_indent", True),
+ "--short-indent": ("deb822_short_indent", True),
+ "-t": ("deb822_trailing_separator", True),
+ "--trailing-separator": ("deb822_trailing_separator", True),
# Noise option for us; we do not accept `--no-keep-first` though
"-k": (None, True),
"--keep-first": (None, True),
"--no-keep-first": ("DISABLE_NORMALIZE_STANZA_ORDER", True),
- "-b": ("formatting_deb822_normalize_stanza_order", True),
- "--sort-binary-packages": ("formatting_deb822_normalize_stanza_order", True),
+ "-b": ("deb822_normalize_stanza_order", True),
+ "--sort-binary-packages": ("deb822_normalize_stanza_order", True),
}
_WAS_DEFAULTS = {
- "formatting_deb822_always_wrap": False,
- "formatting_deb822_short_indent": False,
- "formatting_deb822_trailing_separator": False,
- "formatting_deb822_normalize_stanza_order": False,
- "formatting_deb822_normalize_field_content": True,
+ "deb822_always_wrap": False,
+ "deb822_short_indent": False,
+ "deb822_trailing_separator": False,
+ "deb822_normalize_stanza_order": False,
+ "deb822_normalize_field_content": True,
}
@dataclasses.dataclass(slots=True, frozen=True, kw_only=True)
class PreferenceOption(Generic[PT]):
key: Union[str, List[str]]
- expected_type: Type[PT]
+ expected_type: Union[Type[PT], Callable[[Any], Optional[str]]]
description: str
default_value: Optional[Union[PT, Callable[[CommentedMap], Optional[PT]]]] = None
@@ -88,11 +88,17 @@ class PreferenceOption(Generic[PT]):
if callable(default_value):
return default_value(data)
return default_value
- if isinstance(v, self.expected_type):
+ val_issue: Optional[str] = None
+ expected_type = self.expected_type
+ if not isinstance(expected_type, type) and callable(self.expected_type):
+ val_issue = self.expected_type(v)
+ elif not isinstance(v, self.expected_type):
+ val_issue = f"It should have been a {self.expected_type} but it was not"
+
+ if val_issue is None:
return v
raise ValueError(
- f'The value "{self.name}" for key {key} in file "{filename}" should have been a'
- f" {self.expected_type} but it was not"
+ f'The value "{self.name}" for key {key} in file "{filename}" was incorrect: {val_issue}'
)
@@ -108,7 +114,7 @@ def _false_when_formatting_content(m: CommentedMap) -> Optional[bool]:
return m.mlget(_NORMALISE_FIELD_CONTENT_KEY, list_ok=True, default=False) is True
-OPTIONS: List[PreferenceOption] = [
+MAINT_OPTIONS: List[PreferenceOption] = [
PreferenceOption(
key="canonical-name",
expected_type=str,
@@ -139,7 +145,25 @@ OPTIONS: List[PreferenceOption] = [
),
),
PreferenceOption(
- key=["formatting", "deb822", "short-indent"],
+ key="formatting",
+ expected_type=lambda x: (
+ None
+ if isinstance(x, EffectiveFormattingPreference)
+        else "It should have been an EffectiveFormattingPreference but it was not"
+ ),
+ default_value=None,
+ description=textwrap.dedent(
+ """\
+ The formatting preference of the maintainer. Can either be a string for a named style or an inline
+ style.
+ """
+ ),
+ ),
+]
+
+FORMATTING_OPTIONS = [
+ PreferenceOption(
+ key=["deb822", "short-indent"],
expected_type=bool,
description=textwrap.dedent(
"""\
@@ -175,7 +199,7 @@ OPTIONS: List[PreferenceOption] = [
),
),
PreferenceOption(
- key=["formatting", "deb822", "always-wrap"],
+ key=["deb822", "always-wrap"],
expected_type=bool,
description=textwrap.dedent(
"""\
@@ -210,7 +234,7 @@ OPTIONS: List[PreferenceOption] = [
),
),
PreferenceOption(
- key=["formatting", "deb822", "trailing-separator"],
+ key=["deb822", "trailing-separator"],
expected_type=bool,
default_value=False,
description=textwrap.dedent(
@@ -241,7 +265,7 @@ OPTIONS: List[PreferenceOption] = [
),
),
PreferenceOption(
- key=["formatting", "deb822", "max-line-length"],
+ key=["deb822", "max-line-length"],
expected_type=int,
default_value=79,
description=textwrap.dedent(
@@ -297,7 +321,7 @@ OPTIONS: List[PreferenceOption] = [
),
),
PreferenceOption(
- key=["formatting", "deb822", "normalize-field-order"],
+ key=["deb822", "normalize-field-order"],
expected_type=bool,
default_value=False,
description=textwrap.dedent(
@@ -332,7 +356,7 @@ OPTIONS: List[PreferenceOption] = [
),
),
PreferenceOption(
- key=["formatting", "deb822", "normalize-stanza-order"],
+ key=["deb822", "normalize-stanza-order"],
expected_type=bool,
default_value=False,
description=textwrap.dedent(
@@ -385,43 +409,43 @@ OPTIONS: List[PreferenceOption] = [
@dataclasses.dataclass(slots=True, frozen=True)
-class EffectivePreference:
- formatting_deb822_short_indent: Optional[bool] = None
- formatting_deb822_always_wrap: Optional[bool] = None
- formatting_deb822_trailing_separator: bool = False
- formatting_deb822_normalize_field_content: bool = False
- formatting_deb822_normalize_field_order: bool = False
- formatting_deb822_normalize_stanza_order: bool = False
- formatting_deb822_max_line_length: int = 79
+class EffectiveFormattingPreference:
+ deb822_short_indent: Optional[bool] = None
+ deb822_always_wrap: Optional[bool] = None
+ deb822_trailing_separator: bool = False
+ deb822_normalize_field_content: bool = False
+ deb822_normalize_field_order: bool = False
+ deb822_normalize_stanza_order: bool = False
+ deb822_max_line_length: int = 79
@classmethod
def from_file(
cls,
filename: str,
key: str,
- stylees: CommentedMap,
+ styles: CommentedMap,
) -> Self:
attr = {}
- for option in OPTIONS:
+ for option in FORMATTING_OPTIONS:
if not hasattr(cls, option.attribute_name):
continue
- value = option.extract_value(filename, key, stylees)
+ value = option.extract_value(filename, key, styles)
attr[option.attribute_name] = value
return cls(**attr) # type: ignore
@classmethod
def aligned_preference(
cls,
- a: Optional["EffectivePreference"],
- b: Optional["EffectivePreference"],
- ) -> Optional["EffectivePreference"]:
+ a: Optional["EffectiveFormattingPreference"],
+ b: Optional["EffectiveFormattingPreference"],
+ ) -> Optional["EffectiveFormattingPreference"]:
if a is None or b is None:
return None
- for option in OPTIONS:
+ for option in MAINT_OPTIONS:
attr_name = option.attribute_name
- if not hasattr(EffectivePreference, attr_name):
+ if not hasattr(EffectiveFormattingPreference, attr_name):
continue
a_value = getattr(a, attr_name)
b_value = getattr(b, attr_name)
@@ -430,14 +454,12 @@ class EffectivePreference:
return a
def deb822_formatter(self) -> FormatterCallback:
- line_length = self.formatting_deb822_max_line_length
+ line_length = self.deb822_max_line_length
return wrap_and_sort_formatter(
- 1 if self.formatting_deb822_short_indent else "FIELD_NAME_LENGTH",
- trailing_separator=self.formatting_deb822_trailing_separator,
- immediate_empty_line=self.formatting_deb822_short_indent or False,
- max_line_length_one_liner=(
- 0 if self.formatting_deb822_always_wrap else line_length
- ),
+ 1 if self.deb822_short_indent else "FIELD_NAME_LENGTH",
+ trailing_separator=self.deb822_trailing_separator,
+ immediate_empty_line=self.deb822_short_indent or False,
+ max_line_length_one_liner=(0 if self.deb822_always_wrap else line_length),
)
def replace(self, /, **changes: Any) -> Self:
@@ -445,24 +467,33 @@ class EffectivePreference:
@dataclasses.dataclass(slots=True, frozen=True)
-class MaintainerPreference(EffectivePreference):
+class MaintainerPreference:
canonical_name: Optional[str] = None
is_packaging_team: bool = False
+ formatting: Optional[EffectiveFormattingPreference] = None
- def as_effective_pref(self) -> EffectivePreference:
- fields = {
- k: v
- for k, v in dataclasses.asdict(self).items()
- if hasattr(EffectivePreference, k)
- }
- return EffectivePreference(**fields)
+ @classmethod
+ def from_file(
+ cls,
+ filename: str,
+ key: str,
+ styles: CommentedMap,
+ ) -> Self:
+ attr = {}
+
+ for option in MAINT_OPTIONS:
+ if not hasattr(cls, option.attribute_name):
+ continue
+ value = option.extract_value(filename, key, styles)
+ attr[option.attribute_name] = value
+ return cls(**attr) # type: ignore
class MaintainerPreferenceTable:
def __init__(
self,
- named_styles: Mapping[str, EffectivePreference],
+ named_styles: Mapping[str, EffectiveFormattingPreference],
maintainer_preferences: Mapping[str, MaintainerPreference],
) -> None:
self._named_styles = named_styles
@@ -470,7 +501,7 @@ class MaintainerPreferenceTable:
@classmethod
def load_preferences(cls) -> Self:
- named_styles: Dict[str, EffectivePreference] = {}
+ named_styles: Dict[str, EffectiveFormattingPreference] = {}
maintainer_preferences: Dict[str, MaintainerPreference] = {}
with open(BUILTIN_STYLES) as fd:
parse_file(named_styles, maintainer_preferences, BUILTIN_STYLES, fd)
@@ -488,7 +519,7 @@ class MaintainerPreferenceTable:
return cls(named_styles, maintainer_preferences)
@property
- def named_styles(self) -> Mapping[str, EffectivePreference]:
+ def named_styles(self) -> Mapping[str, EffectiveFormattingPreference]:
return self._named_styles
@property
@@ -497,7 +528,7 @@ class MaintainerPreferenceTable:
def parse_file(
- named_styles: Dict[str, EffectivePreference],
+ named_styles: Dict[str, EffectiveFormattingPreference],
maintainer_preferences: Dict[str, MaintainerPreference],
filename: str,
fd,
@@ -520,38 +551,45 @@ def parse_file(
named_styles_raw = {}
for style_name, content in named_styles_raw.items():
- wrapped_style = CommentedMap({"formatting": content})
- style = EffectivePreference.from_file(
+ style = EffectiveFormattingPreference.from_file(
filename,
style_name,
- wrapped_style,
+ content,
)
named_styles[style_name] = style
- for maintainer_email, maintainer_styles in maintainer_rules.items():
- if not isinstance(maintainer_styles, CommentedMap):
+ for maintainer_email, maintainer_pref in maintainer_rules.items():
+ if not isinstance(maintainer_pref, CommentedMap):
line_no = maintainer_rules.lc.key(maintainer_email).line
raise ValueError(
f'The value for maintainer "{maintainer_email}" should have been a mapping,'
f' but it is not. The problem entry is at line {line_no} in "{filename}"'
)
- formatting = maintainer_styles.get("formatting")
+ formatting = maintainer_pref.get("formatting")
if isinstance(formatting, str):
try:
- style = named_styles_raw[formatting]
+ style = named_styles[formatting]
except KeyError:
line_no = maintainer_rules.lc.key(maintainer_email).line
raise ValueError(
f'The maintainer "{maintainer_email}" requested the named style "{formatting}",'
f' but said style was not defined {filename}. The problem entry is at line {line_no} in "{filename}"'
) from None
- maintainer_styles["formatting"] = style
- maintainer_preferences[maintainer_email] = MaintainerPreference.from_file(
+ maintainer_pref["formatting"] = style
+ elif formatting is not None:
+ maintainer_pref["formatting"] = EffectiveFormattingPreference.from_file(
+ filename,
+ "formatting",
+ formatting,
+ )
+ mp = MaintainerPreference.from_file(
filename,
maintainer_email,
- maintainer_styles,
+ maintainer_pref,
)
+ maintainer_preferences[maintainer_email] = mp
+
@functools.lru_cache(64)
def extract_maint_email(maint: str) -> str:
@@ -569,7 +607,7 @@ def determine_effective_preference(
maint_preference_table: MaintainerPreferenceTable,
source_package: Optional[SourcePackage],
salsa_ci: Optional[CommentedMap],
-) -> Tuple[Optional[EffectivePreference], Optional[str], Optional[str]]:
+) -> Tuple[Optional[EffectiveFormattingPreference], Optional[str], Optional[str]]:
style = source_package.fields.get("X-Style") if source_package is not None else None
if style is not None:
if style not in ALL_PUBLIC_NAMED_STYLES:
@@ -612,7 +650,6 @@ def determine_effective_preference(
else:
msg = None
return detected_style, tool_w_args, msg
-
if source_package is None:
return None, None, None
@@ -620,47 +657,44 @@ def determine_effective_preference(
if maint is None:
return None, None, None
maint_email = extract_maint_email(maint)
- maint_style = maint_preference_table.maintainer_preferences.get(maint_email)
+ maint_pref = maint_preference_table.maintainer_preferences.get(maint_email)
# Special-case "@packages.debian.org" when missing, since they are likely to be "ad-hoc"
# teams that will not be registered. In that case, we fall back to looking at the uploader
# preferences as-if the maintainer had not been listed at all.
- if maint_style is None and not maint_email.endswith("@packages.debian.org"):
+ if maint_pref is None and not maint_email.endswith("@packages.debian.org"):
return None, None, None
- if maint_style is not None and maint_style.is_packaging_team:
+ if maint_pref is not None and maint_pref.is_packaging_team:
# When the maintainer is registered as a packaging team, then we assume the packaging
# team's style applies unconditionally.
- effective = maint_style.as_effective_pref()
+ effective = maint_pref.formatting
tool_w_args = _guess_tool_from_style(maint_preference_table, effective)
return effective, tool_w_args, None
uploaders = source_package.fields.get("Uploaders")
if uploaders is None:
- detected_style = (
- maint_style.as_effective_pref() if maint_style is not None else None
- )
+ detected_style = maint_pref.formatting if maint_pref is not None else None
tool_w_args = _guess_tool_from_style(maint_preference_table, detected_style)
return detected_style, tool_w_args, None
- all_styles: List[Optional[EffectivePreference]] = []
- if maint_style is not None:
- all_styles.append(maint_style)
+ all_styles: List[Optional[EffectiveFormattingPreference]] = []
+ if maint_pref is not None:
+ all_styles.append(maint_pref.formatting)
for uploader in _UPLOADER_SPLIT_RE.split(uploaders):
uploader_email = extract_maint_email(uploader)
- uploader_style = maint_preference_table.maintainer_preferences.get(
+ uploader_pref = maint_preference_table.maintainer_preferences.get(
uploader_email
)
- all_styles.append(uploader_style)
+ all_styles.append(uploader_pref.formatting if uploader_pref else None)
if not all_styles:
return None, None, None
- r = functools.reduce(EffectivePreference.aligned_preference, all_styles)
- if isinstance(r, MaintainerPreference):
- r = r.as_effective_pref()
+ r = functools.reduce(EffectiveFormattingPreference.aligned_preference, all_styles)
+ assert not isinstance(r, MaintainerPreference)
tool_w_args = _guess_tool_from_style(maint_preference_table, r)
return r, tool_w_args, None
def _guess_tool_from_style(
maint_preference_table: MaintainerPreferenceTable,
- pref: Optional[EffectivePreference],
+ pref: Optional[EffectiveFormattingPreference],
) -> Optional[str]:
if pref is None:
return None
@@ -682,7 +716,9 @@ def _split_options(args: Iterable[str]) -> Iterable[str]:
@functools.lru_cache
-def parse_salsa_ci_wrap_and_sort_args(args: str) -> Optional[EffectivePreference]:
+def parse_salsa_ci_wrap_and_sort_args(
+ args: str,
+) -> Optional[EffectiveFormattingPreference]:
options = dict(_WAS_DEFAULTS)
for arg in _split_options(args.split()):
v = _WAS_OPTIONS.get(arg)
@@ -694,6 +730,6 @@ def parse_salsa_ci_wrap_and_sort_args(args: str) -> Optional[EffectivePreference
options[varname] = value
if "DISABLE_NORMALIZE_STANZA_ORDER" in options:
del options["DISABLE_NORMALIZE_STANZA_ORDER"]
- options["formatting_deb822_normalize_stanza_order"] = False
+ options["deb822_normalize_stanza_order"] = False
- return EffectivePreference(**options) # type: ignore
+ return EffectiveFormattingPreference(**options) # type: ignore
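
With the rename from `EffectivePreference` to `EffectiveFormattingPreference`, the dataclass fields lose their `formatting_` prefix but keep their semantics, and the salsa-ci `wrap-and-sort` argument mapping still produces the same result. A hedged usage sketch:

```python
from debputy.lsp.maint_prefs import parse_salsa_ci_wrap_and_sort_args

pref = parse_salsa_ci_wrap_and_sort_args("--short-indent --always-wrap")
assert pref is not None
# Field names after the rename (no `formatting_` prefix any more).
assert pref.deb822_short_indent and pref.deb822_always_wrap
assert not pref.deb822_trailing_separator
assert pref.deb822_normalize_field_content  # wrap-and-sort normalises field content
```
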
diff --git a/src/debputy/maintscript_snippet.py b/src/debputy/maintscript_snippet.py
index ca81ca5..58a6bba 100644
--- a/src/debputy/maintscript_snippet.py
+++ b/src/debputy/maintscript_snippet.py
@@ -1,7 +1,7 @@
import dataclasses
from typing import Sequence, Optional, List, Literal, Iterable, Dict, Self
-from debputy.manifest_parser.base_types import DebputyDispatchableType
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.manifest_parser.util import AttributePath
STD_CONTROL_SCRIPTS = frozenset(
diff --git a/src/debputy/manifest_conditions.py b/src/debputy/manifest_conditions.py
index 0f5c298..3e97b00 100644
--- a/src/debputy/manifest_conditions.py
+++ b/src/debputy/manifest_conditions.py
@@ -1,12 +1,12 @@
import dataclasses
from enum import Enum
-from typing import List, Callable, Optional, Sequence
+from typing import List, Callable, Optional, Sequence, Any, Self, Mapping
from debian.debian_support import DpkgArchTable
from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
-from debputy.manifest_parser.base_types import DebputyDispatchableType
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.packages import BinaryPackage
from debputy.substitution import Substitution
from debputy.util import active_profiles_match
@@ -15,11 +15,14 @@ from debputy.util import active_profiles_match
@dataclasses.dataclass(slots=True, frozen=True)
class ConditionContext:
binary_package: Optional[BinaryPackage]
- build_env: DebBuildOptionsAndProfiles
+ deb_options_and_profiles: DebBuildOptionsAndProfiles
substitution: Substitution
dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable
dpkg_arch_query_table: DpkgArchTable
+ def replace(self, /, **changes: Any) -> "Self":
+ return dataclasses.replace(self, **changes)
+
class ManifestCondition(DebputyDispatchableType):
__slots__ = ()
@@ -72,6 +75,7 @@ class NegatedManifestCondition(ManifestCondition):
__slots__ = ("_condition",)
def __init__(self, condition: ManifestCondition) -> None:
+ super().__init__()
self._condition = condition
def negated(self) -> "ManifestCondition":
@@ -107,6 +111,7 @@ class ManifestConditionGroup(ManifestCondition):
match_type: _ConditionGroupMatchType,
conditions: Sequence[ManifestCondition],
) -> None:
+ super().__init__()
self.match_type = match_type
self._conditions = conditions
@@ -132,6 +137,7 @@ class ArchMatchManifestConditionBase(ManifestCondition):
__slots__ = ("_arch_spec", "_is_negated")
def __init__(self, arch_spec: List[str], *, is_negated: bool = False) -> None:
+ super().__init__()
self._arch_spec = arch_spec
self._is_negated = is_negated
@@ -177,6 +183,7 @@ class BuildProfileMatch(ManifestCondition):
__slots__ = ("_profile_spec", "_is_negated")
def __init__(self, profile_spec: str, *, is_negated: bool = False) -> None:
+ super().__init__()
self._profile_spec = profile_spec
self._is_negated = is_negated
@@ -190,7 +197,7 @@ class BuildProfileMatch(ManifestCondition):
def evaluate(self, context: ConditionContext) -> bool:
match = active_profiles_match(
- self._profile_spec, context.build_env.deb_build_profiles
+ self._profile_spec, context.deb_options_and_profiles.deb_build_profiles
)
return not match if self._is_negated else match
@@ -211,7 +218,14 @@ def _can_run_built_binaries(context: ConditionContext) -> bool:
if not context.dpkg_architecture_variables.is_cross_compiling:
return True
# User / Builder asserted that we could even though we are cross-compiling, so we have to assume it is true
- return "crossbuildcanrunhostbinaries" in context.build_env.deb_build_options
+ return (
+ "crossbuildcanrunhostbinaries"
+ in context.deb_options_and_profiles.deb_build_options
+ )
+
+
+def _run_build_time_tests(deb_build_options: Mapping[str, Optional[str]]) -> bool:
+ return "nocheck" not in deb_build_options
_IS_CROSS_BUILDING = _SingletonCondition(
@@ -226,12 +240,12 @@ _CAN_EXECUTE_COMPILED_BINARIES = _SingletonCondition(
_RUN_BUILD_TIME_TESTS = _SingletonCondition(
"Run build time tests",
- lambda c: "nocheck" not in c.build_env.deb_build_options,
+ lambda c: _run_build_time_tests(c.deb_options_and_profiles.deb_build_options),
)
_BUILD_DOCS_BDO = _SingletonCondition(
"Build docs (nodocs not in DEB_BUILD_OPTIONS)",
- lambda c: "nodocs" not in c.build_env.deb_build_options,
+ lambda c: "nodocs" not in c.deb_options_and_profiles.deb_build_options,
)
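
The singleton conditions above now read the renamed `deb_options_and_profiles` member, but the underlying `DEB_BUILD_OPTIONS` logic is unchanged: `nocheck` disables build-time tests and `nodocs` disables documentation. A tiny restatement:

```python
from typing import Mapping, Optional

DebBuildOptions = Mapping[str, Optional[str]]


def run_build_time_tests(deb_build_options: DebBuildOptions) -> bool:
    return "nocheck" not in deb_build_options


def build_docs(deb_build_options: DebBuildOptions) -> bool:
    return "nodocs" not in deb_build_options


assert run_build_time_tests({"parallel": "4"})
assert not run_build_time_tests({"nocheck": None, "parallel": "4"})
assert not build_docs({"nodocs": None})
```
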
diff --git a/src/debputy/manifest_parser/base_types.py b/src/debputy/manifest_parser/base_types.py
index 865e320..106c30e 100644
--- a/src/debputy/manifest_parser/base_types.py
+++ b/src/debputy/manifest_parser/base_types.py
@@ -1,9 +1,8 @@
import dataclasses
import os
+import subprocess
from functools import lru_cache
from typing import (
- TypedDict,
- NotRequired,
Sequence,
Optional,
Union,
@@ -12,12 +11,14 @@ from typing import (
Mapping,
Iterable,
TYPE_CHECKING,
- Callable,
- Type,
- Generic,
+ Dict,
+ MutableMapping,
+ NotRequired,
)
+from debputy.manifest_conditions import ManifestCondition
from debputy.manifest_parser.exceptions import ManifestParseException
+from debputy.manifest_parser.tagging_types import DebputyParsedContent
from debputy.manifest_parser.util import (
AttributePath,
_SymbolicModeSegment,
@@ -25,37 +26,20 @@ from debputy.manifest_parser.util import (
)
from debputy.path_matcher import MatchRule, ExactFileSystemPath
from debputy.substitution import Substitution
-from debputy.types import S
-from debputy.util import _normalize_path, T
+from debputy.util import _normalize_path, _error, _warn, _debug_log
if TYPE_CHECKING:
- from debputy.manifest_conditions import ManifestCondition
from debputy.manifest_parser.parser_data import ParserContextData
-class DebputyParsedContent(TypedDict):
- pass
-
-
-class DebputyDispatchableType:
- __slots__ = ()
-
-
-class DebputyParsedContentStandardConditional(DebputyParsedContent):
- when: NotRequired["ManifestCondition"]
-
-
@dataclasses.dataclass(slots=True, frozen=True)
class OwnershipDefinition:
entity_name: str
entity_id: int
-@dataclasses.dataclass
-class TypeMapping(Generic[S, T]):
- target_type: Type[T]
- source_type: Type[S]
- mapper: Callable[[S, AttributePath, Optional["ParserContextData"]], T]
+class DebputyParsedContentStandardConditional(DebputyParsedContent):
+ when: NotRequired[ManifestCondition]
ROOT_DEFINITION = OwnershipDefinition("root", 0)
@@ -438,3 +422,76 @@ class FileSystemExactMatchRule(FileSystemMatchRule):
class FileSystemExactNonDirMatchRule(FileSystemExactMatchRule):
pass
+
+
+class BuildEnvironmentDefinition:
+
+ def dpkg_buildflags_env(
+ self,
+ env: Mapping[str, str],
+ definition_source: Optional[str],
+ ) -> Dict[str, str]:
+ dpkg_env = {}
+ try:
+ bf_output = subprocess.check_output(["dpkg-buildflags"], env=env)
+ except FileNotFoundError:
+ if definition_source is None:
+ _error(
+ "The dpkg-buildflags command was not available and is necessary to set the relevant"
+                    " env variables by default."
+ )
+ _error(
+ "The dpkg-buildflags command was not available and is necessary to set the relevant"
+                f" env variables for the environment defined at {definition_source}."
+ )
+ except subprocess.CalledProcessError as e:
+ if definition_source is None:
+ _error(
+ f"The dpkg-buildflags command failed with exit code {e.returncode}. Please review the output from"
+ f" dpkg-buildflags above to resolve the issue."
+ )
+ _error(
+ f"The dpkg-buildflags command failed with exit code {e.returncode}. Please review the output from"
+ f" dpkg-buildflags above to resolve the issue. The environment definition that triggered this call"
+ f" was {definition_source}"
+ )
+ else:
+ warned = False
+ for line in bf_output.decode("utf-8").splitlines(keepends=False):
+ if "=" not in line or line.startswith("="):
+ if not warned:
+ _warn(
+ f"Unexpected output from dpkg-buildflags (not a K=V line): {line}"
+ )
+ continue
+ k, v = line.split("=", 1)
+ if k.strip() != k:
+ if not warned:
+ _warn(
+ f'Unexpected output from dpkg-buildflags (Key had spaces): "{line}"'
+ )
+ continue
+ dpkg_env[k] = v
+
+ return dpkg_env
+
+ def log_computed_env(self, source: str, computed_env: Mapping[str, str]) -> None:
+ _debug_log(f"Computed environment variables from {source}")
+ for k, v in computed_env.items():
+ _debug_log(f" {k}={v}")
+
+ def update_env(self, env: MutableMapping[str, str]) -> None:
+ dpkg_env = self.dpkg_buildflags_env(env, None)
+ self.log_computed_env("dpkg-buildflags", dpkg_env)
+ env.update(dpkg_env)
+
+
+class BuildEnvironments:
+
+ def __init__(
+ self,
+ environments: Dict[str, BuildEnvironmentDefinition],
+ default_environment: Optional[BuildEnvironmentDefinition],
+ ) -> None:
+ self.environments = environments
+ self.default_environment = default_environment
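
The heart of BuildEnvironmentDefinition.dpkg_buildflags_env above is the K=V parsing of dpkg-buildflags output. Here is a self-contained sketch of that parsing, fed with canned output so it runs without dpkg-dev installed; the names SAMPLE_OUTPUT and parse_buildflags are illustrative and the warn-once bookkeeping is omitted.

SAMPLE_OUTPUT = b"CFLAGS=-g -O2\nLDFLAGS=-Wl,-z,relro\nnot a kv line\n"


def parse_buildflags(output: bytes) -> dict:
    env = {}
    for line in output.decode("utf-8").splitlines(keepends=False):
        if "=" not in line or line.startswith("="):
            # dpkg_buildflags_env warns about such lines; this sketch just skips them.
            continue
        key, value = line.split("=", 1)
        if key.strip() != key:
            continue
        env[key] = value
    return env


print(parse_buildflags(SAMPLE_OUTPUT))
# -> {'CFLAGS': '-g -O2', 'LDFLAGS': '-Wl,-z,relro'}
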
diff --git a/src/debputy/manifest_parser/declarative_parser.py b/src/debputy/manifest_parser/declarative_parser.py
index 6cbbce3..2c350a0 100644
--- a/src/debputy/manifest_parser/declarative_parser.py
+++ b/src/debputy/manifest_parser/declarative_parser.py
@@ -1,5 +1,6 @@
import collections
import dataclasses
+import typing
from typing import (
Any,
Callable,
@@ -16,7 +17,6 @@ from typing import (
Mapping,
Optional,
cast,
- is_typeddict,
Type,
Union,
List,
@@ -28,13 +28,7 @@ from typing import (
Container,
)
-from debputy.manifest_parser.base_types import (
- DebputyParsedContent,
- FileSystemMatchRule,
- FileSystemExactMatchRule,
- DebputyDispatchableType,
- TypeMapping,
-)
+from debputy.manifest_parser.base_types import FileSystemMatchRule
from debputy.manifest_parser.exceptions import (
ManifestParseException,
)
@@ -43,7 +37,20 @@ from debputy.manifest_parser.mapper_code import (
wrap_into_list,
map_each_element,
)
+from debputy.manifest_parser.parse_hints import (
+ ConditionalRequired,
+ DebputyParseHint,
+ TargetAttribute,
+ ManifestAttribute,
+ ConflictWithSourceAttribute,
+ NotPathHint,
+)
from debputy.manifest_parser.parser_data import ParserContextData
+from debputy.manifest_parser.tagging_types import (
+ DebputyParsedContent,
+ DebputyDispatchableType,
+ TypeMapping,
+)
from debputy.manifest_parser.util import (
AttributePath,
unpack_type,
@@ -53,8 +60,6 @@ from debputy.manifest_parser.util import (
from debputy.plugin.api.impl_types import (
DeclarativeInputParser,
TD,
- _ALL_PACKAGE_TYPES,
- resolve_package_type_selectors,
ListWrappedDeclarativeInputParser,
DispatchingObjectParser,
DispatchingTableParser,
@@ -64,8 +69,11 @@ from debputy.plugin.api.impl_types import (
)
from debputy.plugin.api.spec import (
ParserDocumentation,
- PackageTypeSelector,
DebputyIntegrationMode,
+ StandardParserAttributeDocumentation,
+ undocumented_attr,
+ ParserAttributeDocumentation,
+ reference_documentation,
)
from debputy.util import _info, _warn, assume_not_none
@@ -478,242 +486,6 @@ class DeclarativeMappingInputParser(DeclarativeInputParser[TD], Generic[TD, SF])
return self._per_attribute_conflicts_cache
-class DebputyParseHint:
- @classmethod
- def target_attribute(cls, target_attribute: str) -> "DebputyParseHint":
- """Define this source attribute to have a different target attribute name
-
- As an example:
-
- >>> class SourceType(TypedDict):
- ... source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")]
- ... sources: NotRequired[List[str]]
- >>> class TargetType(TypedDict):
- ... sources: List[str]
- >>> pg = ParserGenerator()
- >>> parser = pg.generate_parser(TargetType, source_content=SourceType)
-
- In this example, the user can provide either `source` or `sources` and the parser will
- map them to the `sources` attribute in the `TargetType`. Note this example relies on
- the builtin mapping of `str` to `List[str]` to align the types between `source` (from
- SourceType) and `sources` (from TargetType).
-
- The following rules apply:
-
- * All source attributes that map to the same target attribute will be mutually exclusive
- (that is, the user cannot give `source` *and* `sources` as input).
- * When the target attribute is required, the source attributes are conditionally
- mandatory requiring the user to provide exactly one of them.
- * When multiple source attributes point to a single target attribute, none of the source
- attributes can be Required.
- * The annotation can only be used for the source type specification and the source type
- specification must be different from the target type specification.
-
- The `target_attribute` annotation can be used without having multiple source attributes. This
- can be useful if the source attribute name is not valid as a python variable identifier to
- rename it to a valid python identifier.
-
- :param target_attribute: The attribute name in the target content
- :return: The annotation.
- """
- return TargetAttribute(target_attribute)
-
- @classmethod
- def conflicts_with_source_attributes(
- cls,
- *conflicting_source_attributes: str,
- ) -> "DebputyParseHint":
- """Declare a conflict with one or more source attributes
-
- Example:
-
- >>> class SourceType(TypedDict):
- ... source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")]
- ... sources: NotRequired[List[str]]
- ... into_dir: NotRequired[str]
- ... renamed_to: Annotated[
- ... NotRequired[str],
- ... DebputyParseHint.conflicts_with_source_attributes("sources", "into_dir")
- ... ]
- >>> class TargetType(TypedDict):
- ... sources: List[str]
- ... into_dir: NotRequired[str]
- ... renamed_to: NotRequired[str]
- >>> pg = ParserGenerator()
- >>> parser = pg.generate_parser(TargetType, source_content=SourceType)
-
- In this example, if the user was to provide `renamed_to` with `sources` or `into_dir` the parser would report
- an error. However, the parser will allow `renamed_to` with `source` as the conflict is considered only for
- the input source. That is, it is irrelevant that `sources` and `source´ happens to "map" to the same target
- attribute.
-
- The following rules apply:
- * It is not possible for a target attribute to declare conflicts unless the target type spec is reused as
- source type spec.
- * All attributes involved in a conflict must be NotRequired. If any of the attributes are Required, then
- the parser generator will reject the input.
- * All attributes listed in the conflict must be valid attributes in the source type spec.
-
- Note you do not have to specify conflicts between two attributes with the same target attribute name. The
- `target_attribute` annotation will handle that for you.
-
- :param conflicting_source_attributes: All source attributes that cannot be used with this attribute.
- :return: The annotation.
- """
- if len(conflicting_source_attributes) < 1:
- raise ValueError(
- "DebputyParseHint.conflicts_with_source_attributes requires at least one attribute as input"
- )
- return ConflictWithSourceAttribute(frozenset(conflicting_source_attributes))
-
- @classmethod
- def required_when_single_binary(
- cls,
- *,
- package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES,
- ) -> "DebputyParseHint":
- """Declare a source attribute as required when the source package produces exactly one binary package
-
- The attribute in question must always be declared as `NotRequired` in the TypedDict and this condition
- can only be used for source attributes.
- """
- resolved_package_types = resolve_package_type_selectors(package_type)
- reason = "The field is required for source packages producing exactly one binary package"
- if resolved_package_types != _ALL_PACKAGE_TYPES:
- types = ", ".join(sorted(resolved_package_types))
- reason += f" of type {types}"
- return ConditionalRequired(
- reason,
- lambda c: len(
- [
- p
- for p in c.binary_packages.values()
- if p.package_type in package_type
- ]
- )
- == 1,
- )
- return ConditionalRequired(
- reason,
- lambda c: c.is_single_binary_package,
- )
-
- @classmethod
- def required_when_multi_binary(
- cls,
- *,
- package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES,
- ) -> "DebputyParseHint":
- """Declare a source attribute as required when the source package produces two or more binary package
-
- The attribute in question must always be declared as `NotRequired` in the TypedDict and this condition
- can only be used for source attributes.
- """
- resolved_package_types = resolve_package_type_selectors(package_type)
- reason = "The field is required for source packages producing two or more binary packages"
- if resolved_package_types != _ALL_PACKAGE_TYPES:
- types = ", ".join(sorted(resolved_package_types))
- reason = (
- "The field is required for source packages producing not producing exactly one binary packages"
- f" of type {types}"
- )
- return ConditionalRequired(
- reason,
- lambda c: len(
- [
- p
- for p in c.binary_packages.values()
- if p.package_type in package_type
- ]
- )
- != 1,
- )
- return ConditionalRequired(
- reason,
- lambda c: not c.is_single_binary_package,
- )
-
- @classmethod
- def manifest_attribute(cls, attribute: str) -> "DebputyParseHint":
- """Declare what the attribute name (as written in the manifest) should be
-
- By default, debputy will do an attribute normalizing that will take valid python identifiers such
- as `dest_dir` and remap it to the manifest variant (such as `dest-dir`) automatically. If you have
- a special case, where this built-in normalization is insufficient or the python name is considerably
- different from what the user would write in the manifest, you can use this parse hint to set the
- name that the user would have to write in the manifest for this attribute.
-
- >>> class SourceType(TypedDict):
- ... source: List[FileSystemMatchRule]
- ... # Use "as" in the manifest because "as_" was not pretty enough
- ... install_as: Annotated[NotRequired[FileSystemExactMatchRule], DebputyParseHint.manifest_attribute("as")]
-
- In this example, we use the parse hint to use "as" as the name in the manifest, because we cannot
- use "as" a valid python identifier (it is a keyword). While debputy would map `as_` to `as` for us,
- we have chosen to use `install_as` as a python identifier.
- """
- return ManifestAttribute(attribute)
-
- @classmethod
- def not_path_error_hint(cls) -> "DebputyParseHint":
- """Mark this attribute as not a "path hint" when it comes to reporting errors
-
- By default, `debputy` will pick up attributes that uses path names (FileSystemMatchRule) as
- candidates for parse error hints (the little "<Search for: VALUE>" in error messages).
-
- Most rules only have one active path-based attribute and paths tends to be unique enough
- that it helps people spot the issue faster. However, in rare cases, you can have multiple
- attributes that fit the bill. In this case, this hint can be used to "hide" the suboptimal
- choice. As an example:
-
- >>> class SourceType(TypedDict):
- ... source: List[FileSystemMatchRule]
- ... install_as: Annotated[NotRequired[FileSystemExactMatchRule], DebputyParseHint.not_path_error_hint()]
-
- In this case, without the hint, `debputy` might pick up `install_as` as the attribute to
- use as hint for error reporting. However, here we have decided that we never want `install_as`
- leaving `source` as the only option.
-
- Generally, this type hint must be placed on the **source** format. Any source attribute matching
- the parsed format will be ignored.
-
- Mind the asymmetry: The annotation is placed in the **source** format while `debputy` looks at
- the type of the target attribute to determine if it counts as path.
- """
- return NOT_PATH_HINT
-
-
-@dataclasses.dataclass(frozen=True, slots=True)
-class TargetAttribute(DebputyParseHint):
- attribute: str
-
-
-@dataclasses.dataclass(frozen=True, slots=True)
-class ConflictWithSourceAttribute(DebputyParseHint):
- conflicting_attributes: FrozenSet[str]
-
-
-@dataclasses.dataclass(frozen=True, slots=True)
-class ConditionalRequired(DebputyParseHint):
- reason: str
- condition: Callable[["ParserContextData"], bool]
-
- def condition_applies(self, context: "ParserContextData") -> bool:
- return self.condition(context)
-
-
-@dataclasses.dataclass(frozen=True, slots=True)
-class ManifestAttribute(DebputyParseHint):
- attribute: str
-
-
-class NotPathHint(DebputyParseHint):
- pass
-
-
-NOT_PATH_HINT = NotPathHint()
-
-
def _is_path_attribute_candidate(
source_attribute: AttributeDescription, target_attribute: AttributeDescription
) -> bool:
@@ -730,6 +502,16 @@ def _is_path_attribute_candidate(
return isinstance(match_type, type) and issubclass(match_type, FileSystemMatchRule)
+if typing.is_typeddict(DebputyParsedContent):
+ is_typeddict = typing.is_typeddict
+else:
+
+ def is_typeddict(t: Any) -> bool:
+ if typing.is_typeddict(t):
+ return True
+ return isinstance(t, type) and issubclass(t, DebputyParsedContent)
+
+
class ParserGenerator:
def __init__(self) -> None:
self._registered_types: Dict[Any, TypeMapping[Any, Any]] = {}
@@ -811,6 +593,9 @@ class ParserGenerator:
expected_debputy_integration_mode: Optional[
Container[DebputyIntegrationMode]
] = None,
+ automatic_docs: Optional[
+ Mapping[Type[Any], Sequence[StandardParserAttributeDocumentation]]
+ ] = None,
) -> DeclarativeInputParser[TD]:
"""Derive a parser from a TypedDict
@@ -978,7 +763,7 @@ class ParserGenerator:
f"Unsupported parsed_content descriptor: {parsed_content.__qualname__}."
' Only "TypedDict"-based types and a subset of "DebputyDispatchableType" are supported.'
)
- if is_list_wrapped:
+ if is_list_wrapped and source_content is not None:
if get_origin(source_content) != list:
raise ValueError(
"If the parsed_content is a List type, then source_format must be a List type as well."
@@ -1133,10 +918,15 @@ class ParserGenerator:
parsed_alt_form.type_validator.combine_mapper(bridge_mapper)
)
- _verify_inline_reference_documentation(
- source_content_attributes,
- inline_reference_documentation,
- parsed_alt_form is not None,
+ inline_reference_documentation = (
+ _verify_and_auto_correct_inline_reference_documentation(
+ parsed_content,
+ source_typed_dict,
+ source_content_attributes,
+ inline_reference_documentation,
+ parsed_alt_form is not None,
+ automatic_docs,
+ )
)
if non_mapping_source_only:
parser = DeclarativeNonMappingInputParser(
@@ -1700,45 +1490,133 @@ class ParserGenerator:
return orig_td
-def _verify_inline_reference_documentation(
+def _sort_key(attr: StandardParserAttributeDocumentation) -> Any:
+ key = next(iter(attr.attributes))
+ return attr.sort_category, key
+
+
+def _apply_std_docs(
+ std_doc_table: Optional[
+ Mapping[Type[Any], Sequence[StandardParserAttributeDocumentation]]
+ ],
+ source_format_typed_dict: Type[Any],
+ attribute_docs: Optional[Sequence[ParserAttributeDocumentation]],
+) -> Optional[Sequence[ParserAttributeDocumentation]]:
+ if std_doc_table is None or not std_doc_table:
+ return attribute_docs
+
+ has_docs_for = set()
+ if attribute_docs:
+ for attribute_doc in attribute_docs:
+ has_docs_for.update(attribute_doc.attributes)
+
+ base_seen = set()
+ std_docs_used = []
+
+ remaining_bases = set(getattr(source_format_typed_dict, "__orig_bases__", []))
+ base_seen.update(remaining_bases)
+ while remaining_bases:
+ base = remaining_bases.pop()
+ new_bases_to_check = {
+ x for x in getattr(base, "__orig_bases__", []) if x not in base_seen
+ }
+ remaining_bases.update(new_bases_to_check)
+ base_seen.update(new_bases_to_check)
+ std_docs = std_doc_table.get(base)
+ if std_docs:
+ for std_doc in std_docs:
+ if any(a in has_docs_for for a in std_doc.attributes):
+ # If there is any overlap, do not add the docs
+ continue
+ has_docs_for.update(std_doc.attributes)
+ std_docs_used.append(std_doc)
+
+ if not std_docs_used:
+ return attribute_docs
+ docs = sorted(std_docs_used, key=_sort_key)
+ if attribute_docs:
+ # Plugin provided attributes first
+ c = list(attribute_docs)
+ c.extend(docs)
+ docs = c
+ return tuple(docs)
+
+
+def _verify_and_auto_correct_inline_reference_documentation(
+ parsed_content: Type[TD],
+ source_typed_dict: Type[Any],
source_content_attributes: Mapping[str, AttributeDescription],
inline_reference_documentation: Optional[ParserDocumentation],
has_alt_form: bool,
-) -> None:
- if inline_reference_documentation is None:
- return
- attribute_doc = inline_reference_documentation.attribute_doc
- if attribute_doc:
+ automatic_docs: Optional[
+ Mapping[Type[Any], Sequence[StandardParserAttributeDocumentation]]
+ ] = None,
+) -> Optional[ParserDocumentation]:
+ orig_attribute_docs = (
+ inline_reference_documentation.attribute_doc
+ if inline_reference_documentation
+ else None
+ )
+ attribute_docs = _apply_std_docs(
+ automatic_docs,
+ source_typed_dict,
+ orig_attribute_docs,
+ )
+ if inline_reference_documentation is None and attribute_docs is None:
+ return None
+ changes = {}
+ if attribute_docs:
seen = set()
- for attr_doc in attribute_doc:
+ had_any_custom_docs = False
+ for attr_doc in attribute_docs:
+ if not isinstance(attr_doc, StandardParserAttributeDocumentation):
+ had_any_custom_docs = True
for attr_name in attr_doc.attributes:
attr = source_content_attributes.get(attr_name)
if attr is None:
raise ValueError(
- f'The inline_reference_documentation references an attribute "{attr_name}", which does not'
- f" exist in the source format."
+ f"The inline_reference_documentation for the source format of {parsed_content.__qualname__}"
+ f' references an attribute "{attr_name}", which does not exist in the source format.'
)
if attr_name in seen:
raise ValueError(
- f'The inline_reference_documentation has documentation for "{attr_name}" twice,'
- f" which is not supported. Please document it at most once"
+ f"The inline_reference_documentation for the source format of {parsed_content.__qualname__}"
+ f' has documentation for "{attr_name}" twice, which is not supported.'
+ f" Please document it at most once"
)
seen.add(attr_name)
-
undocumented = source_content_attributes.keys() - seen
if undocumented:
- undocumented_attrs = ", ".join(undocumented)
- raise ValueError(
- "The following attributes were not documented. If this is deliberate, then please"
- ' declare each them as undocumented (via undocumented_attr("foo")):'
- f" {undocumented_attrs}"
- )
+ if had_any_custom_docs:
+ undocumented_attrs = ", ".join(undocumented)
+ raise ValueError(
+ f"The following attributes were not documented for the source format of"
+ f" {parsed_content.__qualname__}. If this is deliberate, then please"
+                    ' declare each of them as undocumented (via undocumented_attr("foo")):'
+ f" {undocumented_attrs}"
+ )
+ combined_docs = list(attribute_docs)
+ combined_docs.extend(undocumented_attr(a) for a in sorted(undocumented))
+ attribute_docs = combined_docs
+
+ if attribute_docs and orig_attribute_docs != attribute_docs:
+ assert attribute_docs is not None
+ changes["attribute_doc"] = tuple(attribute_docs)
- if inline_reference_documentation.alt_parser_description and not has_alt_form:
+ if (
+ inline_reference_documentation is not None
+ and inline_reference_documentation.alt_parser_description
+ and not has_alt_form
+ ):
raise ValueError(
"The inline_reference_documentation had documentation for an non-mapping format,"
" but the source format does not have a non-mapping format."
)
+ if changes:
+ if inline_reference_documentation is None:
+ inline_reference_documentation = reference_documentation()
+ return inline_reference_documentation.replace(**changes)
+ return inline_reference_documentation
def _check_conflicts(
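
The new _apply_std_docs helper above walks the source TypedDict's __orig_bases__ and adopts standard attribute documentation only for attributes the plugin did not document itself. A simplified, standalone illustration of that merge rule follows; plain dicts and sets stand in for debputy's ParserAttributeDocumentation objects and all names are illustrative.

def merge_docs(plugin_docs, std_docs):
    documented = set()
    for doc in plugin_docs:
        documented.update(doc["attributes"])
    merged = list(plugin_docs)  # plugin-provided documentation keeps first position
    for doc in std_docs:
        if any(attr in documented for attr in doc["attributes"]):
            continue  # any overlap means the standard entry is dropped entirely
        documented.update(doc["attributes"])
        merged.append(doc)
    return merged


plugin_docs = [{"attributes": {"when"}, "text": "custom docs for when"}]
std_docs = [
    {"attributes": {"when"}, "text": "standard docs for when"},
    {"attributes": {"into"}, "text": "standard docs for into"},
]
print(merge_docs(plugin_docs, std_docs))
# -> keeps the custom "when" docs and adds only the standard "into" docs
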
diff --git a/src/debputy/manifest_parser/exceptions.py b/src/debputy/manifest_parser/exceptions.py
index 671ec1b..f058458 100644
--- a/src/debputy/manifest_parser/exceptions.py
+++ b/src/debputy/manifest_parser/exceptions.py
@@ -1,9 +1,17 @@
from debputy.exceptions import DebputyRuntimeError
-class ManifestParseException(DebputyRuntimeError):
+class ManifestException(DebputyRuntimeError):
+ pass
+
+
+class ManifestParseException(ManifestException):
pass
class ManifestTypeException(ManifestParseException):
pass
+
+
+class ManifestInvalidUserDataException(ManifestException):
+ pass
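
The new intermediate ManifestException base class lets callers treat parse errors and invalid-user-data errors uniformly while other DebputyRuntimeErrors pass through. A minimal sketch, assuming debputy 0.1.45 is importable; the report helper is illustrative.

from debputy.manifest_parser.exceptions import (
    ManifestException,
    ManifestInvalidUserDataException,
    ManifestParseException,
)


def report(problem: Exception) -> None:
    try:
        raise problem
    except ManifestException as e:
        # Catches both subclasses shown above.
        print(f"manifest problem ({type(e).__name__}): {e}")


report(ManifestParseException("unknown attribute 'instal'"))
report(ManifestInvalidUserDataException("requested owner does not exist"))
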
diff --git a/src/debputy/manifest_parser/mapper_code.py b/src/debputy/manifest_parser/mapper_code.py
index d7a08c3..f206af9 100644
--- a/src/debputy/manifest_parser/mapper_code.py
+++ b/src/debputy/manifest_parser/mapper_code.py
@@ -4,22 +4,25 @@ from typing import (
Union,
List,
Callable,
+ TYPE_CHECKING,
)
from debputy.manifest_parser.exceptions import ManifestTypeException
-from debputy.manifest_parser.parser_data import ParserContextData
-from debputy.manifest_parser.util import AttributePath
from debputy.packages import BinaryPackage
from debputy.util import assume_not_none
+if TYPE_CHECKING:
+ from debputy.manifest_parser.util import AttributePath
+ from debputy.manifest_parser.parser_data import ParserContextData
+
S = TypeVar("S")
T = TypeVar("T")
def type_mapper_str2package(
raw_package_name: str,
- ap: AttributePath,
- opc: Optional[ParserContextData],
+ ap: "AttributePath",
+ opc: Optional["ParserContextData"],
) -> BinaryPackage:
pc = assume_not_none(opc)
if "{{" in raw_package_name:
@@ -50,7 +53,7 @@ def type_mapper_str2package(
def wrap_into_list(
x: T,
- _ap: AttributePath,
+ _ap: "AttributePath",
_pc: Optional["ParserContextData"],
) -> List[T]:
return [x]
@@ -58,18 +61,18 @@ def wrap_into_list(
def normalize_into_list(
x: Union[T, List[T]],
- _ap: AttributePath,
+ _ap: "AttributePath",
_pc: Optional["ParserContextData"],
) -> List[T]:
return x if isinstance(x, list) else [x]
def map_each_element(
- mapper: Callable[[S, AttributePath, Optional["ParserContextData"]], T],
-) -> Callable[[List[S], AttributePath, Optional["ParserContextData"]], List[T]]:
+ mapper: Callable[[S, "AttributePath", Optional["ParserContextData"]], T],
+) -> Callable[[List[S], "AttributePath", Optional["ParserContextData"]], List[T]]:
def _generated_mapper(
xs: List[S],
- ap: AttributePath,
+ ap: "AttributePath",
pc: Optional["ParserContextData"],
) -> List[T]:
return [mapper(s, ap[i], pc) for i, s in enumerate(xs)]
diff --git a/src/debputy/manifest_parser/parse_hints.py b/src/debputy/manifest_parser/parse_hints.py
new file mode 100644
index 0000000..30b8aca
--- /dev/null
+++ b/src/debputy/manifest_parser/parse_hints.py
@@ -0,0 +1,259 @@
+import dataclasses
+from typing import (
+ NotRequired,
+ TypedDict,
+ TYPE_CHECKING,
+ Callable,
+ FrozenSet,
+ Annotated,
+ List,
+)
+
+from debputy.manifest_parser.util import (
+ resolve_package_type_selectors,
+ _ALL_PACKAGE_TYPES,
+)
+from debputy.plugin.api.spec import PackageTypeSelector
+
+if TYPE_CHECKING:
+ from debputy.manifest_parser.parser_data import ParserContextData
+
+
+class DebputyParseHint:
+ @classmethod
+ def target_attribute(cls, target_attribute: str) -> "DebputyParseHint":
+ """Define this source attribute to have a different target attribute name
+
+ As an example:
+
+ >>> from debputy.manifest_parser.declarative_parser import ParserGenerator
+ >>> class SourceType(TypedDict):
+ ... source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")]
+ ... sources: NotRequired[List[str]]
+ >>> class TargetType(TypedDict):
+ ... sources: List[str]
+ >>> pg = ParserGenerator()
+ >>> parser = pg.generate_parser(TargetType, source_content=SourceType)
+
+ In this example, the user can provide either `source` or `sources` and the parser will
+ map them to the `sources` attribute in the `TargetType`. Note this example relies on
+ the builtin mapping of `str` to `List[str]` to align the types between `source` (from
+ SourceType) and `sources` (from TargetType).
+
+ The following rules apply:
+
+ * All source attributes that map to the same target attribute will be mutually exclusive
+ (that is, the user cannot give `source` *and* `sources` as input).
+ * When the target attribute is required, the source attributes are conditionally
+          mandatory, requiring the user to provide exactly one of them.
+ * When multiple source attributes point to a single target attribute, none of the source
+ attributes can be Required.
+ * The annotation can only be used for the source type specification and the source type
+ specification must be different from the target type specification.
+
+        The `target_attribute` annotation can be used without having multiple source attributes. This
+        can be useful if the source attribute name is not a valid python variable identifier, allowing
+        you to rename it to one that is.
+
+ :param target_attribute: The attribute name in the target content
+ :return: The annotation.
+ """
+ return TargetAttribute(target_attribute)
+
+ @classmethod
+ def conflicts_with_source_attributes(
+ cls,
+ *conflicting_source_attributes: str,
+ ) -> "DebputyParseHint":
+ """Declare a conflict with one or more source attributes
+
+ Example:
+
+ >>> from debputy.manifest_parser.declarative_parser import ParserGenerator
+ >>> class SourceType(TypedDict):
+ ... source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")]
+ ... sources: NotRequired[List[str]]
+ ... into_dir: NotRequired[str]
+ ... renamed_to: Annotated[
+ ... NotRequired[str],
+ ... DebputyParseHint.conflicts_with_source_attributes("sources", "into_dir")
+ ... ]
+ >>> class TargetType(TypedDict):
+ ... sources: List[str]
+ ... into_dir: NotRequired[str]
+ ... renamed_to: NotRequired[str]
+ >>> pg = ParserGenerator()
+ >>> parser = pg.generate_parser(TargetType, source_content=SourceType)
+
+        In this example, if the user were to provide `renamed_to` with `sources` or `into_dir`, the parser would
+        report an error. However, the parser will allow `renamed_to` with `source` as the conflict is considered only
+        for the input source. That is, it is irrelevant that `sources` and `source` happen to "map" to the same target
+ attribute.
+
+ The following rules apply:
+ * It is not possible for a target attribute to declare conflicts unless the target type spec is reused as
+ source type spec.
+ * All attributes involved in a conflict must be NotRequired. If any of the attributes are Required, then
+ the parser generator will reject the input.
+ * All attributes listed in the conflict must be valid attributes in the source type spec.
+
+ Note you do not have to specify conflicts between two attributes with the same target attribute name. The
+ `target_attribute` annotation will handle that for you.
+
+ :param conflicting_source_attributes: All source attributes that cannot be used with this attribute.
+ :return: The annotation.
+ """
+ if len(conflicting_source_attributes) < 1:
+ raise ValueError(
+ "DebputyParseHint.conflicts_with_source_attributes requires at least one attribute as input"
+ )
+ return ConflictWithSourceAttribute(frozenset(conflicting_source_attributes))
+
+ @classmethod
+ def required_when_single_binary(
+ cls,
+ *,
+ package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES,
+ ) -> "DebputyParseHint":
+ """Declare a source attribute as required when the source package produces exactly one binary package
+
+ The attribute in question must always be declared as `NotRequired` in the TypedDict and this condition
+ can only be used for source attributes.
+ """
+ resolved_package_types = resolve_package_type_selectors(package_type)
+ reason = "The field is required for source packages producing exactly one binary package"
+ if resolved_package_types != _ALL_PACKAGE_TYPES:
+ types = ", ".join(sorted(resolved_package_types))
+ reason += f" of type {types}"
+            return ConditionalRequired(
+                reason,
+                lambda c: len(
+                    [
+                        p
+                        for p in c.binary_packages.values()
+                        if p.package_type in package_type
+                    ]
+                )
+                == 1,
+            )
+ return ConditionalRequired(
+ reason,
+ lambda c: c.is_single_binary_package,
+ )
+
+ @classmethod
+ def required_when_multi_binary(
+ cls,
+ *,
+ package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES,
+ ) -> "DebputyParseHint":
+        """Declare a source attribute as required when the source package produces two or more binary packages
+
+ The attribute in question must always be declared as `NotRequired` in the TypedDict and this condition
+ can only be used for source attributes.
+ """
+ resolved_package_types = resolve_package_type_selectors(package_type)
+ reason = "The field is required for source packages producing two or more binary packages"
+ if resolved_package_types != _ALL_PACKAGE_TYPES:
+ types = ", ".join(sorted(resolved_package_types))
+ reason = (
+                "The field is required for source packages not producing exactly one binary package"
+ f" of type {types}"
+ )
+            return ConditionalRequired(
+                reason,
+                lambda c: len(
+                    [
+                        p
+                        for p in c.binary_packages.values()
+                        if p.package_type in package_type
+                    ]
+                )
+                != 1,
+            )
+ return ConditionalRequired(
+ reason,
+ lambda c: not c.is_single_binary_package,
+ )
+
+ @classmethod
+ def manifest_attribute(cls, attribute: str) -> "DebputyParseHint":
+ """Declare what the attribute name (as written in the manifest) should be
+
+        By default, debputy performs an attribute normalization that takes valid python identifiers such
+        as `dest_dir` and remaps them to the manifest variant (such as `dest-dir`) automatically. If you have
+ a special case, where this built-in normalization is insufficient or the python name is considerably
+ different from what the user would write in the manifest, you can use this parse hint to set the
+ name that the user would have to write in the manifest for this attribute.
+
+ >>> from debputy.manifest_parser.base_types import FileSystemMatchRule, FileSystemExactMatchRule
+ >>> class SourceType(TypedDict):
+ ... source: List[FileSystemMatchRule]
+ ... # Use "as" in the manifest because "as_" was not pretty enough
+ ... install_as: Annotated[NotRequired[FileSystemExactMatchRule], DebputyParseHint.manifest_attribute("as")]
+
+ In this example, we use the parse hint to use "as" as the name in the manifest, because we cannot
+        use "as" as a valid python identifier (it is a keyword). While debputy would map `as_` to `as` for us,
+ we have chosen to use `install_as` as a python identifier.
+ """
+ return ManifestAttribute(attribute)
+
+ @classmethod
+ def not_path_error_hint(cls) -> "DebputyParseHint":
+ """Mark this attribute as not a "path hint" when it comes to reporting errors
+
+        By default, `debputy` will pick up attributes that use path names (FileSystemMatchRule) as
+ candidates for parse error hints (the little "<Search for: VALUE>" in error messages).
+
+        Most rules only have one active path-based attribute and paths tend to be unique enough
+ that it helps people spot the issue faster. However, in rare cases, you can have multiple
+ attributes that fit the bill. In this case, this hint can be used to "hide" the suboptimal
+ choice. As an example:
+
+ >>> from debputy.manifest_parser.base_types import FileSystemMatchRule, FileSystemExactMatchRule
+ >>> class SourceType(TypedDict):
+ ... source: List[FileSystemMatchRule]
+ ... install_as: Annotated[NotRequired[FileSystemExactMatchRule], DebputyParseHint.not_path_error_hint()]
+
+ In this case, without the hint, `debputy` might pick up `install_as` as the attribute to
+        use as a hint for error reporting. However, here we have decided that we never want `install_as`,
+ leaving `source` as the only option.
+
+ Generally, this type hint must be placed on the **source** format. Any source attribute matching
+ the parsed format will be ignored.
+
+ Mind the asymmetry: The annotation is placed in the **source** format while `debputy` looks at
+ the type of the target attribute to determine if it counts as path.
+ """
+ return NOT_PATH_HINT
+
+
+@dataclasses.dataclass(frozen=True, slots=True)
+class TargetAttribute(DebputyParseHint):
+ attribute: str
+
+
+@dataclasses.dataclass(frozen=True, slots=True)
+class ConflictWithSourceAttribute(DebputyParseHint):
+ conflicting_attributes: FrozenSet[str]
+
+
+@dataclasses.dataclass(frozen=True, slots=True)
+class ConditionalRequired(DebputyParseHint):
+ reason: str
+ condition: Callable[["ParserContextData"], bool]
+
+ def condition_applies(self, context: "ParserContextData") -> bool:
+ return self.condition(context)
+
+
+@dataclasses.dataclass(frozen=True, slots=True)
+class ManifestAttribute(DebputyParseHint):
+ attribute: str
+
+
+class NotPathHint(DebputyParseHint):
+ pass
+
+
+NOT_PATH_HINT = NotPathHint()
diff --git a/src/debputy/manifest_parser/parser_data.py b/src/debputy/manifest_parser/parser_data.py
index 30d9ce0..acc5c67 100644
--- a/src/debputy/manifest_parser/parser_data.py
+++ b/src/debputy/manifest_parser/parser_data.py
@@ -11,12 +11,15 @@ from debian.debian_support import DpkgArchTable
from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
+from debputy.manifest_parser.base_types import BuildEnvironmentDefinition
from debputy.manifest_parser.exceptions import ManifestParseException
from debputy.manifest_parser.util import AttributePath
-from debputy.packages import BinaryPackage
-from debputy.plugin.api.impl_types import (
+from debputy.manifest_parser.util import (
_ALL_PACKAGE_TYPES,
resolve_package_type_selectors,
+)
+from debputy.packages import BinaryPackage
+from debputy.plugin.api.impl_types import (
TP,
DispatchingTableParser,
TTP,
@@ -101,7 +104,7 @@ class ParserContextData:
raise NotImplementedError
@property
- def build_env(self) -> DebBuildOptionsAndProfiles:
+ def deb_options_and_profiles(self) -> DebBuildOptionsAndProfiles:
raise NotImplementedError
@contextlib.contextmanager
@@ -129,3 +132,8 @@ class ParserContextData:
@property
def debputy_integration_mode(self) -> DebputyIntegrationMode:
raise NotImplementedError
+
+ def resolve_build_environment(
+ self, name: Optional[str], attribute_path: AttributePath
+ ) -> BuildEnvironmentDefinition:
+ raise NotImplementedError
diff --git a/src/debputy/manifest_parser/tagging_types.py b/src/debputy/manifest_parser/tagging_types.py
new file mode 100644
index 0000000..83030f0
--- /dev/null
+++ b/src/debputy/manifest_parser/tagging_types.py
@@ -0,0 +1,36 @@
+import dataclasses
+from typing import (
+ TypedDict,
+ TYPE_CHECKING,
+ Generic,
+ Type,
+ Callable,
+ Optional,
+)
+
+from debputy.plugin.plugin_state import current_debputy_plugin_required
+from debputy.types import S
+from debputy.util import T
+
+if TYPE_CHECKING:
+ from debputy.manifest_parser.parser_data import ParserContextData
+
+ from debputy.manifest_parser.util import AttributePath
+
+
+class DebputyParsedContent(TypedDict):
+ pass
+
+
+class DebputyDispatchableType:
+ __slots__ = ("_debputy_plugin",)
+
+ def __init__(self) -> None:
+ self._debputy_plugin = current_debputy_plugin_required()
+
+
+@dataclasses.dataclass
+class TypeMapping(Generic[S, T]):
+ target_type: Type[T]
+ source_type: Type[S]
+ mapper: Callable[[S, "AttributePath", Optional["ParserContextData"]], T]
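
A minimal sketch of constructing the relocated TypeMapping dataclass above, assuming debputy is importable. The str-to-Path mapping is illustrative, and the None placeholders stand in for the AttributePath and ParserContextData arguments that the real parser machinery would supply.

from pathlib import Path

from debputy.manifest_parser.tagging_types import TypeMapping

str_to_path = TypeMapping(
    target_type=Path,
    source_type=str,
    mapper=lambda value, attribute_path, parser_context: Path(value),
)
print(str_to_path.mapper("usr/share/doc", None, None))  # -> PosixPath('usr/share/doc')
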
diff --git a/src/debputy/manifest_parser/util.py b/src/debputy/manifest_parser/util.py
index bcaa617..4e8fd7c 100644
--- a/src/debputy/manifest_parser/util.py
+++ b/src/debputy/manifest_parser/util.py
@@ -15,6 +15,8 @@ from typing import (
Iterable,
Container,
Literal,
+ FrozenSet,
+ cast,
)
from debputy.yaml.compat import CommentedBase
@@ -23,7 +25,7 @@ from debputy.manifest_parser.exceptions import ManifestParseException
if TYPE_CHECKING:
from debputy.manifest_parser.parser_data import ParserContextData
- from debputy.plugin.api.spec import DebputyIntegrationMode
+ from debputy.plugin.api.spec import DebputyIntegrationMode, PackageTypeSelector
MP = TypeVar("MP", bound="DebputyParseHint")
@@ -34,6 +36,25 @@ AttributePathAliasMapping = Mapping[
LineReportKind = Literal["key", "value", "container"]
+_PACKAGE_TYPE_DEB_ONLY = frozenset(["deb"])
+_ALL_PACKAGE_TYPES = frozenset(["deb", "udeb"])
+
+
+def resolve_package_type_selectors(
+ package_type: "PackageTypeSelector",
+) -> FrozenSet[str]:
+ if package_type is _ALL_PACKAGE_TYPES or package_type is _PACKAGE_TYPE_DEB_ONLY:
+ return cast("FrozenSet[str]", package_type)
+ if isinstance(package_type, str):
+ return (
+ _PACKAGE_TYPE_DEB_ONLY
+ if package_type == "deb"
+ else frozenset([package_type])
+ )
+ else:
+ return frozenset(package_type)
+
+
class AttributePath:
__slots__ = ("parent", "container", "name", "alias_mapping", "path_hint")
@@ -94,7 +115,7 @@ class AttributePath:
lc_data = lc.value(key)
else:
lc_data = lc.item(key)
- except (AttributeError, RuntimeError, LookupError):
+ except (AttributeError, RuntimeError, LookupError, TypeError):
lc_data = None
else:
lc_data = None
@@ -161,7 +182,7 @@ class AttributePath:
if container is not None:
try:
child_container = self.container[item]
- except (AttributeError, RuntimeError, LookupError):
+ except (AttributeError, RuntimeError, LookupError, TypeError):
child_container = None
else:
child_container = None
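
Quick demonstration of the resolve_package_type_selectors helper relocated into this module above, assuming debputy is importable:

from debputy.manifest_parser.util import resolve_package_type_selectors

print(resolve_package_type_selectors("deb"))            # frozenset containing only "deb"
print(resolve_package_type_selectors(["deb", "udeb"]))  # frozenset containing both package types
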
diff --git a/src/debputy/package_build/assemble_deb.py b/src/debputy/package_build/assemble_deb.py
index fd92f37..21fe0d6 100644
--- a/src/debputy/package_build/assemble_deb.py
+++ b/src/debputy/package_build/assemble_deb.py
@@ -95,8 +95,8 @@ def assemble_debs(
package_metadata_context = dctrl_data.package_metadata_context
if (
dbgsym_package_name in package_data_table
- or "noautodbgsym" in manifest.build_env.deb_build_options
- or "noddebs" in manifest.build_env.deb_build_options
+ or "noautodbgsym" in manifest.deb_options_and_profiles.deb_build_options
+ or "noddebs" in manifest.deb_options_and_profiles.deb_build_options
):
# Discard the dbgsym part if it conflicts with a real package, or
# we were asked not to build it.
diff --git a/src/debputy/packager_provided_files.py b/src/debputy/packager_provided_files.py
index a35beec..5657ad2 100644
--- a/src/debputy/packager_provided_files.py
+++ b/src/debputy/packager_provided_files.py
@@ -35,6 +35,8 @@ _KNOWN_NON_TYPO_EXTENSIONS = frozenset(
"bash",
"pl",
"py",
+ # Fairly common image format in older packages
+ "xpm",
}
)
diff --git a/src/debputy/packages.py b/src/debputy/packages.py
index 3a6ee16..0a3876a 100644
--- a/src/debputy/packages.py
+++ b/src/debputy/packages.py
@@ -44,15 +44,15 @@ class DctrlParser:
DpkgArchitectureBuildProcessValuesTable
] = None,
dpkg_arch_query_table: Optional[DpkgArchTable] = None,
- build_env: Optional[DebBuildOptionsAndProfiles] = None,
+ deb_options_and_profiles: Optional[DebBuildOptionsAndProfiles] = None,
ignore_errors: bool = False,
) -> None:
if dpkg_architecture_variables is None:
dpkg_architecture_variables = dpkg_architecture_table()
if dpkg_arch_query_table is None:
dpkg_arch_query_table = DpkgArchTable.load_arch_table()
- if build_env is None:
- build_env = DebBuildOptionsAndProfiles.instance()
+ if deb_options_and_profiles is None:
+ deb_options_and_profiles = DebBuildOptionsAndProfiles.instance()
# If no selection option is set, then all packages are acted on (except the
# excluded ones)
@@ -66,7 +66,7 @@ class DctrlParser:
self.select_arch_any = select_arch_any
self.dpkg_architecture_variables = dpkg_architecture_variables
self.dpkg_arch_query_table = dpkg_arch_query_table
- self.build_env = build_env
+ self.deb_options_and_profiles = deb_options_and_profiles
self.ignore_errors = ignore_errors
@overload
@@ -138,7 +138,7 @@ class DctrlParser:
self.select_arch_any,
self.dpkg_architecture_variables,
self.dpkg_arch_query_table,
- self.build_env,
+ self.deb_options_and_profiles,
i,
)
)
diff --git a/src/debputy/path_matcher.py b/src/debputy/path_matcher.py
index 2917b14..a7b8356 100644
--- a/src/debputy/path_matcher.py
+++ b/src/debputy/path_matcher.py
@@ -161,7 +161,7 @@ class MatchRule:
_error(
f'The pattern "{path_or_glob}" (defined in {definition_source}) looks like it contains a'
f' brace expansion (such as "{{a,b}}" or "{{a..b}}"). Brace expansions are not supported.'
- " If you wanted to match the literal path a brace in it, please use a substitution to insert"
+ " If you wanted to match the literal path with a brace in it, please use a substitution to insert"
f' the opening brace. As an example: "{replacement}"'
)
diff --git a/src/debputy/plugin/api/feature_set.py b/src/debputy/plugin/api/feature_set.py
index a56f37b..30d79be 100644
--- a/src/debputy/plugin/api/feature_set.py
+++ b/src/debputy/plugin/api/feature_set.py
@@ -1,29 +1,24 @@
import dataclasses
-import textwrap
-from typing import Dict, List, Tuple, Sequence, Any
+from typing import Dict, List, Tuple, Sequence, Any, Optional, Type
-from debputy import DEBPUTY_DOC_ROOT_DIR
from debputy.manifest_parser.declarative_parser import ParserGenerator
-from debputy.plugin.api import reference_documentation
from debputy.plugin.api.impl_types import (
DebputyPluginMetadata,
PackagerProvidedFileClassSpec,
MetadataOrMaintscriptDetector,
- TTP,
- DispatchingTableParser,
- TP,
- SUPPORTED_DISPATCHABLE_TABLE_PARSERS,
- DispatchingObjectParser,
- SUPPORTED_DISPATCHABLE_OBJECT_PARSERS,
PluginProvidedManifestVariable,
PluginProvidedPackageProcessor,
PluginProvidedDiscardRule,
ServiceManagerDetails,
PluginProvidedKnownPackagingFile,
PluginProvidedTypeMapping,
- OPARSER_PACKAGES,
- OPARSER_PACKAGES_ROOT,
+ PluginProvidedBuildSystemAutoDetection,
+)
+from debputy.plugin.api.parser_tables import (
+ SUPPORTED_DISPATCHABLE_OBJECT_PARSERS,
+ SUPPORTED_DISPATCHABLE_TABLE_PARSERS,
)
+from debputy.plugin.debputy.to_be_api_types import BuildSystemRule
def _initialize_parser_generator() -> ParserGenerator:
@@ -70,6 +65,9 @@ class PluginProvidedFeatureSet:
manifest_parser_generator: ParserGenerator = dataclasses.field(
default_factory=_initialize_parser_generator
)
+ auto_detectable_build_systems: Dict[
+ Type[BuildSystemRule], PluginProvidedBuildSystemAutoDetection
+ ] = dataclasses.field(default_factory=dict)
def package_processors_in_order(self) -> Sequence[PluginProvidedPackageProcessor]:
order = []
diff --git a/src/debputy/plugin/api/impl.py b/src/debputy/plugin/api/impl.py
index b0674fb..c2f03d0 100644
--- a/src/debputy/plugin/api/impl.py
+++ b/src/debputy/plugin/api/impl.py
@@ -31,7 +31,8 @@ from typing import (
Any,
Literal,
Container,
- get_args,
+ TYPE_CHECKING,
+ is_typeddict,
)
from debputy import DEBPUTY_DOC_ROOT_DIR
@@ -43,16 +44,18 @@ from debputy.exceptions import (
PluginInitializationError,
PluginAPIViolationError,
PluginNotFoundError,
+ PluginIncorrectRegistrationError,
)
from debputy.maintscript_snippet import (
STD_CONTROL_SCRIPTS,
MaintscriptSnippetContainer,
MaintscriptSnippet,
)
-from debputy.manifest_parser.base_types import TypeMapping
from debputy.manifest_parser.exceptions import ManifestParseException
from debputy.manifest_parser.parser_data import ParserContextData
+from debputy.manifest_parser.tagging_types import TypeMapping
from debputy.manifest_parser.util import AttributePath
+from debputy.manifest_parser.util import resolve_package_type_selectors
from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
from debputy.plugin.api.impl_types import (
DebputyPluginMetadata,
@@ -70,11 +73,12 @@ from debputy.plugin.api.impl_types import (
AutomaticDiscardRuleExample,
PPFFormatParam,
ServiceManagerDetails,
- resolve_package_type_selectors,
KnownPackagingFileInfo,
PluginProvidedKnownPackagingFile,
InstallPatternDHCompatRule,
PluginProvidedTypeMapping,
+ PluginProvidedBuildSystemAutoDetection,
+ BSR,
)
from debputy.plugin.api.plugin_parser import (
PLUGIN_METADATA_PARSER,
@@ -108,6 +112,21 @@ from debputy.plugin.api.spec import (
packager_provided_file_reference_documentation,
TypeMappingDocumentation,
DebputyIntegrationMode,
+ reference_documentation,
+ _DEBPUTY_DISPATCH_METADATA_ATTR_NAME,
+ BuildSystemManifestRuleMetadata,
+)
+from debputy.plugin.api.std_docs import _STD_ATTR_DOCS
+from debputy.plugin.debputy.to_be_api_types import (
+ BuildSystemRule,
+ BuildRuleParsedFormat,
+ BSPF,
+ debputy_build_system,
+)
+from debputy.plugin.plugin_state import (
+ run_in_context_of_plugin,
+ run_in_context_of_plugin_wrap_errors,
+ wrap_plugin_code,
)
from debputy.substitution import (
Substitution,
@@ -123,6 +142,9 @@ from debputy.util import (
_warn,
)
+if TYPE_CHECKING:
+ from debputy.highlevel_manifest import HighLevelManifest
+
PLUGIN_TEST_SUFFIX = re.compile(r"_(?:t|test|check)(?:_([a-z0-9_]+))?[.]py$")
@@ -362,7 +384,7 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
all_detectors[self._plugin_name].append(
MetadataOrMaintscriptDetector(
detector_id=auto_detector_id,
- detector=auto_detector,
+ detector=wrap_plugin_code(self._plugin_name, auto_detector),
plugin_metadata=self._plugin_metadata,
applies_to_package_types=package_types,
enabled=True,
@@ -575,7 +597,7 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
package_processors[processor_key] = PluginProvidedPackageProcessor(
processor_id,
resolve_package_type_selectors(package_type),
- processor,
+ wrap_plugin_code(self._plugin_name, processor),
frozenset(dependencies),
self._plugin_metadata,
)
@@ -704,8 +726,8 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
)
service_managers[service_manager] = ServiceManagerDetails(
service_manager,
- detector,
- integrator,
+ wrap_plugin_code(self._plugin_name, detector),
+ wrap_plugin_code(self._plugin_name, integrator),
self._plugin_metadata,
)
@@ -776,7 +798,7 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
dispatching_parser = parser_generator.dispatchable_table_parsers[rule_type]
dispatching_parser.register_keyword(
rule_name,
- handler,
+ wrap_plugin_code(self._plugin_name, handler),
self._plugin_metadata,
inline_reference_documentation=inline_reference_documentation,
)
@@ -820,6 +842,10 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
)
parent_dispatcher = dispatchable_object_parsers[rule_type]
child_dispatcher = dispatchable_object_parsers[object_parser_key]
+
+ if on_end_parse_step is not None:
+ on_end_parse_step = wrap_plugin_code(self._plugin_name, on_end_parse_step)
+
parent_dispatcher.register_child_parser(
rule_name,
child_dispatcher,
@@ -838,7 +864,7 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
def pluggable_manifest_rule(
self,
rule_type: Union[TTP, str],
- rule_name: Union[str, List[str]],
+ rule_name: Union[str, Sequence[str]],
parsed_format: Type[PF],
handler: DIPHandler,
*,
@@ -847,8 +873,15 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
expected_debputy_integration_mode: Optional[
Container[DebputyIntegrationMode]
] = None,
+ apply_standard_attribute_documentation: bool = False,
) -> None:
+        # When unrestricting this, consider which types will be unrestricted
self._restricted_api()
+ if apply_standard_attribute_documentation and sys.version_info < (3, 12):
+ _error(
+ f"The plugin {self._plugin_metadata.plugin_name} requires python 3.12 due to"
+ f" its use of apply_standard_attribute_documentation"
+ )
feature_set = self._feature_set
parser_generator = feature_set.manifest_parser_generator
if isinstance(rule_type, str):
@@ -870,16 +903,22 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
)
dispatching_parser = parser_generator.dispatchable_table_parsers[rule_type]
+ if apply_standard_attribute_documentation:
+ docs = _STD_ATTR_DOCS
+ else:
+ docs = None
+
parser = feature_set.manifest_parser_generator.generate_parser(
parsed_format,
source_content=source_format,
inline_reference_documentation=inline_reference_documentation,
expected_debputy_integration_mode=expected_debputy_integration_mode,
+ automatic_docs=docs,
)
dispatching_parser.register_parser(
rule_name,
parser,
- handler,
+ wrap_plugin_code(self._plugin_name, handler),
self._plugin_metadata,
)
@@ -890,6 +929,108 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
self._unloaders.append(_unload)
+ def register_build_system(
+ self,
+ build_system_definition: type[BSPF],
+ ) -> None:
+ self._restricted_api()
+ if not is_typeddict(build_system_definition):
+ raise PluginInitializationError(
+ f"Expected build_system_definition to be a subclass of {BuildRuleParsedFormat.__name__},"
+ f" but got {build_system_definition.__name__} instead"
+ )
+ metadata = getattr(
+ build_system_definition,
+ _DEBPUTY_DISPATCH_METADATA_ATTR_NAME,
+ None,
+ )
+ if not isinstance(metadata, BuildSystemManifestRuleMetadata):
+ raise PluginIncorrectRegistrationError(
+ f"The {build_system_definition.__qualname__} type should have been annotated with"
+ f" @{debputy_build_system.__name__}."
+ )
+ assert len(metadata.manifest_keywords) == 1
+ build_system_impl = metadata.build_system_impl
+ assert build_system_impl is not None
+ manifest_keyword = next(iter(metadata.manifest_keywords))
+ self.pluggable_manifest_rule(
+ metadata.dispatched_type,
+ metadata.manifest_keywords,
+ build_system_definition,
+ # pluggable_manifest_rule does the wrapping
+ metadata.unwrapped_constructor,
+ source_format=metadata.source_format,
+ )
+ self._auto_detectable_build_system(
+ manifest_keyword,
+ build_system_impl,
+ constructor=wrap_plugin_code(
+ self._plugin_name,
+ build_system_impl,
+ ),
+ shadowing_build_systems_when_active=metadata.auto_detection_shadow_build_systems,
+ )
+
+ def _auto_detectable_build_system(
+ self,
+ manifest_keyword: str,
+ rule_type: type[BSR],
+ *,
+ shadowing_build_systems_when_active: FrozenSet[str] = frozenset(),
+ constructor: Optional[
+ Callable[[BuildRuleParsedFormat, AttributePath, "HighLevelManifest"], BSR]
+ ] = None,
+ ) -> None:
+ self._restricted_api()
+ feature_set = self._feature_set
+ existing = feature_set.auto_detectable_build_systems.get(rule_type)
+ if existing is not None:
+            bs_name = rule_type.__name__
+ if existing.plugin_metadata.plugin_name == self._plugin_name:
+ message = (
+ f"Bug in the plugin {self._plugin_name}: It tried to register the"
+ f' auto-detection of the build system "{bs_name}" twice.'
+ )
+ else:
+ message = (
+ f"The plugins {existing.plugin_metadata.plugin_name} and {self._plugin_name}"
+ f' both tried to provide auto-detection of the build system "{bs_name}"'
+ )
+ raise PluginConflictError(
+ message, existing.plugin_metadata, self._plugin_metadata
+ )
+
+ if constructor is None:
+
+ def impl(
+ attributes: BuildRuleParsedFormat,
+ attribute_path: AttributePath,
+ manifest: "HighLevelManifest",
+ ) -> BSR:
+ return rule_type(attributes, attribute_path, manifest)
+
+ else:
+ impl = constructor
+
+ feature_set.auto_detectable_build_systems[rule_type] = (
+ PluginProvidedBuildSystemAutoDetection(
+ manifest_keyword,
+ rule_type,
+ wrap_plugin_code(self._plugin_name, rule_type.auto_detect_build_system),
+ impl,
+ shadowing_build_systems_when_active,
+ self._plugin_metadata,
+ )
+ )
+
+ def _unload() -> None:
+ try:
+ del feature_set.auto_detectable_build_systems[rule_type]
+ except KeyError:
+ pass
+
+ self._unloaders.append(_unload)
+
def known_packaging_files(
self,
packaging_file_details: KnownPackagingFileInfo,
@@ -981,6 +1122,7 @@ class DebputyPluginInitializerProvider(DebputyPluginInitializer):
message, existing.plugin_metadata, self._plugin_metadata
)
parser_generator = self._feature_set.manifest_parser_generator
+ # TODO: Wrap the mapper in the plugin context
mapped_types[target_type] = PluginProvidedTypeMapping(
type_mapping, reference_documentation, self._plugin_metadata
)
@@ -1437,6 +1579,10 @@ def load_plugin_features(
if plugin_metadata.plugin_name not in unloadable_plugins:
raise
if debug_mode:
+ _warn(
+ f"The optional plugin {plugin_metadata.plugin_name} failed during load. Re-raising due"
+ f" to --debug/-d."
+ )
raise
try:
api.unload_plugin()
@@ -1448,11 +1594,6 @@ def load_plugin_features(
)
raise e from None
else:
- if debug_mode:
- _warn(
- f"The optional plugin {plugin_metadata.plugin_name} failed during load. Re-raising due"
- f" to --debug/-d."
- )
_warn(
f"The optional plugin {plugin_metadata.plugin_name} failed during load. The plugin was"
f" deactivated. Use debug mode (--debug) to show the stacktrace (the warning will become an error)"
@@ -1635,7 +1776,7 @@ def _resolve_module_initializer(
)
sys.modules[module_name] = mod
try:
- loader.exec_module(mod)
+ run_in_context_of_plugin(plugin_name, loader.exec_module, mod)
except (Exception, GeneratorExit) as e:
raise PluginInitializationError(
f"Failed to load {plugin_name} (path: {module_fs_path})."
@@ -1645,7 +1786,9 @@ def _resolve_module_initializer(
if module is None:
try:
- module = importlib.import_module(module_name)
+ module = run_in_context_of_plugin(
+ plugin_name, importlib.import_module, module_name
+ )
except ModuleNotFoundError as e:
if module_fs_path is None:
raise PluginMetadataError(
@@ -1660,7 +1803,12 @@ def _resolve_module_initializer(
f' explicit "module" definition in {json_file_path}.'
) from e
- plugin_initializer = getattr(module, plugin_initializer_name)
+ plugin_initializer = run_in_context_of_plugin_wrap_errors(
+ plugin_name,
+ getattr,
+ module,
+ plugin_initializer_name,
+ )
if plugin_initializer is None:
raise PluginMetadataError(
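
The run_in_context_of_plugin() and wrap_plugin_code() helpers used throughout this file come from debputy/plugin/plugin_state.py, which is not part of this hunk. The following standalone sketch is only a hypothetical analogue of that pattern, illustrating how the currently executing plugin can be tracked in a contextvars.ContextVar so failures can be attributed to the right plugin; all names are illustrative.

import contextvars
from typing import Any, Callable

_current_plugin: contextvars.ContextVar[str] = contextvars.ContextVar("current_plugin")


def run_in_context_of_plugin(plugin: str, func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
    # Record which plugin is executing for the duration of the call.
    token = _current_plugin.set(plugin)
    try:
        return func(*args, **kwargs)
    finally:
        _current_plugin.reset(token)


def wrap_plugin_code(plugin: str, func: Callable[..., Any]) -> Callable[..., Any]:
    def _wrapped(*args: Any, **kwargs: Any) -> Any:
        return run_in_context_of_plugin(plugin, func, *args, **kwargs)

    return _wrapped


print(run_in_context_of_plugin("plugin-a", _current_plugin.get))  # -> plugin-a
detector = wrap_plugin_code("plugin-b", _current_plugin.get)
print(detector())  # -> plugin-b
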
diff --git a/src/debputy/plugin/api/impl_types.py b/src/debputy/plugin/api/impl_types.py
index 1a9bfdf..85beaf8 100644
--- a/src/debputy/plugin/api/impl_types.py
+++ b/src/debputy/plugin/api/impl_types.py
@@ -1,6 +1,5 @@
import dataclasses
import os.path
-import textwrap
from typing import (
Optional,
Callable,
@@ -24,22 +23,21 @@ from typing import (
Set,
Iterator,
Container,
+ Protocol,
)
from weakref import ref
-from debputy import DEBPUTY_DOC_ROOT_DIR
from debputy.exceptions import (
DebputyFSIsROError,
PluginAPIViolationError,
PluginConflictError,
UnhandledOrUnexpectedErrorFromPluginError,
+ PluginBaseError,
+ PluginInitializationError,
)
from debputy.filesystem_scan import as_path_def
-from debputy.installations import InstallRule
-from debputy.maintscript_snippet import DpkgMaintscriptHelperCommand
-from debputy.manifest_conditions import ManifestCondition
-from debputy.manifest_parser.base_types import DebputyParsedContent, TypeMapping
from debputy.manifest_parser.exceptions import ManifestParseException
+from debputy.manifest_parser.tagging_types import DebputyParsedContent, TypeMapping
from debputy.manifest_parser.util import AttributePath, check_integration_mode
from debputy.packages import BinaryPackage
from debputy.plugin.api import (
@@ -62,15 +60,16 @@ from debputy.plugin.api.spec import (
TypeMappingDocumentation,
DebputyIntegrationMode,
)
+from debputy.plugin.plugin_state import (
+ run_in_context_of_plugin,
+)
from debputy.substitution import VariableContext
-from debputy.transformation_rules import TransformationRule
from debputy.util import _normalize_path, package_cross_check_precheck
if TYPE_CHECKING:
from debputy.plugin.api.spec import (
ServiceDetector,
ServiceIntegrator,
- PackageTypeSelector,
)
from debputy.manifest_parser.parser_data import ParserContextData
from debputy.highlevel_manifest import (
@@ -78,10 +77,10 @@ if TYPE_CHECKING:
PackageTransformationDefinition,
BinaryPackageData,
)
-
-
-_PACKAGE_TYPE_DEB_ONLY = frozenset(["deb"])
-_ALL_PACKAGE_TYPES = frozenset(["deb", "udeb"])
+ from debputy.plugin.debputy.to_be_api_types import (
+ BuildSystemRule,
+ BuildRuleParsedFormat,
+ )
TD = TypeVar("TD", bound="Union[DebputyParsedContent, List[DebputyParsedContent]]")
@@ -89,26 +88,12 @@ PF = TypeVar("PF")
SF = TypeVar("SF")
TP = TypeVar("TP")
TTP = Type[TP]
+BSR = TypeVar("BSR", bound="BuildSystemRule")
DIPKWHandler = Callable[[str, AttributePath, "ParserContextData"], TP]
DIPHandler = Callable[[str, PF, AttributePath, "ParserContextData"], TP]
-def resolve_package_type_selectors(
- package_type: "PackageTypeSelector",
-) -> FrozenSet[str]:
- if package_type is _ALL_PACKAGE_TYPES or package_type is _PACKAGE_TYPE_DEB_ONLY:
- return cast("FrozenSet[str]", package_type)
- if isinstance(package_type, str):
- return (
- _PACKAGE_TYPE_DEB_ONLY
- if package_type == "deb"
- else frozenset([package_type])
- )
- else:
- return frozenset(package_type)
-
-
@dataclasses.dataclass(slots=True)
class DebputyPluginMetadata:
plugin_name: str
@@ -143,7 +128,17 @@ class DebputyPluginMetadata:
def load_plugin(self) -> None:
plugin_loader = self.plugin_loader
assert plugin_loader is not None
- self.plugin_initializer = plugin_loader()
+ try:
+ self.plugin_initializer = run_in_context_of_plugin(
+ self.plugin_name,
+ plugin_loader,
+ )
+ except PluginBaseError:
+ raise
+ except Exception as e:
+ raise PluginInitializationError(
+ f"Initialization of {self.plugin_name} failed due to its initializer raising an exception"
+ ) from e
assert self.plugin_initializer is not None
@@ -270,12 +265,10 @@ class MetadataOrMaintscriptDetector:
" this stage (file system layout is committed and the attempted changes"
" would be lost)."
) from e
- except (ChildProcessError, RuntimeError, AttributeError) as e:
- nv = f"{self.plugin_metadata.plugin_name}"
- raise UnhandledOrUnexpectedErrorFromPluginError(
- f"The plugin {nv} threw an unhandled or unexpected exception from its metadata"
- f" detector with id {self.detector_id}."
- ) from e
+        except UnhandledOrUnexpectedErrorFromPluginError as e:
+            e.add_note(
+                f"The exception was raised by the detector with the ID: {self.detector_id}"
+            )
+            raise
class DeclarativeInputParser(Generic[TD]):
@@ -605,7 +598,7 @@ class DispatchingObjectParser(
)
remaining_valid_attribute_names = ", ".join(remaining_valid_attributes)
raise ManifestParseException(
- f'The attribute "{first_key}" is not applicable at {attribute_path.path}(with the current set'
+ f'The attribute "{first_key}" is not applicable at {attribute_path.path} (with the current set'
" of plugins). Possible attributes available (and not already used) are:"
f" {remaining_valid_attribute_names}.{doc_ref}"
)
@@ -615,7 +608,10 @@ class DispatchingObjectParser(
if value is None:
if isinstance(provided_parser.parser, DispatchingObjectParser):
provided_parser.handler(
- key, {}, attribute_path[key], parser_context
+ key,
+ {},
+ attribute_path[key],
+ parser_context,
)
continue
value_path = attribute_path[key]
@@ -774,64 +770,6 @@ class DeclarativeValuelessKeywordInputParser(DeclarativeInputParser[None]):
)
-SUPPORTED_DISPATCHABLE_TABLE_PARSERS = {
- InstallRule: "installations",
- TransformationRule: "packages.{{PACKAGE}}.transformations",
- DpkgMaintscriptHelperCommand: "packages.{{PACKAGE}}.conffile-management",
- ManifestCondition: "*.when",
-}
-
-OPARSER_MANIFEST_ROOT = "<ROOT>"
-OPARSER_PACKAGES_ROOT = "packages"
-OPARSER_PACKAGES = "packages.{{PACKAGE}}"
-OPARSER_MANIFEST_DEFINITIONS = "definitions"
-
-SUPPORTED_DISPATCHABLE_OBJECT_PARSERS = {
- OPARSER_MANIFEST_ROOT: reference_documentation(
- reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md",
- ),
- OPARSER_MANIFEST_DEFINITIONS: reference_documentation(
- title="Packager provided definitions",
- description="Reusable packager provided definitions such as manifest variables.",
- reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#packager-provided-definitions",
- ),
- OPARSER_PACKAGES: reference_documentation(
- title="Binary package rules",
- description=textwrap.dedent(
- """\
- Inside the manifest, the `packages` mapping can be used to define requests for the binary packages
- you want `debputy` to produce. Each key inside `packages` must be the name of a binary package
- defined in `debian/control`. The value is a dictionary defining which features that `debputy`
- should apply to that binary package. An example could be:
-
- packages:
- foo:
- transformations:
- - create-symlink:
- path: usr/share/foo/my-first-symlink
- target: /usr/share/bar/symlink-target
- - create-symlink:
- path: usr/lib/{{DEB_HOST_MULTIARCH}}/my-second-symlink
- target: /usr/lib/{{DEB_HOST_MULTIARCH}}/baz/symlink-target
- bar:
- transformations:
- - create-directories:
- - some/empty/directory.d
- - another/empty/integration-point.d
- - create-directories:
- path: a/third-empty/directory.d
- owner: www-data
- group: www-data
-
- In this case, `debputy` will create some symlinks inside the `foo` package and some directories for
- the `bar` package. The following subsections define the keys you can use under each binary package.
- """
- ),
- reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#binary-package-rules",
- ),
-}
-
-
@dataclasses.dataclass(slots=True)
class PluginProvidedManifestVariable:
plugin_metadata: DebputyPluginMetadata
@@ -1214,6 +1152,11 @@ class PluginProvidedKnownPackagingFile:
plugin_metadata: DebputyPluginMetadata
+class BuildSystemAutoDetector(Protocol):
+
+ def __call__(self, source_root: VirtualPath, *args: Any, **kwargs: Any) -> bool: ...
+
+
@dataclasses.dataclass(slots=True, frozen=True)
class PluginProvidedTypeMapping:
mapped_type: TypeMapping[Any, Any]
@@ -1221,6 +1164,19 @@ class PluginProvidedTypeMapping:
plugin_metadata: DebputyPluginMetadata
+@dataclasses.dataclass(slots=True, frozen=True)
+class PluginProvidedBuildSystemAutoDetection(Generic[BSR]):
+ manifest_keyword: str
+ build_system_rule_type: Type[BSR]
+ detector: BuildSystemAutoDetector
+ constructor: Callable[
+ ["BuildRuleParsedFormat", AttributePath, "HighLevelManifest"],
+ BSR,
+ ]
+ auto_detection_shadow_build_systems: FrozenSet[str]
+ plugin_metadata: DebputyPluginMetadata
+
+
class PackageDataTable:
def __init__(self, package_data_table: Mapping[str, "BinaryPackageData"]) -> None:
self._package_data_table = package_data_table
diff --git a/src/debputy/plugin/api/parser_tables.py b/src/debputy/plugin/api/parser_tables.py
new file mode 100644
index 0000000..37d3e37
--- /dev/null
+++ b/src/debputy/plugin/api/parser_tables.py
@@ -0,0 +1,67 @@
+import textwrap
+
+from debputy import DEBPUTY_DOC_ROOT_DIR
+from debputy.installations import InstallRule
+from debputy.maintscript_snippet import DpkgMaintscriptHelperCommand
+from debputy.manifest_conditions import ManifestCondition
+from debputy.plugin.api import reference_documentation
+from debputy.plugin.debputy.to_be_api_types import BuildRule
+from debputy.transformation_rules import TransformationRule
+
+SUPPORTED_DISPATCHABLE_TABLE_PARSERS = {
+ InstallRule: "installations",
+ TransformationRule: "packages.{{PACKAGE}}.transformations",
+ DpkgMaintscriptHelperCommand: "packages.{{PACKAGE}}.conffile-management",
+ ManifestCondition: "*.when",
+ BuildRule: "builds",
+}
+
+OPARSER_MANIFEST_ROOT = "<ROOT>"
+OPARSER_PACKAGES_ROOT = "packages"
+OPARSER_PACKAGES = "packages.{{PACKAGE}}"
+OPARSER_MANIFEST_DEFINITIONS = "definitions"
+
+SUPPORTED_DISPATCHABLE_OBJECT_PARSERS = {
+ OPARSER_MANIFEST_ROOT: reference_documentation(
+ reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md",
+ ),
+ OPARSER_MANIFEST_DEFINITIONS: reference_documentation(
+ title="Packager provided definitions",
+ description="Reusable packager provided definitions such as manifest variables.",
+ reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#packager-provided-definitions",
+ ),
+ OPARSER_PACKAGES: reference_documentation(
+ title="Binary package rules",
+ description=textwrap.dedent(
+ """\
+ Inside the manifest, the `packages` mapping can be used to define requests for the binary packages
+ you want `debputy` to produce. Each key inside `packages` must be the name of a binary package
+            defined in `debian/control`. The value is a dictionary defining which features `debputy`
+ should apply to that binary package. An example could be:
+
+ packages:
+ foo:
+ transformations:
+ - create-symlink:
+ path: usr/share/foo/my-first-symlink
+ target: /usr/share/bar/symlink-target
+ - create-symlink:
+ path: usr/lib/{{DEB_HOST_MULTIARCH}}/my-second-symlink
+ target: /usr/lib/{{DEB_HOST_MULTIARCH}}/baz/symlink-target
+ bar:
+ transformations:
+ - create-directories:
+ - some/empty/directory.d
+ - another/empty/integration-point.d
+ - create-directories:
+ path: a/third-empty/directory.d
+ owner: www-data
+ group: www-data
+
+ In this case, `debputy` will create some symlinks inside the `foo` package and some directories for
+ the `bar` package. The following subsections define the keys you can use under each binary package.
+ """
+ ),
+ reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#binary-package-rules",
+ ),
+}
diff --git a/src/debputy/plugin/api/plugin_parser.py b/src/debputy/plugin/api/plugin_parser.py
index dd5c0d0..0e7954b 100644
--- a/src/debputy/plugin/api/plugin_parser.py
+++ b/src/debputy/plugin/api/plugin_parser.py
@@ -1,10 +1,10 @@
from typing import NotRequired, List, Any, TypedDict
-from debputy.manifest_parser.base_types import (
+from debputy.manifest_parser.tagging_types import (
DebputyParsedContent,
- OctalMode,
TypeMapping,
)
+from debputy.manifest_parser.base_types import OctalMode
from debputy.manifest_parser.declarative_parser import ParserGenerator
from debputy.plugin.api.impl_types import KnownPackagingFileInfo
diff --git a/src/debputy/plugin/api/spec.py b/src/debputy/plugin/api/spec.py
index b7f19c0..30308f9 100644
--- a/src/debputy/plugin/api/spec.py
+++ b/src/debputy/plugin/api/spec.py
@@ -25,6 +25,7 @@ from typing import (
Tuple,
get_args,
Container,
+ final,
)
from debian.substvars import Substvars
@@ -32,17 +33,23 @@ from debian.substvars import Substvars
from debputy import util
from debputy.exceptions import TestPathWithNonExistentFSPathError, PureVirtualPathError
from debputy.interpreter import Interpreter, extract_shebang_interpreter_from_file
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.manifest_parser.util import parse_symbolic_mode
from debputy.packages import BinaryPackage
from debputy.types import S
if TYPE_CHECKING:
+ from debputy.plugin.debputy.to_be_api_types import BuildRule, BSR, BuildSystemRule
+ from debputy.plugin.api.impl_types import DIPHandler
from debputy.manifest_parser.base_types import (
StaticFileSystemOwner,
StaticFileSystemGroup,
)
+DP = TypeVar("DP", bound=DebputyDispatchableType)
+
+
PluginInitializationEntryPoint = Callable[["DebputyPluginInitializer"], None]
MetadataAutoDetector = Callable[
["VirtualPath", "BinaryCtrlAccessor", "PackageProcessingContext"], None
@@ -86,17 +93,20 @@ DebputyIntegrationMode = Literal[
"dh-sequence-zz-debputy-rrr",
]
+INTEGRATION_MODE_FULL: DebputyIntegrationMode = "full"
INTEGRATION_MODE_DH_DEBPUTY_RRR: DebputyIntegrationMode = "dh-sequence-zz-debputy-rrr"
INTEGRATION_MODE_DH_DEBPUTY: DebputyIntegrationMode = "dh-sequence-zz-debputy"
ALL_DEBPUTY_INTEGRATION_MODES: FrozenSet[DebputyIntegrationMode] = frozenset(
get_args(DebputyIntegrationMode)
)
+_DEBPUTY_DISPATCH_METADATA_ATTR_NAME = "_debputy_dispatch_metadata"
+
def only_integrations(
*integrations: DebputyIntegrationMode,
) -> Container[DebputyIntegrationMode]:
- return frozenset(*integrations)
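+    # Keep the tuple intact: `frozenset(*integrations)` would treat a single mode string as
+    # an iterable of characters (and fail outright with more than one argument).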
+ return frozenset(integrations)
def not_integrations(
@@ -212,6 +222,27 @@ class PathDef:
materialized_content: Optional[str] = None
+@dataclasses.dataclass(slots=True, frozen=True)
+class DispatchablePluggableManifestRuleMetadata(Generic[DP]):
+ """NOT PUBLIC API (used internally by part of the public API)"""
+
+ manifest_keywords: Sequence[str]
+ dispatched_type: Type[DP]
+ unwrapped_constructor: "DIPHandler"
+ expected_debputy_integration_mode: Optional[Container[DebputyIntegrationMode]] = (
+ None
+ )
+ online_reference_documentation: Optional["ParserDocumentation"] = None
+ apply_standard_attribute_documentation: bool = False
+ source_format: Optional[Any] = None
+
+
+@dataclasses.dataclass(slots=True, frozen=True)
+class BuildSystemManifestRuleMetadata(DispatchablePluggableManifestRuleMetadata):
+    build_system_impl: Optional[Type["BuildSystemRule"]] = None
+ auto_detection_shadow_build_systems: FrozenSet[str] = frozenset()
+
+
def virtual_path_def(
path_name: str,
/,
@@ -1507,6 +1538,16 @@ class ParserAttributeDocumentation:
attributes: FrozenSet[str]
description: Optional[str]
+ @property
+ def is_hidden(self) -> bool:
+ return False
+
+
+@final
+@dataclasses.dataclass(slots=True, frozen=True)
+class StandardParserAttributeDocumentation(ParserAttributeDocumentation):
+ sort_category: int = 0
+
def undocumented_attr(attr: str) -> ParserAttributeDocumentation:
"""Describe an attribute as undocumented
@@ -1514,6 +1555,8 @@ def undocumented_attr(attr: str) -> ParserAttributeDocumentation:
If you for some reason do not want to document a particular attribute, you can mark it as
undocumented. This is required if you are only documenting a subset of the attributes,
because `debputy` assumes any omission to be a mistake.
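+
+    A sketch of how it is typically combined with documented attributes (the attribute
+    names here are purely illustrative):
+
+        attributes=[
+            documented_attr("path", "..."),
+            undocumented_attr("legacy-option"),
+        ]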
+
+ :param attr: Name of the attribute
"""
return ParserAttributeDocumentation(
frozenset({attr}),
diff --git a/src/debputy/plugin/api/std_docs.py b/src/debputy/plugin/api/std_docs.py
new file mode 100644
index 0000000..f07c307
--- /dev/null
+++ b/src/debputy/plugin/api/std_docs.py
@@ -0,0 +1,142 @@
+import textwrap
+from typing import Type, Sequence, Mapping, Container, Iterable, Any
+
+from debputy.manifest_parser.base_types import DebputyParsedContentStandardConditional
+from debputy.manifest_parser.tagging_types import DebputyParsedContent
+from debputy.plugin.api.spec import (
+ ParserAttributeDocumentation,
+ StandardParserAttributeDocumentation,
+)
+from debputy.plugin.debputy.to_be_api_types import (
+ OptionalInstallDirectly,
+ OptionalInSourceBuild,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+)
+
+_STD_ATTR_DOCS: Mapping[
+ Type[DebputyParsedContent],
+ Sequence[ParserAttributeDocumentation],
+] = {
+ BuildRuleParsedFormat: [
+ StandardParserAttributeDocumentation(
+ frozenset(["name"]),
+ textwrap.dedent(
+ """\
+ The name of the build step.
+
+ The name is used for multiple things, such as:
+ 1) If you ever need to reference the build elsewhere, the name will be used.
+            2) When `debputy` references the build in log output and error messages, it will use the name.
+            3) It is used as the default when `debputy` derives build and `DESTDIR` directories
+ for the build.
+ """
+ ),
+            # Put at the top.
+ sort_category=-1000,
+ ),
+ StandardParserAttributeDocumentation(
+ frozenset(["for_packages"]),
+ textwrap.dedent(
+ """\
+ Which package or packages this build step applies to.
+
+ Either a package name or a list of package names.
+ """
+ ),
+ ),
+ StandardParserAttributeDocumentation(
+ frozenset(["environment"]),
+ textwrap.dedent(
+ """\
+ Specify that this build step uses the named environment
+
+ If omitted, the default environment will be used. If no default environment is present,
+ then this option is mandatory.
+ """
+ ),
+ ),
+ ],
+ OptionalBuildDirectory: [
+ StandardParserAttributeDocumentation(
+ frozenset(["build_directory"]),
+ textwrap.dedent(
+ """\
+ The build directory to use for the build.
+
+ By default, `debputy` will derive a build directory automatically if the build system needs
+            it. However, setting it explicitly can be useful if you need to reference the directory name
+            from other parts of the manifest or want a "better" name than the one `debputy` comes up with.
+ """
+ ),
+ ),
+ ],
+ OptionalInSourceBuild: [
+ StandardParserAttributeDocumentation(
+ frozenset(["perform_in_source_build"]),
+ textwrap.dedent(
+ """\
+ Whether the build system should use "in source" or "out of source" build.
+
+ This is mostly useful for forcing "in source" builds for build systems that default to
+ "out of source" builds like `autoconf`.
+
+ The default depends on the build system and the value of the `build-directory` attribute
+ (if supported by the build system).
+ """
+ ),
+ # Late
+ sort_category=500,
+ ),
+ ],
+ OptionalInstallDirectly: [
+ StandardParserAttributeDocumentation(
+ frozenset(["install_directly_to_package"]),
+ textwrap.dedent(
+ """\
+ Whether the build system should install all upstream content directly into the package.
+
+ This option is mostly useful for disabling said behavior by setting the attribute to `false`.
+ The attribute conditionally defaults to `true` when the build only applies to one package.
+ If explicitly set to `true`, then this build step must apply to exactly one package (usually
+ implying that `for` is set to that package when the source builds multiple packages).
+
+            When `true`, this behaves similarly to `dh_auto_install --destdir=debian/PACKAGE`.
+ """
+ ),
+ ),
+ ],
+ DebputyParsedContentStandardConditional: [
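+        # `dh_assistant which-build-system` prints a JSON document; a non-null
+        # "build-system" key means debhelper can find a usable build system here.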
+ StandardParserAttributeDocumentation(
+ frozenset(["when"]),
+ textwrap.dedent(
+ """\
+ A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
+
+ The conditional will disable the entire rule when the conditional evaluates to false.
+ """
+ ),
+ # Last
+ sort_category=9999,
+ ),
+ ],
+}
+
+
+def docs_from(
+ *ts: Any,
+ exclude_attributes: Container[str] = frozenset(),
+) -> Iterable[ParserAttributeDocumentation]:
+ """Provide standard attribute documentation from existing types
+
+ This is a work-around for `apply_standard_attribute_documentation` requiring python3.12.
+ If you can assume python3.12, use `apply_standard_attribute_documentation` instead.
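+
+    A sketch of the intended usage (assuming a parser registration that accepts an
+    `attributes` sequence, as `pluggable_manifest_rule` does):
+
+        attributes=[
+            *docs_from(BuildRuleParsedFormat, OptionalInstallDirectly),
+            documented_attr("configure_args", "..."),
+        ]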
+ """
+ for t in ts:
+ attrs = _STD_ATTR_DOCS.get(t)
+ if attrs is None:
+ raise ValueError(f"No standard documentation for {str(t)}")
+ for attr in attrs:
+            if any(a in exclude_attributes for a in attr.attributes):
+ continue
+ yield attr
diff --git a/src/debputy/plugin/debputy/binary_package_rules.py b/src/debputy/plugin/debputy/binary_package_rules.py
index 98da763..45547b9 100644
--- a/src/debputy/plugin/debputy/binary_package_rules.py
+++ b/src/debputy/plugin/debputy/binary_package_rules.py
@@ -17,14 +17,10 @@ from typing import (
from debputy import DEBPUTY_DOC_ROOT_DIR
from debputy.maintscript_snippet import DpkgMaintscriptHelperCommand, MaintscriptSnippet
-from debputy.manifest_parser.base_types import (
- DebputyParsedContent,
- FileSystemExactMatchRule,
-)
-from debputy.manifest_parser.declarative_parser import (
- DebputyParseHint,
- ParserGenerator,
-)
+from debputy.manifest_parser.base_types import FileSystemExactMatchRule
+from debputy.manifest_parser.tagging_types import DebputyParsedContent
+from debputy.manifest_parser.parse_hints import DebputyParseHint
+from debputy.manifest_parser.declarative_parser import ParserGenerator
from debputy.manifest_parser.exceptions import ManifestParseException
from debputy.manifest_parser.parser_data import ParserContextData
from debputy.manifest_parser.util import AttributePath
@@ -34,7 +30,7 @@ from debputy.plugin.api.impl import (
DebputyPluginInitializerProvider,
ServiceDefinitionImpl,
)
-from debputy.plugin.api.impl_types import OPARSER_PACKAGES
+from debputy.plugin.api.parser_tables import OPARSER_PACKAGES
from debputy.plugin.api.spec import (
ServiceUpgradeRule,
ServiceDefinition,
diff --git a/src/debputy/plugin/debputy/build_system_rules.py b/src/debputy/plugin/debputy/build_system_rules.py
new file mode 100644
index 0000000..b7ee898
--- /dev/null
+++ b/src/debputy/plugin/debputy/build_system_rules.py
@@ -0,0 +1,2319 @@
+import dataclasses
+import json
+import os
+import subprocess
+import textwrap
+from typing import (
+ NotRequired,
+ TypedDict,
+ Self,
+ cast,
+ Dict,
+ Mapping,
+ Sequence,
+ MutableMapping,
+ Iterable,
+ Container,
+ List,
+ Tuple,
+ Union,
+ Optional,
+ TYPE_CHECKING,
+ Literal,
+)
+
+from debian.debian_support import Version
+
+from debputy import DEBPUTY_DOC_ROOT_DIR
+from debputy._manifest_constants import MK_BUILDS
+from debputy.manifest_parser.base_types import (
+ BuildEnvironmentDefinition,
+ DebputyParsedContentStandardConditional,
+ FileSystemExactMatchRule,
+)
+from debputy.manifest_parser.exceptions import (
+ ManifestParseException,
+ ManifestInvalidUserDataException,
+)
+from debputy.manifest_parser.parser_data import ParserContextData
+from debputy.manifest_parser.util import AttributePath
+from debputy.plugin.api import reference_documentation
+from debputy.plugin.api.impl import (
+ DebputyPluginInitializerProvider,
+)
+from debputy.plugin.api.parser_tables import OPARSER_MANIFEST_ROOT
+from debputy.plugin.api.spec import (
+ documented_attr,
+ INTEGRATION_MODE_FULL,
+ only_integrations,
+ VirtualPath,
+)
+from debputy.plugin.api.std_docs import docs_from
+from debputy.plugin.debputy.to_be_api_types import (
+ BuildRule,
+ StepBasedBuildSystemRule,
+ OptionalInstallDirectly,
+ BuildSystemCharacteristics,
+ OptionalBuildDirectory,
+ OptionalInSourceBuild,
+ MakefileSupport,
+ BuildRuleParsedFormat,
+ debputy_build_system,
+ CleanHelper,
+ NinjaBuildSupport,
+)
+from debputy.types import EnvironmentModification
+from debputy.util import (
+ _warn,
+ run_build_system_command,
+ _error,
+ PerlConfigVars,
+ resolve_perl_config,
+ generated_content_dir,
+)
+
+if TYPE_CHECKING:
+ from debputy.build_support.build_context import BuildContext
+ from debputy.highlevel_manifest import HighLevelManifest
+
+
+PERL_CMD = "perl"
+
+
+def register_build_system_rules(api: DebputyPluginInitializerProvider) -> None:
+ register_build_keywords(api)
+ register_build_rules(api)
+
+
+def register_build_keywords(api: DebputyPluginInitializerProvider) -> None:
+
+ api.pluggable_manifest_rule(
+ OPARSER_MANIFEST_ROOT,
+ "build-environments",
+ List[NamedEnvironmentSourceFormat],
+ _parse_build_environments,
+ expected_debputy_integration_mode=only_integrations(INTEGRATION_MODE_FULL),
+ inline_reference_documentation=reference_documentation(
+ title="Build Environments (`build-environments`)",
+ description=textwrap.dedent(
+ """\
+                    Define named environments to set the environment for any build commands that need
+ a non-default environment.
+
+ The environment definitions can be used to tweak the environment variables used by the
+ build commands. An example:
+
+ build-environments:
+ - name: custom-env
+ set:
+ ENV_VAR: foo
+ ANOTHER_ENV_VAR: bar
+                    builds:
+ - autoconf:
+ environment: custom-env
+
+                    The environment definition has multiple attributes for setting environment variables, and
+                    the attribute used determines when each definition is applied. The resulting environment
+                    is computed by the following order of operations:
+
+ 1. The environment `debputy` received from its parent process.
+ 2. Apply all the variable definitions from `set` (if the attribute is present)
+ 3. Apply all computed variables (such as variables from `dpkg-buildflags`).
+ 4. Apply all the variable definitions from `override` (if the attribute is present)
+ 5. Remove all variables listed in `unset` (if the attribute is present).
+
+ Accordingly, both `override` and `unset` will overrule any computed variables while
+ `set` will be overruled by any computed variables.
+
+                    Note that these variables are not available via manifest substitution; they are only
+                    visible to build commands.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "name",
+ textwrap.dedent(
+ """\
+ The name of the environment
+
+ The name is used to reference the environment from build rules.
+ """
+ ),
+ ),
+ documented_attr(
+ "set",
+ textwrap.dedent(
+ """\
+ A mapping of environment variables to be set.
+
+ Note these environment variables are set before computed variables (such
+ as `dpkg-buildflags`) are provided. They can affect the content of the
+ computed variables, but they cannot overrule them. If you need to overrule
+ a computed variable, please use `override` instead.
+ """
+ ),
+ ),
+ documented_attr(
+ "override",
+ textwrap.dedent(
+ """\
+ A mapping of environment variables to set.
+
+ Similar to `set`, but it can overrule computed variables like those from
+ `dpkg-buildflags`.
+ """
+ ),
+ ),
+ documented_attr(
+ "unset",
+ textwrap.dedent(
+ """\
+ A list of environment variables to unset.
+
+ Any environment variable named here will be unset. No warnings or errors
+ will be raised if a given variable was not set.
+ """
+ ),
+ ),
+ ],
+ reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#build-environment-build-environment",
+ ),
+ )
+ api.pluggable_manifest_rule(
+ OPARSER_MANIFEST_ROOT,
+ "default-build-environment",
+ EnvironmentSourceFormat,
+ _parse_default_environment,
+ expected_debputy_integration_mode=only_integrations(INTEGRATION_MODE_FULL),
+ inline_reference_documentation=reference_documentation(
+ title="Default Build Environment (`default-build-environment`)",
+ description=textwrap.dedent(
+ """\
+                    Define the environment variables used in all build commands that use the default
+ environment.
+
+ The environment definition can be used to tweak the environment variables used by the
+ build commands. An example:
+
+ default-build-environment:
+ set:
+ ENV_VAR: foo
+ ANOTHER_ENV_VAR: bar
+
+                    The environment definition has multiple attributes for setting environment variables, and
+                    the attribute used determines when each definition is applied. The resulting environment
+                    is computed by the following order of operations:
+
+ 1. The environment `debputy` received from its parent process.
+ 2. Apply all the variable definitions from `set` (if the attribute is present)
+ 3. Apply all computed variables (such as variables from `dpkg-buildflags`).
+ 4. Apply all the variable definitions from `override` (if the attribute is present)
+ 5. Remove all variables listed in `unset` (if the attribute is present).
+
+ Accordingly, both `override` and `unset` will overrule any computed variables while
+ `set` will be overruled by any computed variables.
+
+                    Note that these variables are not available via manifest substitution; they are only
+                    visible to build commands.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "set",
+ textwrap.dedent(
+ """\
+ A mapping of environment variables to be set.
+
+ Note these environment variables are set before computed variables (such
+ as `dpkg-buildflags`) are provided. They can affect the content of the
+ computed variables, but they cannot overrule them. If you need to overrule
+ a computed variable, please use `override` instead.
+ """
+ ),
+ ),
+ documented_attr(
+ "override",
+ textwrap.dedent(
+ """\
+ A mapping of environment variables to set.
+
+ Similar to `set`, but it can overrule computed variables like those from
+ `dpkg-buildflags`.
+ """
+ ),
+ ),
+ documented_attr(
+ "unset",
+ textwrap.dedent(
+ """\
+ A list of environment variables to unset.
+
+ Any environment variable named here will be unset. No warnings or errors
+ will be raised if a given variable was not set.
+ """
+ ),
+ ),
+ ],
+ reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#build-environment-build-environment",
+ ),
+ )
+ api.pluggable_manifest_rule(
+ OPARSER_MANIFEST_ROOT,
+ MK_BUILDS,
+ List[BuildRule],
+ _handle_build_rules,
+ expected_debputy_integration_mode=only_integrations(INTEGRATION_MODE_FULL),
+ inline_reference_documentation=reference_documentation(
+ title="Build rules",
+ description=textwrap.dedent(
+ """\
+ Define how to build the upstream part of the package. Usually this is done via "build systems",
+ which also defines the clean rules.
+                which also define the clean rules.
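+
+                For illustration, a minimal definition using the `autoconf` build system could
+                look like:
+
+                    builds:
+                        - autoconf:
+                            name: upstream-build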
+ ),
+ ),
+ )
+
+
+def register_build_rules(api: DebputyPluginInitializerProvider) -> None:
+ api.register_build_system(ParsedAutoconfBuildRuleDefinition)
+ api.register_build_system(ParsedMakeBuildRuleDefinition)
+
+ api.register_build_system(ParsedPerlBuildBuildRuleDefinition)
+ api.register_build_system(ParsedPerlMakeMakerBuildRuleDefinition)
+ api.register_build_system(ParsedDebhelperBuildRuleDefinition)
+
+ api.register_build_system(ParsedCMakeBuildRuleDefinition)
+ api.register_build_system(ParsedMesonBuildRuleDefinition)
+
+ api.register_build_system(ParsedQmakeBuildRuleDefinition)
+ api.register_build_system(ParsedQmake6BuildRuleDefinition)
+
+
+class EnvironmentSourceFormat(TypedDict):
+ set: NotRequired[Dict[str, str]]
+ override: NotRequired[Dict[str, str]]
+ unset: NotRequired[List[str]]
+
+
+class NamedEnvironmentSourceFormat(EnvironmentSourceFormat):
+ name: str
+
+
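+# Environment variables the manifest is not allowed to modify. The value is a suggested
+# alternative to point the user at (None when there is no obvious alternative).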
+_READ_ONLY_ENV_VARS = {
+ "DEB_CHECK_COMMAND": None,
+ "DEB_SIGN_KEYID": None,
+ "DEB_SIGN_KEYFILE": None,
+ "DEB_BUILD_OPTIONS": "DEB_BUILD_MAINT_OPTIONS",
+ "DEB_BUILD_PROFILES": None,
+ "DEB_RULES_REQUIRES_ROOT": None,
+ "DEB_GAIN_ROOT_COMMAND": None,
+ "DH_EXTRA_ADDONS": None,
+ "DH_NO_ACT": None,
+}
+
+
+def _check_variables(
+ env_vars: Iterable[str],
+ attribute_path: AttributePath,
+) -> None:
+ for env_var in env_vars:
+ if env_var not in _READ_ONLY_ENV_VARS:
+ continue
+ alt = _READ_ONLY_ENV_VARS.get(env_var)
+ var_path = attribute_path[env_var].path_key_lc
+ if alt is None:
+ raise ManifestParseException(
+ f"The variable {env_var} cannot be modified by the manifest. This restriction is generally"
+                f" because the build should not touch those variables or changing them has no effect"
+ f" (since the consumer will not see the change). The problematic definition was {var_path}"
+ )
+ else:
+ raise ManifestParseException(
+ f"The variable {env_var} cannot be modified by the manifest. This restriction is generally"
+                f" because the build should not touch those variables or changing them has no effect"
+ f" (since the consumer will not see the change). Depending on what you are trying to"
+ f' accomplish, the variable "{alt}" might be a suitable alternative.'
+ f" The problematic definition was {var_path}"
+ )
+
+
+def _no_overlap(
+ lhs: Iterable[Union[str, Tuple[int, str]]],
+ rhs: Container[str],
+ lhs_key: str,
+ rhs_key: str,
+ redundant_key: str,
+ attribute_path: AttributePath,
+) -> None:
+ for kt in lhs:
+ if isinstance(kt, tuple):
+ lhs_path_key, var = kt
+ else:
+ lhs_path_key = var = kt
+ if var not in rhs:
+ continue
+ lhs_path = attribute_path[lhs_key][lhs_path_key].path_key_lc
+ rhs_path = attribute_path[rhs_key][var].path_key_lc
+ r_path = lhs_path if redundant_key == rhs_key else rhs_path
+ raise ManifestParseException(
+ f"The environment variable {var} was declared in {lhs_path} and {rhs_path}."
+ f" Due to how the variables are applied, the definition in {r_path} is redundant"
+ f" and can effectively be removed. Please review the manifest and remove one of"
+ f" the two definitions."
+ )
+
+
+@dataclasses.dataclass(slots=True, frozen=True)
+class ManifestProvidedBuildEnvironment(BuildEnvironmentDefinition):
+
+ name: str
+ is_default: bool
+ attribute_path: AttributePath
+ parser_context: ParserContextData
+
+ set_vars: Mapping[str, str]
+ override_vars: Mapping[str, str]
+ unset_vars: Sequence[str]
+
+ @classmethod
+ def from_environment_definition(
+ cls,
+ env: EnvironmentSourceFormat,
+ attribute_path: AttributePath,
+ parser_context: ParserContextData,
+ is_default: bool = False,
+ ) -> Self:
+ reference_name: Optional[str]
+ if is_default:
+ name = "default-env"
+ reference_name = None
+ else:
+ named_env = cast("NamedEnvironmentSourceFormat", env)
+ name = named_env["name"]
+ reference_name = name
+
+ set_vars = env.get("set", {})
+ override_vars = env.get("override", {})
+ unset_vars = env.get("unset", [])
+ _check_variables(set_vars, attribute_path["set"])
+ _check_variables(override_vars, attribute_path["override"])
+ _check_variables(unset_vars, attribute_path["unset"])
+
+ if not set_vars and not override_vars and not unset_vars:
+ raise ManifestParseException(
+ f"The environment definition {attribute_path.path_key_lc} was empty. Please provide"
+ " some content or delete the definition."
+ )
+
+ _no_overlap(
+ enumerate(unset_vars),
+ set_vars,
+ "unset",
+ "set",
+ "set",
+ attribute_path,
+ )
+ _no_overlap(
+ enumerate(unset_vars),
+ override_vars,
+ "unset",
+ "override",
+ "override",
+ attribute_path,
+ )
+ _no_overlap(
+ override_vars,
+ set_vars,
+ "override",
+ "set",
+ "set",
+ attribute_path,
+ )
+
+ r = cls(
+ name,
+ is_default,
+ attribute_path,
+ parser_context,
+ set_vars,
+ override_vars,
+ unset_vars,
+ )
+ parser_context._register_build_environment(
+ reference_name,
+ r,
+ attribute_path,
+ is_default,
+ )
+
+ return r
+
+ def update_env(self, env: MutableMapping[str, str]) -> None:
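+        # Mirrors the documented order of operations: apply `set`, then the computed
+        # dpkg-buildflags variables, then `override`, and finally drop the `unset` variables.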
+ if set_vars := self.set_vars:
+ env.update(set_vars)
+ dpkg_env = self.dpkg_buildflags_env(env, self.attribute_path.path_key_lc)
+ self.log_computed_env(f"dpkg-buildflags [{self.name}]", dpkg_env)
+ if overlapping_env := dpkg_env.keys() & set_vars.keys():
+ for var in overlapping_env:
+ key_lc = self.attribute_path["set"][var].path_key_lc
+ _warn(
+ f'The variable "{var}" defined at {key_lc} is shadowed by a computed variable.'
+ f" If the manifest definition is more important, please define it via `override` rather than"
+ f" `set`."
+ )
+ env.update(dpkg_env)
+ if override_vars := self.override_vars:
+ env.update(override_vars)
+ if unset_vars := self.unset_vars:
+ for var in unset_vars:
+ try:
+ del env[var]
+ except KeyError:
+ pass
+
+
+_MAKE_DEFAULT_TOOLS = [
+ ("CC", "gcc"),
+ ("CXX", "g++"),
+ ("PKG_CONFIG", "pkg-config"),
+]
+
+
+class MakefileBuildSystemRule(StepBasedBuildSystemRule):
+
+    __slots__ = ("_make_support", "_build_target", "_test_target", "_install_target", "_directory")
+
+ def __init__(
+ self,
+ attributes: "ParsedMakeBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(attributes, attribute_path, parser_context)
+ directory = attributes.get("directory")
+ if directory is not None:
+ self._directory = directory.match_rule.path
+ else:
+ self._directory = None
+ self._make_support = MakefileSupport.from_build_system(self)
+ self._build_target = attributes.get("build_target")
+ self._test_target = attributes.get("test_target")
+ self._install_target = attributes.get("install_target")
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ return any(p in source_root for p in ("Makefile", "makefile", "GNUmakefile"))
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="not-supported",
+ )
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ # No configure step
+ pass
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ extra_vars = []
+ build_target = self._build_target
+ if build_target is not None:
+ extra_vars.append(build_target)
+ if context.is_cross_compiling:
+ for envvar, tool in _MAKE_DEFAULT_TOOLS:
+ cross_tool = os.environ.get(envvar)
+ if cross_tool is None:
+ cross_tool = context.cross_tool(tool)
+ extra_vars.append(f"{envvar}={cross_tool}")
+ self._make_support.run_make(
+ context,
+ *extra_vars,
+ "INSTALL=install --strip-program=true",
+ directory=self._directory,
+ )
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._run_make_maybe_explicit_target(
+ context,
+ self._test_target,
+ ["test", "check"],
+ )
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ self._run_make_maybe_explicit_target(
+ context,
+ self._install_target,
+ ["install"],
+ f"DESTDIR={dest_dir}",
+ "AM_UPDATE_INFO_DIR=no",
+ "INSTALL=install --strip-program=true",
+ )
+
+ def _run_make_maybe_explicit_target(
+ self,
+ context: "BuildContext",
+ provided_target: Optional[str],
+ fallback_targets: Sequence[str],
+ *make_args: str,
+ ) -> None:
+ make_support = self._make_support
+ if provided_target is not None:
+ make_support.run_make(
+ context,
+ provided_target,
+ *make_args,
+ directory=self._directory,
+ )
+ else:
+ make_support.run_first_existing_target_if_any(
+ context,
+ fallback_targets,
+ *make_args,
+ directory=self._directory,
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["distclean", "realclean", "clean"],
+ )
+
+
+class PerlBuildBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = "configure_args"
+
+ def __init__(
+ self,
+ attributes: "ParsedPerlBuildBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(attributes, attribute_path, parser_context)
+ self.configure_args = attributes.get("configure_args", [])
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ return "Build.PL" in source_root
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="not-supported",
+ )
+
+ @staticmethod
+ def _perl_cross_build_env(
+ context: "BuildContext",
+ ) -> Tuple[PerlConfigVars, Optional[EnvironmentModification]]:
+ perl_config_data = resolve_perl_config(
+ context.dpkg_architecture_variables,
+ None,
+ )
+ if context.is_cross_compiling:
+ perl5lib_dir = perl_config_data.cross_inc_dir
+ if perl5lib_dir is not None:
+ env_perl5lib = os.environ.get("PERL5LIB")
+ if env_perl5lib is not None:
+ perl5lib_dir = (
+ perl5lib_dir + perl_config_data.path_sep + env_perl5lib
+ )
+ env_mod = EnvironmentModification(
+ replacements=[
+ ("PERL5LIB", perl5lib_dir),
+ ],
+ )
+ return perl_config_data, env_mod
+ return perl_config_data, None
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ perl_config_data, cross_env_mod = self._perl_cross_build_env(context)
+ configure_env = EnvironmentModification(
+ replacements=[("PERL_MM_USE_DEFAULT", "1")]
+ )
+ if cross_env_mod is not None:
+ configure_env = configure_env.combine(cross_env_mod)
+
+ configure_cmd = [
+ PERL_CMD,
+ "Build.PL",
+ "--installdirs",
+ "vendor",
+ ]
+ cflags = os.environ.get("CFLAGS", "")
+ cppflags = os.environ.get("CPPFLAGS", "")
+ ldflags = os.environ.get("LDFLAGS", "")
+
+ if cflags != "" or cppflags != "":
+ configure_cmd.append("--config")
+ combined = f"{cflags} {cppflags}".strip()
+ configure_cmd.append(f"optimize={combined}")
+
+ if ldflags != "" or cflags != "" or context.is_cross_compiling:
+ configure_cmd.append("--config")
+ combined = f"{perl_config_data.ld} {cflags} {ldflags}".strip()
+ configure_cmd.append(f"ld={combined}")
+ if self.configure_args:
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ configure_cmd.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+ run_build_system_command(*configure_cmd, env_mod=configure_env)
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ _, cross_env_mod = self._perl_cross_build_env(context)
+ run_build_system_command(PERL_CMD, "Build", env_mod=cross_env_mod)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ _, cross_env_mod = self._perl_cross_build_env(context)
+ run_build_system_command(
+ PERL_CMD,
+ "Build",
+ "test",
+ "--verbose",
+ "1",
+ env_mod=cross_env_mod,
+ )
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ _, cross_env_mod = self._perl_cross_build_env(context)
+ run_build_system_command(
+ PERL_CMD,
+ "Build",
+ "install",
+ "--destdir",
+ dest_dir,
+ "--create_packlist",
+ "0",
+ env_mod=cross_env_mod,
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ _, cross_env_mod = self._perl_cross_build_env(context)
+ if os.path.lexists("Build"):
+ run_build_system_command(
+ PERL_CMD,
+ "Build",
+ "realclean",
+ "--allow_mb_mismatch",
+ "1",
+ env_mod=cross_env_mod,
+ )
+
+
+class PerlMakeMakerBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = ("configure_args", "_make_support")
+
+ def __init__(
+ self,
+ attributes: "ParsedPerlBuildBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(attributes, attribute_path, parser_context)
+ self.configure_args = attributes.get("configure_args", [])
+ self._make_support = MakefileSupport.from_build_system(self)
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ return "Makefile.PL" in source_root
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="not-supported",
+ )
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ configure_env = EnvironmentModification(
+ replacements=[
+ ("PERL_MM_USE_DEFAULT", "1"),
+ ("PERL_AUTOINSTALL", "--skipdeps"),
+ ]
+ )
+ perl_args = []
+ mm_args = ["INSTALLDIRS=vendor"]
+ if "CFLAGS" in os.environ:
+ mm_args.append(
+ f"OPTIMIZE={os.environ['CFLAGS']} {os.environ['CPPFLAGS']}".rstrip()
+ )
+
+ perl_config_data = resolve_perl_config(
+ context.dpkg_architecture_variables,
+ None,
+ )
+
+ if "LDFLAGS" in os.environ:
+ mm_args.append(
+ f"LD={perl_config_data.ld} {os.environ['CFLAGS']} {os.environ['LDFLAGS']}"
+ )
+
+ if context.is_cross_compiling:
+ perl5lib_dir = perl_config_data.cross_inc_dir
+ if perl5lib_dir is not None:
+ perl_args.append(f"-I{perl5lib_dir}")
+
+ if self.configure_args:
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ mm_args.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+ run_build_system_command(
+ PERL_CMD,
+ *perl_args,
+ "Makefile.PL",
+ *mm_args,
+ env_mod=configure_env,
+ )
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._make_support.run_make(context)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["check", "test"],
+ "TEST_VERBOSE=1",
+ )
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ is_mm_makefile = False
+ with open("Makefile", "rb") as fd:
+ for line in fd:
+ if b"generated automatically by MakeMaker" in line:
+ is_mm_makefile = True
+ break
+
+ install_args = [f"DESTDIR={dest_dir}"]
+
+ # Special case for Makefile.PL that uses
+ # Module::Build::Compat. PREFIX should not be passed
+ # for those; it already installs into /usr by default.
+ if is_mm_makefile:
+ install_args.append("PREFIX=/usr")
+
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["install"],
+ *install_args,
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["distclean", "realclean", "clean"],
+ )
+
+
+class DebhelperBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = ("configure_args", "dh_build_system")
+
+ def __init__(
+ self,
+ parsed_data: "ParsedDebhelperBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(parsed_data, attribute_path, parser_context)
+ self.configure_args = parsed_data.get("configure_args", [])
+ self.dh_build_system = parsed_data.get("dh_build_system")
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ try:
+ v = subprocess.check_output(
+ ["dh_assistant", "which-build-system"],
+ cwd=source_root.fs_path,
+ )
+ except subprocess.CalledProcessError:
+ return False
+ else:
+ d = json.loads(v)
+ build_system = d.get("build-system")
+ return build_system is not None
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="supported-but-not-default",
+ )
+
+ def before_first_impl_step(
+ self, *, stage: Literal["build", "clean"], **kwargs
+ ) -> None:
+ dh_build_system = self.dh_build_system
+ if dh_build_system is None:
+ return
+ try:
+ subprocess.check_call(
+ ["dh_assistant", "which-build-system", f"-S{dh_build_system}"]
+ )
+ except FileNotFoundError:
+ _error(
+ "The debhelper build system assumes `dh_assistant` is available (`debhelper (>= 13.5~)`)"
+ )
+ except subprocess.SubprocessError:
+ raise ManifestInvalidUserDataException(
+ f'The debhelper build system "{dh_build_system}" does not seem to'
+ f" be available according to"
+ f" `dh_assistant which-build-system -S{dh_build_system}`"
+ ) from None
+
+ def _default_options(self) -> List[str]:
+ default_options = []
+ if self.dh_build_system is not None:
+ default_options.append(f"-S{self.dh_build_system}")
+ if self.build_directory is not None:
+ default_options.append(f"-B{self.build_directory}")
+
+ return default_options
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ if (
+ os.path.lexists("configure.ac") or os.path.lexists("configure.in")
+ ) and not os.path.lexists("debian/autoreconf.before"):
+ run_build_system_command("dh_update_autotools_config")
+ run_build_system_command("dh_autoreconf")
+
+ default_options = self._default_options()
+ configure_args = default_options.copy()
+ if self.configure_args:
+ configure_args.append("--")
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ configure_args.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+ run_build_system_command("dh_auto_configure", *configure_args)
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ default_options = self._default_options()
+ run_build_system_command("dh_auto_build", *default_options)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ default_options = self._default_options()
+ run_build_system_command("dh_auto_test", *default_options)
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ default_options = self._default_options()
+ run_build_system_command(
+ "dh_auto_install",
+ *default_options,
+ f"--destdir={dest_dir}",
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ default_options = self._default_options()
+ run_build_system_command("dh_auto_clean", *default_options)
+ # The "global" clean logic takes care of `dh_autoreconf_clean` and `dh_clean`
+
+
+class AutoconfBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = ("configure_args", "_make_support")
+
+ def __init__(
+ self,
+ parsed_data: "ParsedAutoconfBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(parsed_data, attribute_path, parser_context)
+ configure_args = [a for a in parsed_data.get("configure_args", [])]
+ self.configure_args = configure_args
+ self._make_support = MakefileSupport.from_build_system(self)
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="supported-and-default",
+ )
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
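+        # Heuristics: a `configure.ac`, an autoconf-style `configure.in` (AC_INIT or
+        # AC_PREREQ near the top), or an executable `configure` script that identifies
+        # itself as generated by GNU Autoconf.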
+ if "configure.ac" in source_root:
+ return True
+ configure_in = source_root.get("configure.in")
+ if configure_in is not None and configure_in.is_file:
+ with configure_in.open(byte_io=True, buffering=4096) as fd:
+ for no, line in enumerate(fd):
+ if no > 100:
+ break
+ if b"AC_INIT" in line or b"AC_PREREQ" in line:
+ return True
+ configure = source_root.get("configure")
+ if configure is None or not configure.is_executable or not configure.is_file:
+ return False
+ with configure.open(byte_io=True, buffering=4096) as fd:
+ for no, line in enumerate(fd):
+ if no > 10:
+ break
+ if b"GNU Autoconf" in line:
+ return True
+ return False
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ if (
+ os.path.lexists("configure.ac") or os.path.lexists("configure.in")
+ ) and not os.path.lexists("debian/autoreconf.before"):
+ run_build_system_command("dh_update_autotools_config")
+ run_build_system_command("dh_autoreconf")
+
+ dpkg_architecture_variables = context.dpkg_architecture_variables
+ multi_arch = dpkg_architecture_variables.current_host_multiarch
+ silent_rules = (
+ "--enable-silent-rules"
+ if context.is_terse_build
+ else "--disable-silent-rules"
+ )
+
+ configure_args = [
+ f"--build={dpkg_architecture_variables['DEB_BUILD_GNU_TYPE']}",
+ "--prefix=/usr",
+ "--includedir=${prefix}/include",
+ "--mandir=${prefix}/share/man",
+ "--infodir=${prefix}/share/info",
+ "--sysconfdir=/etc",
+ "--localstatedir=/var",
+ "--disable-option-checking",
+ silent_rules,
+            f"--libdir=${{prefix}}/lib/{multi_arch}",
+ "--runstatedir=/run",
+ "--disable-maintainer-mode",
+ "--disable-dependency-tracking",
+ ]
+ if dpkg_architecture_variables.is_cross_compiling:
+ configure_args.append(
+ f"--host={dpkg_architecture_variables['DEB_HOST_GNU_TYPE']}"
+ )
+ if self.configure_args:
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ configure_args.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+ self.ensure_build_dir_exists()
+ configure_script = self.relative_from_builddir_to_source("configure")
+ with self.dump_logs_on_error("config.log"):
+ run_build_system_command(
+ configure_script,
+ *configure_args,
+ cwd=self.build_directory,
+ )
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._make_support.run_make(context)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ limit = context.parallelization_limit(support_zero_as_unlimited=True)
+ testsuite_flags = [f"-j{limit}"] if limit else ["-j"]
+
+ if not context.is_terse_build:
+ testsuite_flags.append("--verbose")
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ # Order is deliberately inverse compared to debhelper (#924052)
+ ["check", "test"],
+ f"TESTSUITEFLAGS={' '.join(testsuite_flags)}",
+ "VERBOSE=1",
+ )
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ enable_parallelization = not os.path.lexists(self.build_dir_path("libtool"))
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["install"],
+ f"DESTDIR={dest_dir}",
+ "AM_UPDATE_INFO_DIR=no",
+ enable_parallelization=enable_parallelization,
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ if self.out_of_source_build:
+ return
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["distclean", "realclean", "clean"],
+ )
+ # The "global" clean logic takes care of `dh_autoreconf_clean` and `dh_clean`
+
+
+class CMakeBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = (
+ "configure_args",
+ "target_build_system",
+ "_make_support",
+ "_ninja_support",
+ )
+
+ def __init__(
+ self,
+ parsed_data: "ParsedCMakeBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(parsed_data, attribute_path, parser_context)
+ configure_args = [a for a in parsed_data.get("configure_args", [])]
+ self.configure_args = configure_args
+ self.target_build_system: Literal["make", "ninja"] = parsed_data.get(
+ "target_build_system", "make"
+ )
+ self._make_support = MakefileSupport.from_build_system(self)
+ self._ninja_support = NinjaBuildSupport.from_build_system(self)
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="required",
+ )
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ return "CMakeLists.txt" in source_root
+
+ @staticmethod
+ def _default_cmake_env(
+ build_context: "BuildContext",
+ ) -> EnvironmentModification:
+ replacements = {}
+ if "DEB_PYTHON_INSTALL_LAYOUT" not in os.environ:
+ replacements["DEB_PYTHON_INSTALL_LAYOUT"] = "deb"
+ if "PKG_CONFIG" not in os.environ:
+ replacements["PKG_CONFIG"] = build_context.cross_tool("pkg-config")
+ return EnvironmentModification(replacements=replacements)
+
+ @classmethod
+ def cmake_generator(cls, target_build_system: Literal["make", "ninja"]) -> str:
+ cmake_generators = {
+ "make": "Unix Makefiles",
+ "ninja": "Ninja",
+ }
+ return cmake_generators[target_build_system]
+
+ @staticmethod
+ def _compiler_and_cross_flags(
+ context: "BuildContext",
+ cmake_flags: List[str],
+ ) -> None:
+
+ if "CC" in os.environ:
+ cmake_flags.append(f"-DCMAKE_C_COMPILER={os.environ['CC']}")
+ elif context.is_cross_compiling:
+ cmake_flags.append(f"-DCMAKE_C_COMPILER={context.cross_tool('gcc')}")
+
+ if "CXX" in os.environ:
+ cmake_flags.append(f"-DCMAKE_CXX_COMPILER={os.environ['CXX']}")
+ elif context.is_cross_compiling:
+ cmake_flags.append(f"-DCMAKE_CXX_COMPILER={context.cross_tool('g++')}")
+
+ if context.is_cross_compiling:
+ deb_host2cmake_system = {
+ "linux": "Linux",
+ "kfreebsd": "kFreeBSD",
+ "hurd": "GNU",
+ }
+
+ gnu_cpu2system_processor = {
+ "arm": "armv7l",
+                "mips64el": "mips64",
+ "powerpc64le": "ppc64le",
+ }
+ dpkg_architecture_variables = context.dpkg_architecture_variables
+
+ try:
+ system_name = deb_host2cmake_system[
+ dpkg_architecture_variables["DEB_HOST_ARCH_OS"]
+ ]
+ except KeyError as e:
+ name = e.args[0]
+ _error(
+ f"Cannot cross-compile via cmake: Missing CMAKE_SYSTEM_NAME for the DEB_HOST_ARCH_OS {name}"
+ )
+
+ gnu_cpu = dpkg_architecture_variables["DEB_HOST_GNU_CPU"]
+ system_processor = gnu_cpu2system_processor.get(gnu_cpu, gnu_cpu)
+
+ cmake_flags.append(f"-DCMAKE_SYSTEM_NAME={system_name}")
+ cmake_flags.append(f"-DCMAKE_SYSTEM_PROCESSOR={system_processor}")
+
+ pkg_config = context.cross_tool("pkg-config")
+            # Historically used; current versions of cmake use the env variable instead.
+ cmake_flags.append(f"-DPKG_CONFIG_EXECUTABLE=/usr/bin/{pkg_config}")
+ cmake_flags.append(f"-DPKGCONFIG_EXECUTABLE=/usr/bin/{pkg_config}")
+ cmake_flags.append(
+ f"-DQMAKE_EXECUTABLE=/usr/bin/{context.cross_tool('qmake')}"
+ )
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ cmake_flags = [
+ "-DCMAKE_INSTALL_PREFIX=/usr",
+ "-DCMAKE_BUILD_TYPE=None",
+ "-DCMAKE_INSTALL_SYSCONFDIR=/etc",
+ "-DCMAKE_INSTALL_LOCALSTATEDIR=/var",
+ "-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=ON",
+ "-DCMAKE_FIND_USE_PACKAGE_REGISTRY=OFF",
+ "-DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON",
+ "-DFETCHCONTENT_FULLY_DISCONNECTED=ON",
+ "-DCMAKE_INSTALL_RUNSTATEDIR=/run",
+ "-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON",
+ "-DCMAKE_BUILD_RPATH_USE_ORIGIN=ON",
+ f"-G{self.cmake_generator(self.target_build_system)}",
+ ]
+ if not context.is_terse_build:
+ cmake_flags.append("-DCMAKE_VERBOSE_MAKEFILE=ON")
+
+ self._compiler_and_cross_flags(context, cmake_flags)
+
+ if self.configure_args:
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ cmake_flags.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+
+ env_mod = self._default_cmake_env(context)
+ if "CPPFLAGS" in os.environ:
+ # CMake doesn't respect CPPFLAGS, see #653916.
+ cppflags = os.environ["CPPFLAGS"]
+ cflags = os.environ.get("CFLAGS", "") + f" {cppflags}".lstrip()
+ cxxflags = os.environ.get("CXXFLAGS", "") + f" {cppflags}".lstrip()
+ env_mod = env_mod.combine(
+ # The debhelper build system never showed this delta, so people might find it annoying.
+ EnvironmentModification(
+ replacements={
+ "CFLAGS": cflags,
+ "CXXFLAGS": cxxflags,
+ }
+ )
+ )
+ if "ASMFLAGS" not in os.environ and "ASFLAGS" in os.environ:
+ env_mod = env_mod.combine(
+ # The debhelper build system never showed this delta, so people might find it annoying.
+ EnvironmentModification(
+ replacements={
+ "ASMFLAGS": os.environ["ASFLAGS"],
+ }
+ )
+ )
+ self.ensure_build_dir_exists()
+ source_dir_from_build_dir = self.relative_from_builddir_to_source()
+
+ with self.dump_logs_on_error(
+ "CMakeCache.txt",
+ "CMakeFiles/CMakeOutput.log",
+ "CMakeFiles/CMakeError.log",
+ ):
+ run_build_system_command(
+ "cmake",
+ *cmake_flags,
+ source_dir_from_build_dir,
+ cwd=self.build_directory,
+ env_mod=env_mod,
+ )
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ if self.target_build_system == "make":
+ make_flags = []
+ if not context.is_terse_build:
+ make_flags.append("VERBOSE=1")
+ self._make_support.run_make(context, *make_flags)
+ else:
+ self._ninja_support.run_ninja_build(context)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ env_mod = EnvironmentModification(
+ replacements={
+ "CTEST_OUTPUT_ON_FAILURE": "1",
+ },
+ )
+ if self.target_build_system == "make":
+            # Unlike make, CTest does not have an "unlimited parallel" setting (-j implies
+ # -j1). Therefore, we do not set "allow zero as unlimited" here.
+ make_flags = [f"ARGS+=-j{context.parallelization_limit()}"]
+ if not context.is_terse_build:
+ make_flags.append("ARGS+=--verbose")
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["check", "test"],
+ *make_flags,
+ env_mod=env_mod,
+ )
+ else:
+ self._ninja_support.run_ninja_test(context, env_mod=env_mod)
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ env_mod = EnvironmentModification(
+ replacements={
+ "LC_ALL": "C.UTF-8",
+ "DESTDIR": dest_dir,
+ }
+ ).combine(self._default_cmake_env(context))
+ run_build_system_command(
+ "cmake",
+ "--install",
+ self.build_directory,
+ env_mod=env_mod,
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ if self.out_of_source_build:
+ return
+ if self.target_build_system == "make":
+ # Keep it here in case we change the `required` "out of source" to "supported-default"
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["distclean", "realclean", "clean"],
+ )
+ else:
+ self._ninja_support.run_ninja_clean(context)
+
+
+class MesonBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = (
+ "configure_args",
+ "_ninja_support",
+ )
+
+ def __init__(
+ self,
+ parsed_data: "ParsedMesonBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(parsed_data, attribute_path, parser_context)
+ configure_args = [a for a in parsed_data.get("configure_args", [])]
+ self.configure_args = configure_args
+ self._ninja_support = NinjaBuildSupport.from_build_system(self)
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="required",
+ )
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ return "meson.build" in source_root
+
+ @staticmethod
+ def _default_meson_env() -> EnvironmentModification:
+ replacements = {
+ "LC_ALL": "C.UTF-8",
+ }
+ if "DEB_PYTHON_INSTALL_LAYOUT" not in os.environ:
+ replacements["DEB_PYTHON_INSTALL_LAYOUT"] = "deb"
+ return EnvironmentModification(replacements=replacements)
+
+ @staticmethod
+ def _cross_flags(
+ context: "BuildContext",
+ meson_flags: List[str],
+ ) -> None:
+ if not context.is_cross_compiling:
+ return
+ # Needs a cross-file http://mesonbuild.com/Cross-compilation.html
+ cross_files_dir = os.path.abspath(
+ generated_content_dir(
+ subdir_key="meson-cross-files",
+ )
+ )
+ host_arch = context.dpkg_architecture_variables.current_host_arch
+ cross_file = os.path.join(cross_files_dir, f"meson-cross-file-{host_arch}.conf")
+ if not os.path.isfile(cross_file):
+ env = os.environ
+ if env.get("LC_ALL") != "C.UTF-8":
+ env = dict(env)
+ env["LC_ALL"] = "C.UTF-8"
+ else:
+ env = None
+ subprocess.check_call(
+ [
+ "/usr/share/meson/debcrossgen",
+ f"--arch={host_arch}",
+ f"-o{cross_file}",
+ ],
+ stdout=subprocess.DEVNULL,
+ env=env,
+ )
+
+ meson_flags.append("--cross-file")
+ meson_flags.append(cross_file)
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ meson_version = Version(
+ subprocess.check_output(
+ ["meson", "--version"],
+ encoding="utf-8",
+ ).strip()
+ )
+ dpkg_architecture_variables = context.dpkg_architecture_variables
+
+ meson_flags = [
+ "--wrap-mode=odownload",
+ "--buildtype=plain",
+ "--sysconfdir=/etc",
+ "--localstatedir=/var",
+ f"--libdir=lib/{dpkg_architecture_variables.current_host_multiarch}",
+ "--auto-features=enabled",
+ ]
+ if meson_version >= Version("1.2.0"):
+ # There was a behaviour change in Meson 1.2.0: previously
+ # byte-compilation wasn't supported, but since 1.2.0 it is on by
+ # default. We can only use this option to turn it off in versions
+ # where the option exists.
+ meson_flags.append("-Dpython.bytecompile=-1")
+
+ self._cross_flags(context, meson_flags)
+
+ if self.configure_args:
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ meson_flags.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+
+ env_mod = self._default_meson_env()
+
+ self.ensure_build_dir_exists()
+ source_dir_from_build_dir = self.relative_from_builddir_to_source()
+
+ with self.dump_logs_on_error("meson-logs/meson-log.txt"):
+ run_build_system_command(
+ "meson",
+ "setup",
+ source_dir_from_build_dir,
+ *meson_flags,
+ cwd=self.build_directory,
+ env_mod=env_mod,
+ )
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._ninja_support.run_ninja_build(context)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ env_mod = EnvironmentModification(
+ replacements={
+ "MESON_TESTTHREDS": f"{context.parallelization_limit()}",
+ },
+ ).combine(self._default_meson_env())
+ with self.dump_logs_on_error("meson-logs/testlog.txt"):
+ run_build_system_command(
+ "meson",
+ "test",
+ env_mod=env_mod,
+ cwd=self.build_directory,
+ )
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ run_build_system_command(
+ "meson",
+ "install",
+ "--destdir",
+ dest_dir,
+ env_mod=self._default_meson_env(),
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ # `debputy` will handle all the cleanup for us by virtue of "out of source build"
+ assert self.out_of_source_build
+
+
+def _add_qmake_flag(options: List[str], envvar: str, *, include_cppflags: bool) -> None:
+ value = os.environ.get(envvar)
+ if value is None:
+ return
+ if include_cppflags:
+ cppflags = os.environ.get("CPPFLAGS")
+ if cppflags:
+ value = f"{value} {cppflags}"
+
+ options.append(f"QMAKE_{envvar}_RELEASE={value}")
+ options.append(f"QMAKE_{envvar}_DEBUG={value}")
+
+
+class ParsedGenericQmakeBuildRuleDefinition(
+ OptionalInstallDirectly,
+ OptionalInSourceBuild,
+ OptionalBuildDirectory,
+):
+ configure_args: NotRequired[List[str]]
+
+
+class AbstractQmakeBuildSystemRule(StepBasedBuildSystemRule):
+
+ __slots__ = ("configure_args", "_make_support")
+
+ def __init__(
+ self,
+ parsed_data: "ParsedGenericQmakeBuildRuleDefinition",
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(parsed_data, attribute_path, parser_context)
+ configure_args = [a for a in parsed_data.get("configure_args", [])]
+ self.configure_args = configure_args
+ self._make_support = MakefileSupport.from_build_system(self)
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ return BuildSystemCharacteristics(
+ out_of_source_builds="supported-and-default",
+ )
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ return any(p.name.endswith(".pro") for p in source_root.iterdir)
+
+ @classmethod
+ def os_mkspec_mapping(cls) -> Mapping[str, str]:
+ return {
+ "linux": "linux-g++",
+ "kfreebsd": "gnukfreebsd-g++",
+ "hurd": "hurd-g++",
+ }
+
+ def qmake_command(self) -> str:
+ raise NotImplementedError
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ configure_args = [
+ "-makefile",
+ ]
+ qmake_cmd = context.cross_tool(self.qmake_command())
+
+ if context.is_cross_compiling:
+ host_os = context.dpkg_architecture_variables["DEB_HOST_ARCH_OS"]
+ os2mkspec = self.os_mkspec_mapping()
+ try:
+ spec = os2mkspec[host_os]
+ except KeyError:
+ _error(
+ f'Sorry, `debputy` cannot cross build this package for "{host_os}".'
+ f' Missing a "DEB OS -> qmake -spec <VALUE>" mapping.'
+ )
+ configure_args.append("-spec")
+ configure_args.append(spec)
+
+ _add_qmake_flag(configure_args, "CFLAGS", include_cppflags=True)
+ _add_qmake_flag(configure_args, "CXXFLAGS", include_cppflags=True)
+ _add_qmake_flag(configure_args, "LDFLAGS", include_cppflags=False)
+
+ configure_args.append("QMAKE_STRIP=:")
+ configure_args.append("PREFIX=/usr")
+
+ if self.configure_args:
+ substitution = self.substitution
+ attr_path = self.attribute_path["configure_args"]
+ configure_args.extend(
+ substitution.substitute(v, attr_path[i].path)
+ for i, v in enumerate(self.configure_args)
+ )
+
+ self.ensure_build_dir_exists()
+ if not self.out_of_source_build:
+ configure_args.append(self.relative_from_builddir_to_source())
+
+ with self.dump_logs_on_error("config.log"):
+ run_build_system_command(
+ qmake_cmd,
+ *configure_args,
+ cwd=self.build_directory,
+ )
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._make_support.run_make(context)
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ limit = context.parallelization_limit(support_zero_as_unlimited=True)
+ testsuite_flags = [f"-j{limit}"] if limit else ["-j"]
+
+ if not context.is_terse_build:
+ testsuite_flags.append("--verbose")
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ # Order is deliberately inverse compared to debhelper (#924052)
+ ["check", "test"],
+ f"TESTSUITEFLAGS={' '.join(testsuite_flags)}",
+ "VERBOSE=1",
+ )
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ enable_parallelization = not os.path.lexists(self.build_dir_path("libtool"))
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["install"],
+ f"DESTDIR={dest_dir}",
+ "AM_UPDATE_INFO_DIR=no",
+ enable_parallelization=enable_parallelization,
+ )
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ if self.out_of_source_build:
+ return
+ self._make_support.run_first_existing_target_if_any(
+ context,
+ ["distclean", "realclean", "clean"],
+ )
+
+
+class QmakeBuildSystemRule(AbstractQmakeBuildSystemRule):
+
+ def qmake_command(self) -> str:
+ return "qmake"
+
+
+class Qmake6BuildSystemRule(AbstractQmakeBuildSystemRule):
+
+ def qmake_command(self) -> str:
+ return "qmake6"
+
+
+@debputy_build_system(
+ "make",
+ MakefileBuildSystemRule,
+ auto_detection_shadows_build_systems="debhelper",
+ online_reference_documentation=reference_documentation(
+ title="Make Build System",
+ description=textwrap.dedent(
+ ""
+ """\
+ Run a plain `make` file with nothing else.
+
+            This build system will attempt to use `make` to run the instructions
+            in a makefile (such as `Makefile` or `GNUmakefile`).
+
+            By default, the makefile build system assumes it should use "in-source"
+            build semantics. If need be, an explicit `build-directory` can be
+            provided if the `Makefile` is not in the source folder but instead in
+            some other directory.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "directory",
+ textwrap.dedent(
+ """\
+ The directory from which to run make if it is not the source root
+
+ This works like using `make -C DIRECTORY ...` (or `cd DIRECTORY && make ...`).
+ """
+ ),
+ ),
+ documented_attr(
+ "build_target",
+ textwrap.dedent(
+ """\
+ The target name to use for the "build" step.
+
+ If omitted, `make` will be run without any explicit target leaving it to decide
+ the default.
+ """
+ ),
+ ),
+ documented_attr(
+ "test_target",
+ textwrap.dedent(
+ """\
+ The target name to use for the "test" step.
+
+ If omitted, `make check` or `make test` will be used if it looks like `make`
+ will accept one of those targets. Otherwise, the step will be skipped.
+ """
+ ),
+ ),
+ documented_attr(
+ "install_target",
+ textwrap.dedent(
+ """\
+ The target name to use for the "install" step.
+
+ If omitted, `make install` will be used if it looks like `make` will accept that target.
+ Otherwise, the step will be skipped.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedMakeBuildRuleDefinition(
+ OptionalInstallDirectly,
+):
+ directory: NotRequired[FileSystemExactMatchRule]
+ build_target: NotRequired[str]
+ test_target: NotRequired[str]
+ install_target: NotRequired[str]
+
+
+@debputy_build_system(
+ "autoconf",
+ AutoconfBuildSystemRule,
+ auto_detection_shadows_build_systems=["debhelper", "make"],
+ online_reference_documentation=reference_documentation(
+ title="Autoconf Build System",
+ description=textwrap.dedent(
+ """\
+ Run an autoconf-based build system as the upstream build system.
+
+ This build rule will attempt to use autoreconf to update the `configure`
+ script before running the `configure` script if needed. Otherwise, it
+ follows the classic `./configure && make && make install` pattern.
+
+ The build rule uses "out of source" builds by default since it is easier
+ and more reliable for clean and makes it easier to support multiple
+ builds (that is, two or more build systems for the same source package).
+            This is in contrast to `debhelper`, which defaults to "in source" builds
+ for `autoconf`. If you need that behavior, please set
+ `perform-in-source-build: true`.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `configure` script.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ OptionalInSourceBuild,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedAutoconfBuildRuleDefinition(
+ OptionalInstallDirectly,
+ OptionalInSourceBuild,
+ OptionalBuildDirectory,
+):
+ configure_args: NotRequired[List[str]]
+
+
+@debputy_build_system(
+ "cmake",
+ CMakeBuildSystemRule,
+ auto_detection_shadows_build_systems=["debhelper", "make"],
+ online_reference_documentation=reference_documentation(
+ title="CMake Build System",
+ description=textwrap.dedent(
+ """\
+            Run a CMake-based build system as the upstream build system.
+
+ The build rule uses "out of source" builds.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `cmake` command.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedCMakeBuildRuleDefinition(
+ OptionalInstallDirectly,
+ OptionalBuildDirectory,
+):
+ configure_args: NotRequired[List[str]]
+ target_build_system: Literal["make", "ninja"]
+
+
+@debputy_build_system(
+ "meson",
+ MesonBuildSystemRule,
+ auto_detection_shadows_build_systems=["debhelper", "make"],
+ online_reference_documentation=reference_documentation(
+ title="Meson Build System",
+ description=textwrap.dedent(
+ """\
+            Run a Meson-based build system as the upstream build system.
+
+ The build rule uses "out of source" builds.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `meson` command.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedMesonBuildRuleDefinition(
+ OptionalInstallDirectly,
+ OptionalBuildDirectory,
+):
+ configure_args: NotRequired[List[str]]
+
+
+@debputy_build_system(
+ "perl-build",
+ PerlBuildBuildSystemRule,
+ auto_detection_shadows_build_systems=[
+ "debhelper",
+ "make",
+ "perl-makemaker",
+ ],
+ online_reference_documentation=reference_documentation(
+ title='Perl "Build.PL" Build System',
+ description=textwrap.dedent(
+ """\
+            Build using the `Build.PL` build system used by some Perl packages.
+
+ This build rule will attempt to use the `Build.PL` script to build the
+ upstream code.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `Build.PL` script.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedPerlBuildBuildRuleDefinition(
+ OptionalInstallDirectly,
+):
+ configure_args: NotRequired[List[str]]
+
+
+@debputy_build_system(
+ "debhelper",
+ DebhelperBuildSystemRule,
+ online_reference_documentation=reference_documentation(
+ title="Debhelper Build System",
+ description=textwrap.dedent(
+ """\
+            Delegate to a debhelper-provided build system
+
+ This build rule will attempt to use the `dh_auto_*` tools to build the
+ upstream code. By default, `dh_auto_*` will use auto-detection to determine
+ which build system they will use. This can be overridden by the
+ `dh-build-system` attribute.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "dh_build_system",
+ textwrap.dedent(
+ """\
+ Which debhelper build system to use. This attribute is passed to
+ the `dh_auto_*` commands as the `-S` parameter, so any value valid
+ for that will be accepted.
+
+ Note that many debhelper build systems require extra build
+ dependencies before they can be used. Please consult the documentation
+ of the relevant debhelper build system for details.
+ """
+ ),
+ ),
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+                Arguments to be passed to the underlying configuration command
+                (via `dh_auto_configure -- <configure-args>`).
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedDebhelperBuildRuleDefinition(
+ OptionalInstallDirectly,
+ OptionalBuildDirectory,
+):
+ configure_args: NotRequired[List[str]]
+ dh_build_system: NotRequired[str]
+
+
+@debputy_build_system(
+ "perl-makemaker",
+ PerlMakeMakerBuildSystemRule,
+ auto_detection_shadows_build_systems=[
+ "debhelper",
+ "make",
+ ],
+ online_reference_documentation=reference_documentation(
+ title='Perl "MakeMaker" Build System',
+ description=textwrap.dedent(
+ """\
+ Build using the "MakeMaker" Build system used by some Perl packages.
+
+ This build rule will attempt to use the `Makefile.PL` script to build the
+ upstream code.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `Makefile.PL` script.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedPerlMakeMakerBuildRuleDefinition(
+ OptionalInstallDirectly,
+):
+ configure_args: NotRequired[List[str]]
+
+
+@debputy_build_system(
+ "qmake",
+ QmakeBuildSystemRule,
+ auto_detection_shadows_build_systems=[
+ "debhelper",
+ "make",
+ # Open question, should this shadow "qmake6" and later?
+ ],
+ online_reference_documentation=reference_documentation(
+ title='QT "qmake" Build System',
+ description=textwrap.dedent(
+ """\
+ Build using the "qmake" by QT.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `qmake` command.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ OptionalInSourceBuild,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedQmakeBuildRuleDefinition(ParsedGenericQmakeBuildRuleDefinition):
+ pass
+
+
+@debputy_build_system(
+ "qmake6",
+ Qmake6BuildSystemRule,
+ auto_detection_shadows_build_systems=[
+ "debhelper",
+ "make",
+ ],
+ online_reference_documentation=reference_documentation(
+ title='QT "qmake6" Build System',
+ description=textwrap.dedent(
+ """\
+ Build using the "qmake6" from the `qmake6` package. This is like the `qmake` system
+ but is specifically for QT6.
+ """
+ ),
+ attributes=[
+ documented_attr(
+ "configure_args",
+ textwrap.dedent(
+ """\
+ Arguments to be passed to the `qmake6` command.
+ """
+ ),
+ ),
+ *docs_from(
+ DebputyParsedContentStandardConditional,
+ OptionalInstallDirectly,
+ OptionalInSourceBuild,
+ OptionalBuildDirectory,
+ BuildRuleParsedFormat,
+ ),
+ ],
+ ),
+)
+class ParsedQmake6BuildRuleDefinition(ParsedGenericQmakeBuildRuleDefinition):
+ pass
+
+
+def _parse_default_environment(
+ _name: str,
+ parsed_data: EnvironmentSourceFormat,
+ attribute_path: AttributePath,
+ parser_context: ParserContextData,
+) -> ManifestProvidedBuildEnvironment:
+ return ManifestProvidedBuildEnvironment.from_environment_definition(
+ parsed_data,
+ attribute_path,
+ parser_context,
+ is_default=True,
+ )
+
+
+def _parse_build_environments(
+ _name: str,
+ parsed_data: List[NamedEnvironmentSourceFormat],
+ attribute_path: AttributePath,
+ parser_context: ParserContextData,
+) -> List[ManifestProvidedBuildEnvironment]:
+ return [
+ ManifestProvidedBuildEnvironment.from_environment_definition(
+ value,
+ attribute_path[idx],
+ parser_context,
+ is_default=False,
+ )
+ for idx, value in enumerate(parsed_data)
+ ]
+
+
+def _handle_build_rules(
+ _name: str,
+ parsed_data: List[BuildRule],
+ _attribute_path: AttributePath,
+ _parser_context: ParserContextData,
+) -> List[BuildRule]:
+ return parsed_data
diff --git a/src/debputy/plugin/debputy/manifest_root_rules.py b/src/debputy/plugin/debputy/manifest_root_rules.py
index 1d3b096..f539243 100644
--- a/src/debputy/plugin/debputy/manifest_root_rules.py
+++ b/src/debputy/plugin/debputy/manifest_root_rules.py
@@ -12,18 +12,22 @@ from debputy._manifest_constants import (
)
from debputy.exceptions import DebputySubstitutionError
from debputy.installations import InstallRule
-from debputy.manifest_parser.base_types import DebputyParsedContent
+from debputy.manifest_parser.tagging_types import DebputyParsedContent
from debputy.manifest_parser.exceptions import ManifestParseException
from debputy.manifest_parser.parser_data import ParserContextData
from debputy.manifest_parser.util import AttributePath
from debputy.plugin.api import reference_documentation
from debputy.plugin.api.impl import DebputyPluginInitializerProvider
-from debputy.plugin.api.impl_types import (
+from debputy.plugin.api.parser_tables import (
OPARSER_MANIFEST_ROOT,
OPARSER_MANIFEST_DEFINITIONS,
OPARSER_PACKAGES,
)
-from debputy.plugin.api.spec import not_integrations, INTEGRATION_MODE_DH_DEBPUTY_RRR
+from debputy.plugin.api.spec import (
+ not_integrations,
+ INTEGRATION_MODE_DH_DEBPUTY_RRR,
+)
+from debputy.plugin.debputy.build_system_rules import register_build_system_rules
from debputy.substitution import VariableNameState, SUBST_VAR_RE
if TYPE_CHECKING:
@@ -166,6 +170,8 @@ def register_manifest_root_rules(api: DebputyPluginInitializerProvider) -> None:
nested_in_package_context=True,
)
+ register_build_system_rules(api)
+
class ManifestVersionFormat(DebputyParsedContent):
manifest_version: ManifestVersion
diff --git a/src/debputy/plugin/debputy/private_api.py b/src/debputy/plugin/debputy/private_api.py
index d042378..75081a4 100644
--- a/src/debputy/plugin/debputy/private_api.py
+++ b/src/debputy/plugin/debputy/private_api.py
@@ -43,9 +43,11 @@ from debputy.manifest_conditions import (
BuildProfileMatch,
SourceContextArchMatchManifestCondition,
)
-from debputy.manifest_parser.base_types import (
+from debputy.manifest_parser.tagging_types import (
DebputyParsedContent,
- DebputyParsedContentStandardConditional,
+ TypeMapping,
+)
+from debputy.manifest_parser.base_types import (
FileSystemMode,
StaticFileSystemOwner,
StaticFileSystemGroup,
@@ -53,11 +55,12 @@ from debputy.manifest_parser.base_types import (
FileSystemExactMatchRule,
FileSystemMatchRule,
SymbolicMode,
- TypeMapping,
OctalMode,
FileSystemExactNonDirMatchRule,
+ BuildEnvironmentDefinition,
+ DebputyParsedContentStandardConditional,
)
-from debputy.manifest_parser.declarative_parser import DebputyParseHint
+from debputy.manifest_parser.parse_hints import DebputyParseHint
from debputy.manifest_parser.exceptions import ManifestParseException
from debputy.manifest_parser.mapper_code import type_mapper_str2package
from debputy.manifest_parser.parser_data import ParserContextData
@@ -79,7 +82,9 @@ from debputy.plugin.api.spec import (
not_integrations,
INTEGRATION_MODE_DH_DEBPUTY_RRR,
)
+from debputy.plugin.api.std_docs import docs_from
from debputy.plugin.debputy.binary_package_rules import register_binary_package_rules
+from debputy.plugin.debputy.build_system_rules import register_build_system_rules
from debputy.plugin.debputy.discard_rules import (
_debputy_discard_pyc_files,
_debputy_prune_la_files,
@@ -587,6 +592,16 @@ def register_type_mappings(api: DebputyPluginInitializerProvider) -> None:
],
),
)
+ api.register_mapped_type(
+ TypeMapping(
+ BuildEnvironmentDefinition,
+ str,
+ lambda v, ap, pc: pc.resolve_build_environment(v, ap),
+ ),
+ reference_documentation=type_mapping_reference_documentation(
+ description="Reference to an build environment defined in `build-environments`",
+ ),
+ )
def register_service_managers(
@@ -897,14 +912,7 @@ def register_install_rules(api: DebputyPluginInitializerProvider) -> None:
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc("generic-install-install"),
),
@@ -1193,14 +1201,7 @@ def register_install_rules(api: DebputyPluginInitializerProvider) -> None:
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc(
"install-manpages-install-man"
@@ -1355,14 +1356,7 @@ def register_install_rules(api: DebputyPluginInitializerProvider) -> None:
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc("generic-install-install"),
),
@@ -1408,14 +1402,7 @@ def register_transformation_rules(api: DebputyPluginInitializerProvider) -> None
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc(
"move-transformation-rule-move"
@@ -1564,14 +1551,7 @@ def register_transformation_rules(api: DebputyPluginInitializerProvider) -> None
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc(
"create-symlinks-transformation-rule-create-symlink"
@@ -1680,14 +1660,7 @@ def register_transformation_rules(api: DebputyPluginInitializerProvider) -> None
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc(
"change-path-ownergroup-or-mode-path-metadata"
@@ -1768,14 +1741,7 @@ def register_transformation_rules(api: DebputyPluginInitializerProvider) -> None
"""
),
),
- documented_attr(
- "when",
- textwrap.dedent(
- """\
- A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
- """
- ),
- ),
+ *docs_from(DebputyParsedContentStandardConditional),
],
reference_documentation_url=_manifest_format_doc(
"create-directories-transformation-rule-directories"
diff --git a/src/debputy/plugin/debputy/to_be_api_types.py b/src/debputy/plugin/debputy/to_be_api_types.py
new file mode 100644
index 0000000..d7be694
--- /dev/null
+++ b/src/debputy/plugin/debputy/to_be_api_types.py
@@ -0,0 +1,1039 @@
+import contextlib
+import dataclasses
+import os.path
+import subprocess
+from typing import (
+ Optional,
+ FrozenSet,
+ final,
+ TYPE_CHECKING,
+ Union,
+ Annotated,
+ List,
+ NotRequired,
+ Literal,
+ Any,
+ Type,
+ TypeVar,
+ Self,
+ Sequence,
+ Callable,
+ Container,
+ Iterable,
+ is_typeddict,
+)
+
+from debputy.exceptions import PluginAPIViolationError, PluginInitializationError
+from debputy.manifest_conditions import ManifestCondition
+from debputy.manifest_parser.base_types import (
+ BuildEnvironmentDefinition,
+ DebputyParsedContentStandardConditional,
+ FileSystemExactMatchRule,
+)
+from debputy.manifest_parser.exceptions import (
+ ManifestParseException,
+ ManifestInvalidUserDataException,
+)
+from debputy.manifest_parser.parse_hints import DebputyParseHint
+from debputy.manifest_parser.parser_data import ParserContextData
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
+from debputy.manifest_parser.util import AttributePath
+from debputy.packages import BinaryPackage
+from debputy.plugin.api.spec import (
+ ParserDocumentation,
+ DebputyIntegrationMode,
+ BuildSystemManifestRuleMetadata,
+ _DEBPUTY_DISPATCH_METADATA_ATTR_NAME,
+ VirtualPath,
+)
+from debputy.plugin.plugin_state import run_in_context_of_plugin
+from debputy.substitution import Substitution
+from debputy.types import EnvironmentModification
+from debputy.util import run_build_system_command, _debug_log, _info, _warn
+
+if TYPE_CHECKING:
+ from debputy.build_support.build_context import BuildContext
+ from debputy.highlevel_manifest import HighLevelManifest
+ from debputy.plugin.api.impl_types import DIPHandler
+
+
+AT = TypeVar("AT")
+BSR = TypeVar("BSR", bound="BuildSystemRule")
+BSPF = TypeVar("BSPF", bound="BuildRuleDefinitionBase")
+
+
+@dataclasses.dataclass(slots=True, frozen=True)
+class BuildSystemCharacteristics:
+ out_of_source_builds: Literal[
+ "required",
+ "supported-and-default",
+ "supported-but-not-default",
+ "not-supported",
+ ]
+
+
+class CleanHelper:
+ def schedule_removal_of_files(self, *args: str) -> None:
+ """Schedule removal of these files
+
+ This will remove the provided files in bulk. The files are not guaranteed
+ to be deleted in any particular order. If anything needs urgent removal,
+ `os.unlink` can be used directly.
+
+        Note: Symlinks will **not** be followed. If a symlink and its target must
+        be deleted, ensure both are passed.
+
+
+ :param args: Path names to remove. Each must be removable with
+ `os.unlink`
+ """
+ raise NotImplementedError
+
+ def schedule_removal_of_directories(self, *args: str) -> None:
+ """Schedule removal of these directories
+
+        This will remove the provided directories in bulk. The directories are not
+        guaranteed to be deleted in any particular order. If anything needs urgent
+        removal, then it can be removed directly instead of being passed to this
+        method.
+
+ :param args: Path names to remove.
+ """
+ raise NotImplementedError
+
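+# Illustrative sketch (not part of the API itself) of how a clean_impl() might
+# use the CleanHelper above; the file and directory names are hypothetical:
+#
+#     def clean_impl(self, context, manifest, clean_helper, **kwargs):
+#         clean_helper.schedule_removal_of_files("config.status")
+#         clean_helper.schedule_removal_of_directories("_generated-docs")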
+
+class BuildRuleParsedFormat(DebputyParsedContentStandardConditional):
+ name: NotRequired[str]
+ for_packages: NotRequired[
+ Annotated[
+ Union[BinaryPackage, List[BinaryPackage]],
+ DebputyParseHint.manifest_attribute("for"),
+ ]
+ ]
+ environment: NotRequired[BuildEnvironmentDefinition]
+
+
+class OptionalBuildDirectory(BuildRuleParsedFormat):
+ build_directory: NotRequired[FileSystemExactMatchRule]
+
+
+class OptionalInSourceBuild(BuildRuleParsedFormat):
+ perform_in_source_build: NotRequired[bool]
+
+
+class OptionalInstallDirectly(BuildRuleParsedFormat):
+ install_directly_to_package: NotRequired[bool]
+
+
+BuildSystemDefinition = Union[
+ BuildRuleParsedFormat,
+ OptionalBuildDirectory,
+ OptionalInSourceBuild,
+ OptionalInstallDirectly,
+]
+
+
+class BuildRule(DebputyDispatchableType):
+ __slots__ = (
+ "_auto_generated_stem",
+ "_name",
+ "_for_packages",
+ "_manifest_condition",
+ "_attribute_path",
+ "_environment",
+ "_substitution",
+ )
+
+ def __init__(
+ self,
+ attributes: BuildRuleParsedFormat,
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__()
+
+ self._name = attributes.get("name")
+ for_packages = attributes.get("for_packages")
+
+ if for_packages is None:
+ if isinstance(parser_context, ParserContextData):
+ all_binaries = parser_context.binary_packages.values()
+ else:
+ all_binaries = parser_context.all_packages
+ self._for_packages = frozenset(all_binaries)
+ else:
+ self._for_packages = frozenset(
+ for_packages if isinstance(for_packages, list) else [for_packages]
+ )
+ self._manifest_condition = attributes.get("when")
+ self._attribute_path = attribute_path
+ self._substitution = parser_context.substitution
+ self._auto_generated_stem: Optional[str] = None
+ environment = attributes.get("environment")
+ if environment is None:
+ assert isinstance(parser_context, ParserContextData)
+ self._environment = parser_context.resolve_build_environment(
+ None,
+ attribute_path,
+ )
+ else:
+ self._environment = environment
+
+ @final
+ @property
+ def name(self) -> Optional[str]:
+ return self._name
+
+ @final
+ @property
+ def attribute_path(self) -> AttributePath:
+ return self._attribute_path
+
+ @final
+ @property
+ def manifest_condition(self) -> Optional[ManifestCondition]:
+ return self._manifest_condition
+
+ @final
+ @property
+ def for_packages(self) -> FrozenSet[BinaryPackage]:
+ return self._for_packages
+
+ @final
+ @property
+ def substitution(self) -> Substitution:
+ return self._substitution
+
+ @final
+ @property
+ def environment(self) -> BuildEnvironmentDefinition:
+ return self._environment
+
+ @final
+ @property
+ def auto_generated_stem(self) -> str:
+ stem = self._auto_generated_stem
+ if stem is None:
+ raise AssertionError(
+ "The auto-generated-stem is not available at this time"
+ )
+ return stem
+
+ @final
+ @auto_generated_stem.setter
+ def auto_generated_stem(self, value: str) -> None:
+ if self._auto_generated_stem is not None:
+ raise AssertionError("The auto-generated-stem should only be set once")
+ assert value is not None
+ self._auto_generated_stem = value
+
+ @final
+ def run_build(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ run_in_context_of_plugin(
+ self._debputy_plugin,
+ self.perform_build,
+ context,
+ manifest,
+ **kwargs,
+ )
+
+ def perform_build(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ raise NotImplementedError
+
+ @property
+ def is_buildsystem(self) -> bool:
+ return False
+
+ @property
+ def name_or_tag(self) -> str:
+ name = self.name
+ if name is None:
+ return self.auto_generated_stem
+ return name
+
+
+def _is_type_or_none(v: Optional[Any], expected_type: Type[AT]) -> Optional[AT]:
+ if isinstance(v, expected_type):
+ return v
+ return None
+
+
+class BuildSystemRule(BuildRule):
+
+ __slots__ = (
+ "_build_directory",
+ "source_directory",
+ "install_directly_to_package",
+ "perform_in_source_build",
+ )
+
+ def __init__(
+ self,
+ attributes: BuildSystemDefinition,
+ attribute_path: AttributePath,
+ parser_context: Union[ParserContextData, "HighLevelManifest"],
+ ) -> None:
+ super().__init__(attributes, attribute_path, parser_context)
+ build_directory = _is_type_or_none(
+ attributes.get("build_directory"), FileSystemExactMatchRule
+ )
+ if build_directory is not None:
+ self._build_directory = build_directory.match_rule.path
+ else:
+ self._build_directory = None
+ self.source_directory = "."
+ self.install_directly_to_package = False
+ self.perform_in_source_build = _is_type_or_none(
+ attributes.get("perform_in_source_build"), bool
+ )
+ install_directly_to_package = _is_type_or_none(
+ attributes.get("install_directly_to_package"), bool
+ )
+ if install_directly_to_package is None:
+ self.install_directly_to_package = len(self.for_packages) == 1
+ elif install_directly_to_package and len(self.for_packages) > 1:
+ idtp_path = attribute_path["install_directly_to_package"].path
+ raise ManifestParseException(
+ f'The attribute "install-directly-to-package" ({idtp_path}) cannot'
+ " be true when the build system applies to multiple packages."
+ )
+ else:
+ self.install_directly_to_package = install_directly_to_package
+
+ @classmethod
+ def auto_detect_build_system(
+ cls,
+ source_root: VirtualPath,
+ *args,
+ **kwargs,
+ ) -> bool:
+ """Check if the build system apply automatically.
+
+ This class method is called when the manifest does not declare any build rules at
+ all.
+
+ :param source_root: The source root (the directory containing `debian/`). Usually,
+ the detection code would look at this for files related to the upstream build system.
+ :param args: For future compat, new arguments might appear as positional arguments.
+ :param kwargs: For future compat, new arguments might appear as keyword argument.
+ :return: True if the build system can be used, False when it would not be useful
+ to use the build system (at least with all defaults).
+ Note: Be sure to use proper `bool` return values. The calling code does an
+ `isinstance` check to ensure that the version of `debputy` supports the
+ auto-detector (in case the return type is ever expanded in the future).
+ """
+ return False
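+        # Illustrative override (mirrors the concrete rules in
+        # build_system_rules.py, which check for an upstream marker file):
+        #
+        #     @classmethod
+        #     def auto_detect_build_system(cls, source_root, *args, **kwargs) -> bool:
+        #         return "meson.build" in source_root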
+
+ @property
+ def out_of_source_build(self) -> bool:
+ build_directory = self.build_directory
+ return build_directory != self.source_directory
+
+ @property
+ def build_directory(self) -> str:
+ directory = self._build_directory
+ if directory is None:
+ return self.source_directory
+ return directory
+
+ @contextlib.contextmanager
+ def dump_logs_on_error(self, *logs: str) -> None:
+ """Context manager that will dump logs to stdout on error
+
+ :param logs: The logs to be dumped. Relative path names are assumed to be relative to
+ the build directory.
+ """
+ try:
+ yield
+ except (Exception, KeyboardInterrupt, SystemExit):
+ _warn(
+ "Error occurred, attempting to provide relevant logs as requested by the build system provider"
+ )
+ found_any = False
+ for log in logs:
+ if not os.path.isabs(log):
+ log = self.build_dir_path(log)
+ if not os.path.isfile(log):
+ _info(
+ f'Would have pushed "{log}" to stdout, but it does not exist.'
+ )
+ continue
+ subprocess.run(["tail", "-v", "-n", "+0", log])
+ found_any = True
+ if not found_any:
+ _warn(
+ f"None of the logs provided were available (relative to build directory): {', '.join(logs)}"
+ )
+ raise
+
+ @final
+ def run_clean(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: CleanHelper,
+ **kwargs,
+ ) -> None:
+ run_in_context_of_plugin(
+ self._debputy_plugin,
+ self.perform_clean,
+ context,
+ manifest,
+ clean_helper,
+ **kwargs,
+ )
+
+ def perform_clean(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: CleanHelper,
+ **kwargs,
+ ) -> None:
+ raise NotImplementedError
+
+ def ensure_build_dir_exists(self) -> None:
+ build_dir = self.build_directory
+ source_dir = self.source_directory
+ if build_dir == source_dir:
+ return
+ os.makedirs(build_dir, mode=0o755, exist_ok=True)
+
+ def build_dir_path(self, /, path: str = "") -> str:
+ build_dir = self.build_directory
+ if path == "":
+ return build_dir
+ return os.path.join(build_dir, path)
+
+    def relative_from_builddir_to_source(
+        self,
+        path_in_source_dir: Optional[str] = None,
+    ) -> str:
+        build_dir = self.build_directory
+        source_dir = self.source_directory
+        if path_in_source_dir is None:
+            # No specific path requested; return the source directory itself
+            # relative to the build directory.
+            return os.path.relpath(source_dir, build_dir)
+        if build_dir == source_dir:
+            return path_in_source_dir
+        return os.path.relpath(os.path.join(source_dir, path_in_source_dir), build_dir)
+
+ @final
+ @property
+ def is_buildsystem(self) -> bool:
+ return True
+
+
+class StepBasedBuildSystemRule(BuildSystemRule):
+
+ @classmethod
+ def characteristics(cls) -> BuildSystemCharacteristics:
+ raise NotImplementedError
+
+ @final
+ def perform_clean(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: CleanHelper,
+ **kwargs,
+ ) -> None:
+ self._check_characteristics()
+ self.before_first_impl_step(stage="clean")
+ self.clean_impl(context, manifest, clean_helper, **kwargs)
+ if self.out_of_source_build:
+ build_directory = self.build_directory
+ assert build_directory is not None
+ if os.path.lexists(build_directory):
+ clean_helper.schedule_removal_of_directories(build_directory)
+ dest_dir = self.resolve_dest_dir()
+ if not isinstance(dest_dir, BinaryPackage):
+ clean_helper.schedule_removal_of_directories(dest_dir)
+
+ @final
+ def perform_build(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ self._check_characteristics()
+ self.before_first_impl_step(stage="build")
+ self.configure_impl(context, manifest, **kwargs)
+ self.build_impl(context, manifest, **kwargs)
+ if context.should_run_tests:
+ self.test_impl(context, manifest, **kwargs)
+ dest_dir = self.resolve_dest_dir()
+ if isinstance(dest_dir, BinaryPackage):
+ dest_dir = f"debian/{dest_dir.name}"
+ # Make it absolute for everyone (that worked for debhelper).
+ # At least autoconf's "make install" requires an absolute path, so making is
+ # relative would have at least one known issue.
+ abs_dest_dir = os.path.abspath(dest_dir)
+ self.install_impl(context, manifest, abs_dest_dir, **kwargs)
+
+ def before_first_impl_step(
+ self,
+ /,
+ stage: Literal["build", "clean"],
+ **kwargs,
+ ) -> None:
+ """Called before any `*_impl` method is called.
+
+ This can be used to validate input against data that is not available statically
+ (that is, it will be checked during build but not in static checks). An example
+ is that the `debhelper` build system uses this to validate the provided `dh-build-system`
+ to ensure that `debhelper` knows about the build system. This check cannot be done
+ statically since the build system is only required to be available in a chroot build
+ and not on the host system.
+
+ The method can also be used to compute common state for all the `*_impl` methods that
+ is awkward to do in `__init__`. Note there is no data sharing between the different
+ stages. This has to do with how `debputy` will be called (usually `clean` followed by
+ a source package assembly in `dpkg` and then `build`).
+
+ The check is done both on build and on clean before the relevant implementation methods
+ are invoked.
+
+ Any exception will abort the build. Prefer to raise ManifestInvalidUserDataException
+ exceptions for issues related to incorrect data.
+
+ The method is not invoked if the steps are skipped, which can happen with build profiles
+ or arch:any vs. arch:all builds.
+
+ :param stage: A discriminator variable to determine which kind of steps will be invoked
+ after this method returns. For state initialization, this can be useful if the state
+ is somewhat expensive and not needed for `clean`.
+ """
+ pass
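+        # Illustrative sketch of an override (hypothetical tool name). The check
+        # only runs at build/clean time, where build dependencies are available:
+        #
+        #     def before_first_impl_step(self, /, stage, **kwargs) -> None:
+        #         if stage == "build" and shutil.which("upstream-generator") is None:
+        #             raise ManifestInvalidUserDataException(
+        #                 "This build requires the upstream-generator tool"
+        #             )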
+
+ def configure_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ """Called to handle the "configure" and "build" part of the build
+
+ This is basically a mix of `dh_auto_configure` and `dh_auto_build` from `debhelper`.
+ If the upstream build also runs test as a part of the build, this method should
+ check `context.should_run_tests` and pass the relevant flags to disable tests when
+ `context.should_run_tests` is false.
+ """
+ raise NotImplementedError
+
+ def build_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ """Called to handle the "configure" and "build" part of the build
+
+ This is basically a mix of `dh_auto_configure` and `dh_auto_build` from `debhelper`.
+ If the upstream build also runs test as a part of the build, this method should
+ check `context.should_run_tests` and pass the relevant flags to disable tests when
+ `context.should_run_tests` is false.
+ """
+ raise NotImplementedError
+
+ def test_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ **kwargs,
+ ) -> None:
+ """Called to handle the "test" part of the build
+
+ This is basically `dh_auto_test` from `debhelper`.
+
+ Note: This will be skipped when `context.should_run_tests` is False. Therefore, the
+ method can assume that when invoked then tests must be run.
+
+        It is always run after `configure_impl` and `build_impl`.
+ """
+ raise NotImplementedError
+
+ def install_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ dest_dir: str,
+ **kwargs,
+ ) -> None:
+ """Called to handle the "install" part of the build
+
+ This is basically `dh_auto_install` from `debhelper`.
+
+        The `dest_dir` parameter is where the upstream build should install its data.
+        It follows the `DESTDIR` convention from autoconf/make. The `dest_dir` should
+        not be second-guessed, since `debputy` will automatically provide it as a
+        search path for installation rules when relevant.
+
+        It is always run after `configure_impl` and `build_impl` and, if relevant, `test_impl`.
+ """
+ raise NotImplementedError
+
+ def clean_impl(
+ self,
+ context: "BuildContext",
+ manifest: "HighLevelManifest",
+ clean_helper: "CleanHelper",
+ **kwargs,
+ ) -> None:
+ """Called to handle the "clean" part of the build
+
+ This is basically `dh_auto_clean` from `debhelper`.
+
+ For out-of-source builds, `debputy` will remove the build directory for you
+        if it exists (when this method returns). This method is only for "in-source"
+        cleaning or for "dirty" state left outside the designated build directory.
+
+ Note that state *cannot* be shared between `clean` and other steps due to limitations
+ of how the Debian build system works in general.
+ """
+ raise NotImplementedError
+
+ def _check_characteristics(self) -> None:
+ characteristics = self.characteristics()
+
+ _debug_log(f"Characteristics for {self.name_or_tag} {self.__class__.__name__} ")
+
+ if self.out_of_source_build and self.perform_in_source_build:
+ raise ManifestInvalidUserDataException(
+ f"Cannot use 'build-directory' with 'perform-in-source-build' at {self.attribute_path.path}"
+ )
+ if (
+ characteristics.out_of_source_builds == "required"
+ and self.perform_in_source_build
+ ):
+ path = self.attribute_path["perform_in_source_build"].path_key_lc
+
+ # FIXME: How do I determine the faulty plugin from here.
+ raise PluginAPIViolationError(
+ f"The build system {self.__class__.__qualname__} had an perform-in-source-build attribute, but claims"
+ f" it requires out of source builds. Please file a bug against the provider asking them not to use"
+ f' "{OptionalInSourceBuild.__name__}" as base for their build system definition or tweak'
+ f" the characteristics of the build system as the current combination is inconsistent."
+ f" The offending definition is at {path}."
+ )
+
+ if (
+ characteristics.out_of_source_builds
+ in ("required", "supported-and-default")
+ and not self.out_of_source_build
+ ):
+
+ if not self.perform_in_source_build:
+ self._build_directory = self._pick_build_dir()
+ else:
+ assert characteristics.out_of_source_builds != "required"
+ elif (
+ characteristics.out_of_source_builds == "not-supported"
+ and self.out_of_source_build
+ ):
+ path = self.attribute_path["build_directory"].path_key_lc
+
+ # FIXME: How do I determine the faulty plugin from here.
+ raise PluginAPIViolationError(
+ f"The build system {self.__class__.__qualname__} had a build-directory attribute, but claims it does"
+ f" not support out of source builds. Please file a bug against the provider asking them not to use"
+ f' "{OptionalBuildDirectory.__name__}" as base for their build system definition or tweak'
+ f" the characteristics of the build system as the current combination is inconsistent."
+ f" The offending definition is at {path}."
+ )
+
+ def _pick_build_dir(self) -> str:
+ tag = self.name if self.name is not None else self.auto_generated_stem
+ if tag == "":
+ return "_build"
+ return f"_build-{tag}"
+
+ @final
+ def resolve_dest_dir(self) -> Union[str, BinaryPackage]:
+ auto_generated_stem = self.auto_generated_stem
+ if self.install_directly_to_package:
+ assert len(self.for_packages) == 1
+ return next(iter(self.for_packages))
+ if auto_generated_stem == "":
+ return "debian/tmp"
+ return f"debian/tmp-{auto_generated_stem}"
+
+
+# Using the same logic as debhelper for the same reasons.
+def _make_target_exists(make_cmd: str, target: str, *, directory: str = ".") -> bool:
+ cmd = [
+ make_cmd,
+ "-s",
+ "-n",
+ "--no-print-directory",
+ ]
+ if directory and directory != ".":
+ cmd.append("-C")
+ cmd.append(directory)
+ cmd.append(target)
+ env = dict(os.environ)
+ env["LC_ALL"] = "C.UTF-8"
+ try:
+ res = subprocess.run(
+ cmd,
+ stdin=subprocess.DEVNULL,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ env=env,
+ restore_signals=True,
+ )
+ except FileNotFoundError:
+ return False
+
+ options = (
+ f"*** No rule to make target '{target}",
+ f"*** No rule to make target `{target}",
+ )
+
+ stdout = res.stdout.decode("utf-8")
+ return not any(o in stdout for o in options)
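+    # Illustrative invocation: _make_target_exists("make", "check") runs
+    # `make -s -n --no-print-directory check` and treats the
+    # "No rule to make target" diagnostics above as "target does not exist".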
+
+
+def _find_first_existing_make_target(
+ make_cmd: str,
+ targets: Sequence[str],
+ *,
+ directory: str = ".",
+) -> Optional[str]:
+ for target in targets:
+ if _make_target_exists(make_cmd, target, directory=directory):
+ return target
+ return None
+
+
+_UNSET = object()
+
+
+class NinjaBuildSupport:
+ __slots__ = ("_provided_ninja_program", "_build_system_rule")
+
+ def __init__(
+ self,
+ provided_ninja_program: str,
+ build_system_rule: BuildSystemRule,
+ ) -> None:
+ self._provided_ninja_program = provided_ninja_program
+ self._build_system_rule = build_system_rule
+
+ @classmethod
+ def from_build_system(
+ cls,
+ build_system: BuildSystemRule,
+ *,
+ ninja_program: Optional[str] = None,
+ ) -> Self:
+ if ninja_program is None:
+ ninja_program = "ninja"
+ return cls(ninja_program, build_system)
+
+ @property
+ def _directory(self) -> str:
+ return self._build_system_rule.build_directory
+
+ def _pick_directory(
+ self, arg: Union[Optional[str], _UNSET] = _UNSET
+ ) -> Optional[str]:
+ if arg is _UNSET:
+ return self._directory
+ return arg
+
+ def run_ninja_build(
+ self,
+ build_context: "BuildContext",
+ *ninja_args: str,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ enable_parallelization: bool = True,
+ ) -> None:
+ extra_ninja_args = []
+ if not build_context.is_terse_build:
+ extra_ninja_args.append("-v")
+ self._run_ninja(
+ build_context,
+ *extra_ninja_args,
+ *ninja_args,
+ env_mod=env_mod,
+ directory=directory,
+ enable_parallelization=enable_parallelization,
+ )
+
+ def run_ninja_test(
+ self,
+ build_context: "BuildContext",
+ *ninja_args: str,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ enable_parallelization: bool = True,
+ ) -> None:
+ self._run_ninja(
+ build_context,
+ "test",
+ *ninja_args,
+ env_mod=env_mod,
+ directory=directory,
+ enable_parallelization=enable_parallelization,
+ )
+
+ def run_ninja_install(
+ self,
+ build_context: "BuildContext",
+ dest_dir: str,
+ *ninja_args: str,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ # debhelper never had parallel installs, so we do not have it either for now.
+ enable_parallelization: bool = False,
+ ) -> None:
+ install_env_mod = EnvironmentModification(
+ replacements={
+ "DESTDIR": dest_dir,
+ }
+ )
+ if env_mod is not None:
+ install_env_mod = install_env_mod.combine(env_mod)
+ self._run_ninja(
+ build_context,
+ "install",
+ *ninja_args,
+ directory=directory,
+ env_mod=install_env_mod,
+ enable_parallelization=enable_parallelization,
+ )
+
+ def run_ninja_clean(
+ self,
+ build_context: "BuildContext",
+ *ninja_args: str,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ enable_parallelization: bool = True,
+ ) -> None:
+ self._run_ninja(
+ build_context,
+ "clean",
+ *ninja_args,
+ env_mod=env_mod,
+ directory=directory,
+ enable_parallelization=enable_parallelization,
+ )
+
+ def _run_ninja(
+ self,
+ build_context: "BuildContext",
+ *ninja_args: str,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ enable_parallelization: bool = True,
+ ) -> None:
+ extra_ninja_args = []
+ limit = (
+ build_context.parallelization_limit(support_zero_as_unlimited=True)
+ if enable_parallelization
+ else 1
+ )
+ extra_ninja_args.append(f"-j{limit}")
+ ninja_env_mod = EnvironmentModification(
+ replacements={
+ "LC_ALL": "C.UTF-8",
+ }
+ )
+ if env_mod is not None:
+ ninja_env_mod = ninja_env_mod.combine(env_mod)
+ run_build_system_command(
+ self._provided_ninja_program,
+ *extra_ninja_args,
+ *ninja_args,
+ cwd=self._pick_directory(directory),
+ env_mod=ninja_env_mod,
+ )
+
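+# Illustrative sketch (hypothetical build system rule) of how NinjaBuildSupport
+# is typically wired up, mirroring the cmake/meson rules in build_system_rules.py:
+#
+#     self._ninja_support = NinjaBuildSupport.from_build_system(self)
+#     ...
+#     def build_impl(self, context, manifest, **kwargs):
+#         self._ninja_support.run_ninja_build(context)
+#
+#     def install_impl(self, context, manifest, dest_dir, **kwargs):
+#         self._ninja_support.run_ninja_install(context, dest_dir)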
+
+class MakefileSupport:
+
+ __slots__ = ("_provided_make_program", "_build_system_rule")
+
+ def __init__(
+ self,
+ make_program: str,
+ build_system_rule: BuildSystemRule,
+ ) -> None:
+ self._provided_make_program = make_program
+ self._build_system_rule = build_system_rule
+
+ @classmethod
+ def from_build_system(
+ cls,
+ build_system: BuildSystemRule,
+ *,
+ make_program: Optional[str] = None,
+ ) -> Self:
+ if make_program is None:
+ make_program = os.environ.get("MAKE", "make")
+ return cls(make_program, build_system)
+
+ @property
+ def _directory(self) -> str:
+ return self._build_system_rule.build_directory
+
+ @property
+ def _make_program(self) -> str:
+ make_program = self._provided_make_program
+ if self._provided_make_program is None:
+ return os.environ.get("MAKE", "make")
+ return make_program
+
+ def _pick_directory(
+ self, arg: Union[Optional[str], _UNSET] = _UNSET
+ ) -> Optional[str]:
+ if arg is _UNSET:
+ return self._directory
+ return arg
+
+ def find_first_existing_make_target(
+ self,
+ targets: Sequence[str],
+ *,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ ) -> Optional[str]:
+ for target in targets:
+ if self.make_target_exists(target, directory=directory):
+ return target
+ return None
+
+ def make_target_exists(
+ self,
+ target: str,
+ *,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ ) -> bool:
+ return _make_target_exists(
+ self._make_program,
+ target,
+ directory=self._pick_directory(directory),
+ )
+
+ def run_first_existing_target_if_any(
+ self,
+ build_context: "BuildContext",
+ targets: Sequence[str],
+ *make_args: str,
+ enable_parallelization: bool = True,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ ) -> bool:
+ target = self.find_first_existing_make_target(targets, directory=directory)
+ if target is None:
+ return False
+
+ self.run_make(
+ build_context,
+ target,
+ *make_args,
+ enable_parallelization=enable_parallelization,
+ directory=directory,
+ env_mod=env_mod,
+ )
+ return True
+
+ def run_make(
+ self,
+ build_context: "BuildContext",
+ *make_args: str,
+ enable_parallelization: bool = True,
+ directory: Union[Optional[str], _UNSET] = _UNSET,
+ env_mod: Optional[EnvironmentModification] = None,
+ ) -> None:
+ limit = (
+ build_context.parallelization_limit(support_zero_as_unlimited=True)
+ if enable_parallelization
+ else 1
+ )
+ extra_make_args = [f"-j{limit}"] if limit else ["-j"]
+ run_build_system_command(
+ self._make_program,
+ *extra_make_args,
+ *make_args,
+ cwd=self._pick_directory(directory),
+ env_mod=env_mod,
+ )
+
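+# Illustrative sketch (hypothetical build system rule) of how MakefileSupport is
+# typically used, mirroring the make/autoconf/qmake rules in build_system_rules.py:
+#
+#     self._make_support = MakefileSupport.from_build_system(self)
+#     ...
+#     def build_impl(self, context, manifest, **kwargs):
+#         self._make_support.run_make(context)
+#
+#     def test_impl(self, context, manifest, **kwargs):
+#         self._make_support.run_first_existing_target_if_any(
+#             context, ["check", "test"]
+#         )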
+
+def debputy_build_system(
+ # For future self: Before you get ideas about making manifest_keyword accept a list,
+ # remember it has consequences for shadowing_build_systems_when_active.
+ manifest_keyword: str,
+ provider: Type[BSR],
+ *,
+ expected_debputy_integration_mode: Optional[
+ Container[DebputyIntegrationMode]
+ ] = None,
+ auto_detection_shadows_build_systems: Optional[
+ Union[str, Iterable[str]]
+ ] = frozenset(),
+ online_reference_documentation: Optional[ParserDocumentation] = None,
+ apply_standard_attribute_documentation: bool = False,
+ source_format: Optional[Any] = None,
+) -> Callable[[Type[BSPF]], Type[BSPF]]:
+ if not isinstance(provider, type) or not issubclass(provider, BuildSystemRule):
+ raise PluginInitializationError(
+ f"The provider for @{debputy_build_system.__name__} must be subclass of {BuildSystemRule.__name__}goes on the TypedDict that defines the parsed"
+ f" variant of the manifest definition. Not the build system implementation class."
+ )
+
+ def _constructor_wrapper(
+ _rule_used: str,
+ *args,
+ **kwargs,
+ ) -> BSR:
+ return provider(*args, **kwargs)
+
+ if isinstance(auto_detection_shadows_build_systems, str):
+ shadows = frozenset([auto_detection_shadows_build_systems])
+ else:
+ shadows = frozenset(auto_detection_shadows_build_systems)
+
+ metadata = BuildSystemManifestRuleMetadata(
+ (manifest_keyword,),
+ BuildRule,
+ _constructor_wrapper,
+ expected_debputy_integration_mode=expected_debputy_integration_mode,
+ source_format=source_format,
+ online_reference_documentation=online_reference_documentation,
+ apply_standard_attribute_documentation=apply_standard_attribute_documentation,
+ auto_detection_shadow_build_systems=shadows,
+ build_system_impl=provider,
+ )
+
+ def _decorator_impl(pf_cls: Type[BSPF]) -> Type[BSPF]:
+ if isinstance(pf_cls, type) and issubclass(pf_cls, BuildSystemRule):
+ raise PluginInitializationError(
+ f"The @{debputy_build_system.__name__} annotation goes on the TypedDict that defines the parsed"
+ f" variant of the manifest definition. Not the build system implementation class."
+ )
+
+ # TODO: In python3.12 we can check more than just `is_typeddict`. In python3.11, woe is us and
+ # is_typeddict is the only thing that reliably works (cpython#103699)
+ if not is_typeddict(pf_cls):
+ raise PluginInitializationError(
+ f"Expected annotated class to be a subclass of {BuildRuleParsedFormat.__name__},"
+ f" but got {pf_cls.__name__} instead"
+ )
+
+ setattr(pf_cls, _DEBPUTY_DISPATCH_METADATA_ATTR_NAME, metadata)
+ return pf_cls
+
+ return _decorator_impl
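+# Editor's note: an illustrative sketch (not part of the upstream change) of how a plugin
+# could apply the decorator above. `MyBuildSystem`, `MY_SOURCE_FORMAT` and the keyword
+# "my-build-system" are hypothetical; the decorated class is the TypedDict describing the
+# parsed manifest data, while the BuildSystemRule subclass is passed as `provider`.
+#
+#     @debputy_build_system(
+#         "my-build-system",
+#         MyBuildSystem,
+#         auto_detection_shadows_build_systems="make",
+#         source_format=MY_SOURCE_FORMAT,
+#     )
+#     class MyBuildSystemParsedFormat(BuildRuleParsedFormat):
+#         pass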
diff --git a/src/debputy/plugin/plugin_state.py b/src/debputy/plugin/plugin_state.py
new file mode 100644
index 0000000..ef4dabb
--- /dev/null
+++ b/src/debputy/plugin/plugin_state.py
@@ -0,0 +1,113 @@
+import contextvars
+import functools
+import inspect
+from contextvars import ContextVar
+from typing import Optional, Callable, ParamSpec, TypeVar, NoReturn, Union
+
+from debputy.exceptions import (
+ UnhandledOrUnexpectedErrorFromPluginError,
+ DebputyRuntimeError,
+)
+from debputy.util import _debug_log, _is_debug_log_enabled
+
+_current_debputy_plugin_cxt_var: ContextVar[Optional[str]] = ContextVar(
+ "current_debputy_plugin",
+ default=None,
+)
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
+def current_debputy_plugin_if_present() -> Optional[str]:
+ return _current_debputy_plugin_cxt_var.get()
+
+
+def current_debputy_plugin_required() -> str:
+ v = current_debputy_plugin_if_present()
+ if v is None:
+ raise AssertionError(
+ "current_debputy_plugin_required() was called, but no plugin was set."
+ )
+ return v
+
+
+def wrap_plugin_code(
+ plugin_name: str,
+ func: Callable[P, R],
+ *,
+ non_debputy_exception_handling: Union[bool, Callable[[Exception], NoReturn]] = True,
+) -> Callable[P, R]:
+ if isinstance(non_debputy_exception_handling, bool):
+
+ runner = run_in_context_of_plugin
+ if non_debputy_exception_handling:
+ runner = run_in_context_of_plugin_wrap_errors
+
+        def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+ return runner(plugin_name, func, *args, **kwargs)
+
+ functools.update_wrapper(_wrapper, func)
+ return _wrapper
+
+    def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+ try:
+ return run_in_context_of_plugin(plugin_name, func, *args, **kwargs)
+ except DebputyRuntimeError:
+ raise
+ except Exception as e:
+ non_debputy_exception_handling(e)
+
+ functools.update_wrapper(_wrapper, func)
+ return _wrapper
+
+
+def run_in_context_of_plugin(
+ plugin: str,
+ func: Callable[P, R],
+ *args: P.args,
+ **kwargs: P.kwargs,
+) -> R:
+ context = contextvars.copy_context()
+ if _is_debug_log_enabled():
+ call_stack = inspect.stack()
+ caller: str = "[N/A]"
+ for frame in call_stack:
+ if frame.filename != __file__:
+ try:
+ fname = frame.frame.f_code.co_qualname
+ except AttributeError:
+ fname = None
+ if fname is None:
+ fname = frame.function
+ caller = f"{frame.filename}:{frame.lineno} ({fname})"
+ break
+ # Do not keep the reference longer than necessary
+ del call_stack
+ _debug_log(
+ f"Switching plugin context to {plugin} at {caller} (from context: {current_debputy_plugin_if_present()})"
+ )
+ # Wish we could just do a regular set without wrapping it in `context.run`
+ context.run(_current_debputy_plugin_cxt_var.set, plugin)
+ return context.run(func, *args, **kwargs)
+
+
+def run_in_context_of_plugin_wrap_errors(
+ plugin: str,
+ func: Callable[P, R],
+ *args: P.args,
+ **kwargs: P.kwargs,
+) -> R:
+ try:
+ return run_in_context_of_plugin(plugin, func, *args, **kwargs)
+ except DebputyRuntimeError:
+ raise
+ except Exception as e:
+ if plugin != "debputy":
+ raise UnhandledOrUnexpectedErrorFromPluginError(
+ f"{func.__qualname__} from the plugin {plugin} raised exception that was not expected here."
+ ) from e
+ else:
+ raise AssertionError(
+ "Bug in the `debputy` plugin: Unhandled exception."
+ ) from e
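+
+
+# Editor's note: a minimal sketch (not part of the upstream change) of how these helpers
+# are intended to be combined. The plugin name "my-plugin" and `my_check` are made up.
+# Inside the wrapped call, current_debputy_plugin_required() reports "my-plugin", and any
+# unexpected non-debputy exception is re-raised as UnhandledOrUnexpectedErrorFromPluginError.
+#
+#     def my_check(path: str) -> bool:
+#         return current_debputy_plugin_required() == "my-plugin"
+#
+#     wrapped = wrap_plugin_code("my-plugin", my_check)
+#     wrapped("/usr/bin/example")  # returns True, with the plugin context set while it runs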
diff --git a/src/debputy/transformation_rules.py b/src/debputy/transformation_rules.py
index c7f8a2a..6e96c64 100644
--- a/src/debputy/transformation_rules.py
+++ b/src/debputy/transformation_rules.py
@@ -11,6 +11,7 @@ from typing import (
Dict,
TypeVar,
cast,
+ final,
)
from debputy.exceptions import (
@@ -27,12 +28,15 @@ from debputy.manifest_parser.base_types import (
FileSystemMode,
StaticFileSystemOwner,
StaticFileSystemGroup,
- DebputyDispatchableType,
)
+from debputy.manifest_parser.tagging_types import DebputyDispatchableType
from debputy.manifest_parser.util import AttributePath
from debputy.path_matcher import MatchRule
from debputy.plugin.api import VirtualPath
from debputy.plugin.debputy.types import DebputyCapability
+from debputy.plugin.plugin_state import (
+ run_in_context_of_plugin_wrap_errors,
+)
from debputy.util import _warn
@@ -59,10 +63,26 @@ class PreProvidedExclusion:
class TransformationRule(DebputyDispatchableType):
+
__slots__ = ()
+ @final
+ def run_transform_file_system(
+ self,
+ fs_root: FSPath,
+ condition_context: ConditionContext,
+ ) -> None:
+ run_in_context_of_plugin_wrap_errors(
+ self._debputy_plugin,
+ self.transform_file_system,
+ fs_root,
+ condition_context,
+ )
+
def transform_file_system(
- self, fs_root: FSPath, condition_context: ConditionContext
+ self,
+ fs_root: FSPath,
+ condition_context: ConditionContext,
) -> None:
raise NotImplementedError
@@ -134,6 +154,7 @@ class RemoveTransformationRule(TransformationRule):
keep_empty_parent_dirs: bool,
definition_source: AttributePath,
) -> None:
+ super().__init__()
self._match_rules = match_rules
self._keep_empty_parent_dirs = keep_empty_parent_dirs
self._definition_source = definition_source.path
@@ -180,6 +201,7 @@ class MoveTransformationRule(TransformationRule):
definition_source: AttributePath,
condition: Optional[ManifestCondition],
) -> None:
+ super().__init__()
self._match_rule = match_rule
self._dest_path = dest_path
self._dest_is_dir = dest_is_dir
@@ -283,6 +305,7 @@ class CreateSymlinkPathTransformationRule(TransformationRule):
definition_source: AttributePath,
condition: Optional[ManifestCondition],
) -> None:
+ super().__init__()
self._link_target = link_target
self._link_dest = link_dest
self._replacement_rule = replacement_rule
@@ -550,6 +573,9 @@ class ModeNormalizationTransformationRule(TransformationRule):
self,
normalizations: Sequence[Tuple[MatchRule, FileSystemMode]],
) -> None:
+ # A bit of a hack since it is initialized outside `debputy`. It probably should not
+ # be a "TransformationRule" (hindsight and all)
+ run_in_context_of_plugin_wrap_errors("debputy", super().__init__)
self._normalizations = normalizations
def transform_file_system(
@@ -575,6 +601,12 @@ class ModeNormalizationTransformationRule(TransformationRule):
class NormalizeShebangLineTransformation(TransformationRule):
+
+ def __init__(self) -> None:
+ # A bit of a hack since it is initialized outside `debputy`. It probably should not
+ # be a "TransformationRule" (hindsight and all)
+ run_in_context_of_plugin_wrap_errors("debputy", super().__init__)
+
def transform_file_system(
self,
fs_root: VirtualPath,
diff --git a/src/debputy/types.py b/src/debputy/types.py
index 05e68c9..dc3cbd3 100644
--- a/src/debputy/types.py
+++ b/src/debputy/types.py
@@ -1,9 +1,138 @@
-from typing import TypeVar, TYPE_CHECKING
+import dataclasses
+from typing import (
+ TypeVar,
+ TYPE_CHECKING,
+ Sequence,
+ Tuple,
+ Mapping,
+ Dict,
+ Optional,
+ TypedDict,
+ NotRequired,
+ List,
+ MutableMapping,
+)
if TYPE_CHECKING:
from debputy.plugin.api import VirtualPath
from debputy.filesystem_scan import FSPath
+ VP = TypeVar("VP", VirtualPath, FSPath)
+ S = TypeVar("S", str, bytes)
+else:
+ VP = TypeVar("VP", "VirtualPath", "FSPath")
+ S = TypeVar("S", str, bytes)
-VP = TypeVar("VP", "VirtualPath", "FSPath")
-S = TypeVar("S", str, bytes)
+
+class EnvironmentModificationSerialized(TypedDict):
+ replacements: NotRequired[Dict[str, str]]
+ removals: NotRequired[List[str]]
+
+
+@dataclasses.dataclass(slots=True, frozen=True)
+class EnvironmentModification:
+ replacements: Sequence[Tuple[str, str]] = tuple()
+ removals: Sequence[str] = tuple()
+
+ @staticmethod
+ def from_serialized_format(
+ serial_form: EnvironmentModificationSerialized,
+ ) -> "EnvironmentModification":
+ replacements_raw = serial_form.get("replacements")
+ if replacements_raw is not None:
+ replacements = tuple((k, v) for k, v in replacements_raw.items())
+ else:
+ replacements = tuple()
+ return EnvironmentModification(
+ replacements=replacements, removals=serial_form.get("removals", tuple())
+ )
+
+    def __bool__(self) -> bool:
+        return bool(self.removals) or bool(self.replacements)
+
+ def combine(
+ self, other: "Optional[EnvironmentModification]"
+ ) -> "EnvironmentModification":
+ if not other:
+ return self
+ existing_replacements = {k: v for k, v in self.replacements}
+ extra_replacements = {
+ k: v
+ for k, v in other.replacements
+ if k not in existing_replacements or existing_replacements[k] != v
+ }
+ seen_removals = set(self.removals)
+ extra_removals = [r for r in other.removals if r not in seen_removals]
+
+ if not extra_replacements and isinstance(self.replacements, tuple):
+ new_replacements = self.replacements
+ else:
+ new_replacements = []
+            for k, v in existing_replacements.items():
+ if k not in extra_replacements:
+ new_replacements.append((k, v))
+
+ for k, v in other.replacements:
+ if k in extra_replacements:
+ new_replacements.append((k, v))
+
+ new_replacements = tuple(new_replacements)
+
+ if not extra_removals and isinstance(self.removals, tuple):
+ new_removals = self.removals
+ else:
+ new_removals = list(self.removals)
+ new_removals.extend(extra_removals)
+ new_removals = tuple(new_removals)
+
+ if self.replacements is new_replacements and self.removals is new_removals:
+ return self
+
+ return EnvironmentModification(
+ new_replacements,
+ new_removals,
+ )
+
+ def serialize(self) -> EnvironmentModificationSerialized:
+        serial_form: EnvironmentModificationSerialized = {}
+ replacements = self.replacements
+ if replacements:
+ serial_form["replacements"] = {k: v for k, v in replacements}
+ removals = self.removals
+ if removals:
+ serial_form["removals"] = list(removals)
+ return serial_form
+
+ def update_inplace(self, env: MutableMapping[str, str]) -> None:
+ for k, v in self.replacements:
+ existing_value = env.get(k)
+ if v == existing_value:
+ continue
+ env[k] = v
+
+ for k in self.removals:
+ if k not in env:
+ continue
+ del env[k]
+
+ def compute_env(self, base_env: Mapping[str, str]) -> Mapping[str, str]:
+ updated_env: Optional[Dict[str, str]] = None
+ for k, v in self.replacements:
+ existing_value = base_env.get(k)
+ if v == existing_value:
+ continue
+
+ if updated_env is None:
+ updated_env = dict(base_env)
+ updated_env[k] = v
+
+ for k in self.removals:
+ if k not in base_env:
+ continue
+ if updated_env is None:
+ updated_env = dict(base_env)
+ del updated_env[k]
+
+ if updated_env is not None:
+ return updated_env
+ return base_env
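+
+
+# Editor's note: an illustrative sketch (not part of the upstream change) of how
+# EnvironmentModification is meant to compose; every value below is made up.
+#
+#     base = EnvironmentModification(replacements=(("DEB_BUILD_OPTIONS", "nocheck"),))
+#     extra = EnvironmentModification(removals=("LD_PRELOAD",))
+#     combined = base.combine(extra)
+#     env = combined.compute_env({"LD_PRELOAD": "x.so", "PATH": "/usr/bin"})
+#     # env now contains DEB_BUILD_OPTIONS=nocheck, keeps PATH and drops LD_PRELOAD.
+#     round_trip = EnvironmentModification.from_serialized_format(combined.serialize())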
diff --git a/src/debputy/util.py b/src/debputy/util.py
index ebd38c2..911e6fa 100644
--- a/src/debputy/util.py
+++ b/src/debputy/util.py
@@ -34,7 +34,7 @@ from debian.deb822 import Deb822
from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
from debputy.exceptions import DebputySubstitutionError
-
+from debputy.types import EnvironmentModification
try:
from Levenshtein import distance
@@ -105,7 +105,12 @@ _PROFILE_GROUP_SPLIT = re.compile(r">\s+<")
_DEFAULT_LOGGER: Optional[logging.Logger] = None
_STDOUT_HANDLER: Optional[logging.StreamHandler[Any]] = None
_STDERR_HANDLER: Optional[logging.StreamHandler[Any]] = None
-PRINT_COMMAND = logging.INFO + 5
+PRINT_COMMAND = logging.INFO + 3
+PRINT_BUILD_SYSTEM_COMMAND = PRINT_COMMAND + 3
+
+# Map them back to `INFO`. Level names must be unique, so they carry an underscore
+# prefix here; the log record factory below strips the prefix again before formatting.
+logging.addLevelName(PRINT_COMMAND, "__INFO")
+logging.addLevelName(PRINT_BUILD_SYSTEM_COMMAND, "_INFO")
def assume_not_none(x: Optional[T]) -> T:
@@ -116,6 +121,13 @@ def assume_not_none(x: Optional[T]) -> T:
return x
+def _non_verbose_info(msg: str) -> None:
+ global _DEFAULT_LOGGER
+ logger = _DEFAULT_LOGGER
+ if logger is not None:
+ logger.log(PRINT_BUILD_SYSTEM_COMMAND, msg)
+
+
def _info(msg: str) -> None:
global _DEFAULT_LOGGER
logger = _DEFAULT_LOGGER
@@ -124,6 +136,20 @@ def _info(msg: str) -> None:
# No fallback print for info
+def _is_debug_log_enabled() -> bool:
+ global _DEFAULT_LOGGER
+ logger = _DEFAULT_LOGGER
+ return logger is not None and logger.isEnabledFor(logging.DEBUG)
+
+
+def _debug_log(msg: str) -> None:
+ global _DEFAULT_LOGGER
+ logger = _DEFAULT_LOGGER
+ if logger:
+ logger.debug(msg)
+    # No fallback print for debug messages
+
+
def _error(msg: str, *, prog: Optional[str] = None) -> "NoReturn":
global _DEFAULT_LOGGER
logger = _DEFAULT_LOGGER
@@ -226,9 +252,88 @@ def escape_shell(*args: str) -> str:
return " ".join(_escape_shell_word(w) for w in args)
-def print_command(*args: str, print_at_log_level: int = PRINT_COMMAND) -> None:
- if _DEFAULT_LOGGER and _DEFAULT_LOGGER.isEnabledFor(print_at_log_level):
- print(f" {escape_shell(*args)}")
+def render_command(
+ *args: str,
+ cwd: Optional[str] = None,
+ env_mod: Optional[EnvironmentModification] = None,
+) -> str:
+ env_mod_prefix = ""
+ if env_mod:
+ env_mod_parts = []
+ if bool(env_mod.removals):
+ env_mod_parts.append("env")
+ if cwd is not None:
+ env_mod_parts.append(f"--chdir={escape_shell(cwd)}")
+ env_mod_parts.extend(f"--unset={escape_shell(v)}" for v in env_mod.removals)
+        env_mod_parts.extend(
+            f"{escape_shell(k)}={escape_shell(v)}" for k, v in env_mod.replacements
+        )
+        env_mod_prefix = " ".join(env_mod_parts) + " "
+
+ chdir_prefix = ""
+ if cwd is not None and cwd != ".":
+ chdir_prefix = f"cd {escape_shell(cwd)} && "
+ return f"{chdir_prefix}{env_mod_prefix}{escape_shell(*args)}"
+
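+# Editor's note: an illustrative sketch (not part of the upstream change) of the kind of
+# shell-ready string render_command() produces; the concrete values are made up.
+#
+#     render_command(
+#         "make", "install",
+#         env_mod=EnvironmentModification(
+#             replacements=(("DESTDIR", "/tmp/destdir"),),
+#             removals=("LD_PRELOAD",),
+#         ),
+#     )
+#     # => roughly: env --unset=LD_PRELOAD DESTDIR=/tmp/destdir make install
+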
+
+def print_command(
+ *args: str,
+ cwd: Optional[str] = None,
+ env_mod: Optional[EnvironmentModification] = None,
+ print_at_log_level: int = PRINT_COMMAND,
+) -> None:
+ if _DEFAULT_LOGGER is None or not _DEFAULT_LOGGER.isEnabledFor(print_at_log_level):
+ return
+
+ rendered_cmd = render_command(
+ *args,
+ cwd=cwd,
+ env_mod=env_mod,
+ )
+ print(f" {rendered_cmd}")
+
+
+def run_command(
+ *args: str,
+ cwd: Optional[str] = None,
+ env: Optional[Mapping[str, str]] = None,
+ env_mod: Optional[EnvironmentModification] = None,
+ print_at_log_level: int = PRINT_COMMAND,
+) -> None:
+ print_command(
+ *args,
+ cwd=cwd,
+ env_mod=env_mod,
+ print_at_log_level=print_at_log_level,
+ )
+ if env_mod:
+ if env is None:
+ env = os.environ
+ env = env_mod.compute_env(env)
+ if env is os.environ:
+ env = None
+ try:
+ subprocess.check_call(args, cwd=cwd, env=env)
+ # At least "clean_logic.py" relies on catching FileNotFoundError
+ except KeyboardInterrupt:
+ _error(f"Interrupted (SIGINT) while running {escape_shell(*args)}")
+ except subprocess.CalledProcessError as e:
+ _error(f"The command {escape_shell(*args)} failed with status: {e.returncode}")
+
+
+def run_build_system_command(
+ *args: str,
+ cwd: Optional[str] = None,
+ env: Optional[Mapping[str, str]] = None,
+ env_mod: Optional[EnvironmentModification] = None,
+ print_at_log_level: int = PRINT_BUILD_SYSTEM_COMMAND,
+) -> None:
+ run_command(
+ *args,
+ cwd=cwd,
+ env=env,
+ env_mod=env_mod,
+ print_at_log_level=print_at_log_level,
+ )
def debian_policy_normalize_symlink_target(
@@ -398,7 +503,7 @@ def integrated_with_debhelper() -> None:
_DH_INTEGRATION_MODE = True
-def scratch_dir() -> str:
+def scratch_dir(*, create_if_not_exists: bool = True) -> str:
global _SCRATCH_DIR
if _SCRATCH_DIR is not None:
return _SCRATCH_DIR
@@ -411,9 +516,10 @@ def scratch_dir() -> str:
is_debputy_dir = False
else:
_SCRATCH_DIR = debputy_scratch_dir
- ensure_dir(_SCRATCH_DIR)
- if is_debputy_dir:
- Path("debian/.debputy/.gitignore").write_text("*\n")
+ if create_if_not_exists:
+ ensure_dir(_SCRATCH_DIR)
+ if is_debputy_dir:
+ Path("debian/.debputy/.gitignore").write_text("*\n")
return _SCRATCH_DIR
@@ -455,9 +561,11 @@ def generated_content_dir(
return directory
-PerlIncDir = collections.namedtuple("PerlIncDir", ["vendorlib", "vendorarch"])
+PerlConfigVars = collections.namedtuple(
+ "PerlIncDir", ["vendorlib", "vendorarch", "cross_inc_dir", "ld", "path_sep"]
+)
PerlConfigData = collections.namedtuple("PerlConfigData", ["version", "debian_abi"])
-_PERL_MODULE_DIRS: Dict[str, PerlIncDir] = {}
+_PERL_MODULE_DIRS: Dict[str, PerlConfigVars] = {}
@functools.lru_cache(1)
@@ -490,42 +598,56 @@ def perlxs_api_dependency() -> str:
return f"perlapi-{config.version}"
-def perl_module_dirs(
+def resolve_perl_config(
dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
- dctrl_bin: "BinaryPackage",
-) -> PerlIncDir:
+ dctrl_bin: Optional["BinaryPackage"],
+) -> PerlConfigVars:
global _PERL_MODULE_DIRS
- arch = (
- dctrl_bin.resolved_architecture
- if dpkg_architecture_variables.is_cross_compiling
- else "_default_"
- )
- module_dir = _PERL_MODULE_DIRS.get(arch)
- if module_dir is None:
+ if dpkg_architecture_variables.is_cross_compiling:
+ arch = (
+ dctrl_bin.resolved_architecture
+ if dctrl_bin is not None
+ else dpkg_architecture_variables.current_host_arch
+ )
+ else:
+ arch = "_build_arch_"
+ config_vars = _PERL_MODULE_DIRS.get(arch)
+ if config_vars is None:
cmd = ["perl"]
if dpkg_architecture_variables.is_cross_compiling:
version = _perl_version()
- inc_dir = f"/usr/lib/{dctrl_bin.deb_multiarch}/perl/cross-config-{version}"
+ cross_inc_dir = (
+ f"/usr/lib/{dctrl_bin.deb_multiarch}/perl/cross-config-{version}"
+ )
# FIXME: This should not fallback to "build-arch" but on the other hand, we use the perl module dirs
# for every package at the moment. So mandating correct perl dirs implies mandating perl-xs-dev in
# cross builds... meh.
- if os.path.exists(os.path.join(inc_dir, "Config.pm")):
- cmd.append(f"-I{inc_dir}")
+ if os.path.exists(os.path.join(cross_inc_dir, "Config.pm")):
+ cmd.append(f"-I{cross_inc_dir}")
+ else:
+ cross_inc_dir = None
cmd.extend(
- ["-MConfig", "-e", 'print "$Config{vendorlib}\n$Config{vendorarch}\n"']
+ [
+ "-MConfig",
+ "-e",
+ 'print "$Config{vendorlib}\n$Config{vendorarch}\n$Config{ld}\n$Config{path_sep}\n"',
+ ]
)
output = subprocess.check_output(cmd).decode("utf-8").splitlines(keepends=False)
- if len(output) != 2:
+ if len(output) != 4:
raise ValueError(
"Internal error: Unable to determine the perl include directories:"
f" Raw output from perl snippet: {output}"
)
- module_dir = PerlIncDir(
- vendorlib=_normalize_path(output[0]),
- vendorarch=_normalize_path(output[1]),
+ config_vars = PerlConfigVars(
+ vendorlib="/" + _normalize_path(output[0], with_prefix=False),
+ vendorarch="/" + _normalize_path(output[1], with_prefix=False),
+ cross_inc_dir=cross_inc_dir,
+ ld=output[2],
+ path_sep=output[3],
)
- _PERL_MODULE_DIRS[arch] = module_dir
- return module_dir
+ _PERL_MODULE_DIRS[arch] = config_vars
+ return config_vars
@functools.lru_cache(1)
@@ -736,6 +858,12 @@ def change_log_level(
_DEFAULT_LOGGER.setLevel(log_level)
+def current_log_level() -> Optional[int]:
+ if _DEFAULT_LOGGER is not None:
+ return _DEFAULT_LOGGER.level
+ return None
+
+
def setup_logging(
*,
log_only_to_stderr: bool = False,
@@ -748,13 +876,20 @@ def setup_logging(
" Use reconfigure_logging=True if you need to reconfigure it"
)
stdout_color, stderr_color, bad_request = _check_color()
+ colors: Optional[Dict[str, str]] = None
if stdout_color or stderr_color:
try:
import colorlog
+
except ImportError:
stdout_color = False
stderr_color = False
+ else:
+ colors = dict(colorlog.default_log_colors)
+ # Add our custom levels.
+ colors["_INFO"] = colors["INFO"]
+ colors["__INFO"] = colors["INFO"]
if log_only_to_stderr:
stdout = sys.stderr
@@ -785,7 +920,12 @@ def setup_logging(
if stdout_color:
stdout_handler = colorlog.StreamHandler(stdout)
stdout_handler.setFormatter(
- colorlog.ColoredFormatter(color_format, style="{", force_color=True)
+ colorlog.ColoredFormatter(
+ color_format,
+ style="{",
+ force_color=True,
+ log_colors=colors,
+ )
)
logger = colorlog.getLogger()
if existing_stdout_handler is not None:
@@ -804,7 +944,12 @@ def setup_logging(
if stderr_color:
stderr_handler = colorlog.StreamHandler(sys.stderr)
stderr_handler.setFormatter(
- colorlog.ColoredFormatter(color_format, style="{", force_color=True)
+ colorlog.ColoredFormatter(
+ color_format,
+ style="{",
+ force_color=True,
+ log_colors=colors,
+ )
)
logger = logging.getLogger()
if existing_stderr_handler is not None:
@@ -831,6 +976,7 @@ def setup_logging(
*args: Any, **kwargs: Any
) -> logging.LogRecord: # pragma: no cover
record = old_factory(*args, **kwargs)
+ record.levelname = record.levelname.lstrip("_")
record.levelnamelower = record.levelname.lower()
return record