From 03a1bd448be99d872d663a57a1cf4492882e090d Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Thu, 25 Apr 2024 04:59:47 +0200
Subject: Adding upstream version 0.1.29.

Signed-off-by: Daniel Baumann
---
 .gitlab-ci.yml | 23 +
 coverage-report/coverage_html.js | 624 ----
 ...d_08cd1ad648464ded_debputy_self_hosting_py.html | 160 -
 ...d1ad648464ded_debputy_self_hosting_test_py.html | 109 -
 .../d_128305113a77411b___init___py.html | 99 -
 .../d_128305113a77411b_assemble_deb_py.html | 354 --
 .../d_23db3c975895bd86___init___py.html | 99 -
 .../d_23db3c975895bd86_migration_py.html | 445 ---
 .../d_23db3c975895bd86_migrators_impl_py.html | 1829 ----------
 .../d_23db3c975895bd86_migrators_py.html | 166 -
 coverage-report/d_23db3c975895bd86_models_py.html | 272 --
 .../d_267b6307937f1878___init___py.html | 115 -
 ..._267b6307937f1878__deb_options_profiles_py.html | 190 --
 .../d_267b6307937f1878__manifest_constants_py.html | 148 -
 ...d_267b6307937f1878_architecture_support_py.html | 332 --
 ...267b6307937f1878_builtin_manifest_rules_py.html | 360 --
 ..._267b6307937f1878_deb_packaging_support_py.html | 1794 ----------
 .../d_267b6307937f1878_debhelper_emulation_py.html | 368 --
 .../d_267b6307937f1878_elf_util_py.html | 307 --
 .../d_267b6307937f1878_exceptions_py.html | 189 -
 .../d_267b6307937f1878_filesystem_scan_py.html | 2020 -----------
 ...b6307937f1878_highlevel_manifest_parser_py.html | 637 ----
 .../d_267b6307937f1878_highlevel_manifest_py.html | 1706 ----------
 .../d_267b6307937f1878_installations_py.html | 1261 -------
 ..._267b6307937f1878_intermediate_manifest_py.html | 432 ---
 .../d_267b6307937f1878_interpreter_py.html | 319 --
 .../d_267b6307937f1878_maintscript_snippet_py.html | 283 --
 .../d_267b6307937f1878_manifest_conditions_py.html | 338 --
 ...67b6307937f1878_packager_provided_files_py.html | 422 ---
 .../d_267b6307937f1878_packages_py.html | 431 ---
 .../d_267b6307937f1878_path_matcher_py.html | 628 ----
 .../d_267b6307937f1878_substitution_py.html | 435 ---
 ...d_267b6307937f1878_transformation_rules_py.html | 695 ----
 coverage-report/d_267b6307937f1878_types_py.html | 108 -
 coverage-report/d_267b6307937f1878_util_py.html | 903 -----
 coverage-report/d_267b6307937f1878_version_py.html | 166 -
 .../d_2882d0a735873825___init___py.html | 99 -
 .../d_2882d0a735873825_deb_materialization_py.html | 686 ----
 .../d_2882d0a735873825_deb_packer_py.html | 656 ----
 .../d_36a196ce5f578895___init___py.html | 99 -
 .../d_36a196ce5f578895_alternatives_py.html | 324 --
 .../d_36a196ce5f578895_debconf_templates_py.html | 176 -
 .../d_36a196ce5f578895_makeshlibs_py.html | 413 ---
 .../d_4b9be07fb6071cd2___init___py.html | 120 -
 .../d_4b9be07fb6071cd2_test_impl_py.html | 902 -----
 .../d_4b9be07fb6071cd2_test_spec_py.html | 463 ---
 .../d_4f754ff76d8638bb___init___py.html | 99 -
 .../d_4f754ff76d8638bb_base_types_py.html | 539 ---
 .../d_4f754ff76d8638bb_declarative_parser_py.html | 2102 ------------
 .../d_4f754ff76d8638bb_exceptions_py.html | 108 -
 .../d_4f754ff76d8638bb_mapper_code_py.html | 176 -
 .../d_4f754ff76d8638bb_parser_data_py.html | 232 --
 .../d_4f754ff76d8638bb_parser_doc_py.html | 372 --
 coverage-report/d_4f754ff76d8638bb_util_py.html | 419 ---
 .../d_4faea183f900b252___init___py.html | 99 -
 .../d_50e3cc0df0cc5f51___init___py.html | 99 -
 .../d_5d0ec0d5422112df___init___py.html | 99 -
 .../d_5d0ec0d5422112df_debputy_ls_py.html | 174 -
 ...d_5d0ec0d5422112df_lsp_debian_changelog_py.html | 392 ---
 .../d_5d0ec0d5422112df_lsp_debian_control_py.html | 729 ----
 ...112df_lsp_debian_control_reference_data_py.html | 2896 ----------------
 ...d_5d0ec0d5422112df_lsp_debian_copyright_py.html | 604 ----
 ...0d5422112df_lsp_debian_debputy_manifest_py.html | 1134 ------
 .../d_5d0ec0d5422112df_lsp_debian_rules_py.html | 483 ---
 ...0ec0d5422112df_lsp_debian_tests_control_py.html | 585 ----
 .../d_5d0ec0d5422112df_lsp_dispatch_py.html | 330 --
 .../d_5d0ec0d5422112df_lsp_features_py.html | 316 --
 .../d_5d0ec0d5422112df_lsp_generic_deb822_py.html | 481 ---
 .../d_5d0ec0d5422112df_quickfixes_py.html | 301 --
 .../d_5d0ec0d5422112df_spellchecking_py.html | 403 ---
 .../d_5d0ec0d5422112df_text_edit_py.html | 209 --
 .../d_5d0ec0d5422112df_text_util_py.html | 221 --
 .../d_64287305fe0c6642___init___py.html | 136 -
 .../d_64287305fe0c6642_example_processing_py.html | 198 --
 .../d_64287305fe0c6642_feature_set_py.html | 191 --
 coverage-report/d_64287305fe0c6642_impl_py.html | 2060 -----------
 .../d_64287305fe0c6642_impl_types_py.html | 1383 --------
 .../d_64287305fe0c6642_plugin_parser_py.html | 165 -
 coverage-report/d_64287305fe0c6642_spec_py.html | 1842 ----------
 .../d_6c155ce9dd9f7742___init___py.html | 108 -
 coverage-report/d_6c155ce9dd9f7742_compat_py.html | 118 -
 .../d_6e57078c9ef7177d___init___py.html | 99 -
 coverage-report/d_6e57078c9ef7177d_context_py.html | 715 ----
 coverage-report/d_6e57078c9ef7177d_dc_util_py.html | 114 -
 .../d_6e57078c9ef7177d_lint_and_lsp_cmds_py.html | 340 --
 coverage-report/d_6e57078c9ef7177d_output_py.html | 434 ---
 .../d_6e57078c9ef7177d_plugin_cmds_py.html | 1295 -------
 .../d_7764373ba25ba45b___init___py.html | 99 -
 .../d_7764373ba25ba45b_lint_impl_py.html | 448 ---
 .../d_7764373ba25ba45b_lint_util_py.html | 318 --
 coverage-report/d_9ae9c81fc31f2694_gnome_py.html | 170 -
 coverage-report/d_9ae9c81fc31f2694_numpy3_py.html | 161 -
 .../d_9ae9c81fc31f2694_perl_openssl_py.html | 142 -
 .../d_d5d6843b45eec01e___init___py.html | 99 -
 ...d_d5d6843b45eec01e_binary_package_rules_py.html | 817 -----
 .../d_d5d6843b45eec01e_debputy_plugin_py.html | 499 ---
 .../d_d5d6843b45eec01e_discard_rules_py.html | 196 --
 .../d_d5d6843b45eec01e_manifest_root_rules_py.html | 349 --
 .../d_d5d6843b45eec01e_metadata_detectors_py.html | 649 ----
 .../d_d5d6843b45eec01e_package_processors_py.html | 419 ---
 coverage-report/d_d5d6843b45eec01e_paths_py.html | 103 -
 .../d_d5d6843b45eec01e_private_api_py.html | 3030 -----------------
 .../d_d5d6843b45eec01e_service_management_py.html | 549 ---
 ...d6843b45eec01e_shlib_metadata_detectors_py.html | 146 -
 ..._d5d6843b45eec01e_strip_non_determinism_py.html | 363 --
 coverage-report/d_d5d6843b45eec01e_types_py.html | 109 -
 .../d_e9c451f4ae334f76___init___py.html | 290 --
 coverage-report/d_e9c451f4ae334f76__util_py.html | 390 ---
 .../d_e9c451f4ae334f76_formatter_py.html | 577 ----
 .../d_e9c451f4ae334f76_locatable_py.html | 512 ---
 coverage-report/d_e9c451f4ae334f76_parsing_py.html | 3596 --------------------
 coverage-report/d_e9c451f4ae334f76_tokens_py.html | 615 ----
 coverage-report/d_e9c451f4ae334f76_types_py.html | 192 --
 coverage-report/deb_materialization_py.html | 110 -
 coverage-report/deb_packer_py.html | 110 -
 coverage-report/favicon_32.png | Bin 1732 -> 0 bytes
 coverage-report/index.html | 1118 ------
 coverage-report/keybd_closed.png | Bin 9004 -> 0 bytes
 coverage-report/keybd_open.png | Bin 9003 -> 0 bytes
 coverage-report/status.json | 1 -
 coverage-report/style.css | 309 --
 debputy.pod | 48 +
 pyproject.toml | 3 +
 src/debputy/commands/debputy_cmd/__main__.py | 22 +-
 .../commands/debputy_cmd/lint_and_lsp_cmds.py | 40 +-
 src/debputy/commands/debputy_cmd/output.py | 12 +-
 src/debputy/commands/debputy_cmd/plugin_cmds.py | 2 +-
 src/debputy/deb_packaging_support.py | 9 +-
 src/debputy/debhelper_emulation.py | 4 +-
 src/debputy/dh_migration/migrators_impl.py | 4 +-
 src/debputy/filesystem_scan.py | 4 +-
 src/debputy/highlevel_manifest.py | 2 +-
 src/debputy/highlevel_manifest_parser.py | 11 +-
 src/debputy/installations.py | 1 +
 src/debputy/interpreter.py | 4 +
 src/debputy/linting/lint_impl.py | 5 +-
 src/debputy/lsp/debputy_ls.py | 133 +-
 src/debputy/lsp/lsp_debian_changelog.py | 2 +-
 src/debputy/lsp/lsp_debian_control.py | 404 ++-
 .../lsp/lsp_debian_control_reference_data.py | 285 +-
 src/debputy/lsp/lsp_debian_copyright.py | 48 +-
 src/debputy/lsp/lsp_debian_debputy_manifest.py | 17 +-
 src/debputy/lsp/lsp_debian_rules.py | 7 +-
 src/debputy/lsp/lsp_debian_tests_control.py | 45 +-
 src/debputy/lsp/lsp_dispatch.py | 30 +-
 src/debputy/lsp/lsp_features.py | 24 +-
 src/debputy/lsp/lsp_generic_deb822.py | 275 +-
 src/debputy/lsp/lsp_self_check.py | 2 +-
 src/debputy/lsp/quickfixes.py | 114 +-
 src/debputy/lsp/text_util.py | 15 +-
 src/debputy/lsp/vendoring/_deb822_repro/parsing.py | 25 +
 src/debputy/lsp/vendoring/_deb822_repro/tokens.py | 14 +-
 src/debputy/path_matcher.py | 22 +-
 src/debputy/plugin/api/impl_types.py | 2 +-
 src/debputy/plugin/api/spec.py | 8 +-
 src/debputy/plugin/debputy/metadata_detectors.py | 4 +-
 src/debputy/plugin/debputy/private_api.py | 8 +-
 .../plugin/debputy/strip_non_determinism.py | 4 +-
 src/debputy/util.py | 8 +-
 src/debputy/yaml/compat.py | 10 +-
 tests/lint_tests/test_lint_changelog.py | 12 +-
 tests/lint_tests/test_lint_dctrl.py | 1 +
 tests/lint_tests/test_lint_debputy.py | 187 +-
 tests/lsp_tests/lsp_tutil.py | 92 +-
 tests/lsp_tests/test_debpkg_metadata.py | 25 +
 tests/lsp_tests/test_lsp_dctrl.py | 70 +-
 tests/lsp_tests/test_lsp_debputy_manifest_hover.py | 21 +-
 tests/plugin_tests/__init__.py | 0
 tests/plugin_tests/conftest.py | 3 +-
 tests/plugin_tests/numpy3_test.py | 3 +-
 tests/plugin_tests/perl-openssl_test.py | 3 +-
 tests/test_alternatives.py | 2 +
 tests/test_deb_packaging_support.py | 4 +-
 tests/test_debputy_plugin.py | 20 +-
 tests/test_declarative_parser.py | 11 +-
 tests/test_fs_metadata.py | 12 +-
 tests/test_interpreter.py | 6 +-
 tests/test_migrations.py | 6 +-
 tests/test_packager_provided_files.py | 12 +-
 tests/test_plugin_tester.py | 9 +-
 typing-stubs/Levenshtein/__init__.pyi | 1 +
 typing-stubs/README.md | 4 +
 typing-stubs/colored/__init__.pyi | 8 +
 typing-stubs/colored/attributes.pyi | 14 +
 typing-stubs/colored/background.pyi | 14 +
 typing-stubs/colored/colored.pyi | 30 +
 typing-stubs/colored/controls.pyi | 8 +
 typing-stubs/colored/cprint.pyi | 3 +
 typing-stubs/colored/exceptions.pyi | 17 +
 typing-stubs/colored/foreground.pyi | 14 +
 typing-stubs/colored/hexadecimal.pyi | 7 +
 typing-stubs/colored/library.pyi | 17 +
 typing-stubs/colored/utilities.pyi | 13 +
 193 files changed, 1633 insertions(+), 62684 deletions(-)
 delete mode 100644 coverage-report/coverage_html.js
 delete mode 100644 coverage-report/d_08cd1ad648464ded_debputy_self_hosting_py.html
 delete mode 100644 coverage-report/d_08cd1ad648464ded_debputy_self_hosting_test_py.html
 delete mode 100644 coverage-report/d_128305113a77411b___init___py.html
 delete mode 100644 coverage-report/d_128305113a77411b_assemble_deb_py.html
 delete mode 100644 coverage-report/d_23db3c975895bd86___init___py.html
 delete mode 100644 coverage-report/d_23db3c975895bd86_migration_py.html
 delete mode 100644 coverage-report/d_23db3c975895bd86_migrators_impl_py.html
 delete mode 100644 coverage-report/d_23db3c975895bd86_migrators_py.html
 delete mode 100644 coverage-report/d_23db3c975895bd86_models_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878___init___py.html
 delete mode 100644 coverage-report/d_267b6307937f1878__deb_options_profiles_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878__manifest_constants_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_architecture_support_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_builtin_manifest_rules_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_deb_packaging_support_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_debhelper_emulation_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_elf_util_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_exceptions_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_filesystem_scan_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_highlevel_manifest_parser_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_highlevel_manifest_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_installations_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_intermediate_manifest_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_interpreter_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_maintscript_snippet_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_manifest_conditions_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_packager_provided_files_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_packages_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_path_matcher_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_substitution_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_transformation_rules_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_types_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_util_py.html
 delete mode 100644 coverage-report/d_267b6307937f1878_version_py.html
 delete mode 100644 coverage-report/d_2882d0a735873825___init___py.html
 delete mode 100644 coverage-report/d_2882d0a735873825_deb_materialization_py.html
 delete mode 100644 coverage-report/d_2882d0a735873825_deb_packer_py.html
 delete mode 100644 coverage-report/d_36a196ce5f578895___init___py.html
 delete mode 100644 coverage-report/d_36a196ce5f578895_alternatives_py.html
 delete mode 100644 coverage-report/d_36a196ce5f578895_debconf_templates_py.html
 delete mode 100644 coverage-report/d_36a196ce5f578895_makeshlibs_py.html
 delete mode 100644 coverage-report/d_4b9be07fb6071cd2___init___py.html
 delete mode 100644 coverage-report/d_4b9be07fb6071cd2_test_impl_py.html
 delete mode 100644 coverage-report/d_4b9be07fb6071cd2_test_spec_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb___init___py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_base_types_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_declarative_parser_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_exceptions_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_mapper_code_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_parser_data_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_parser_doc_py.html
 delete mode 100644 coverage-report/d_4f754ff76d8638bb_util_py.html
 delete mode 100644 coverage-report/d_4faea183f900b252___init___py.html
 delete mode 100644 coverage-report/d_50e3cc0df0cc5f51___init___py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df___init___py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_debputy_ls_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_changelog_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_control_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_control_reference_data_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_copyright_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_debputy_manifest_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_rules_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_debian_tests_control_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_dispatch_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_features_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_lsp_generic_deb822_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_quickfixes_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_spellchecking_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_text_edit_py.html
 delete mode 100644 coverage-report/d_5d0ec0d5422112df_text_util_py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642___init___py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642_example_processing_py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642_feature_set_py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642_impl_py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642_impl_types_py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642_plugin_parser_py.html
 delete mode 100644 coverage-report/d_64287305fe0c6642_spec_py.html
 delete mode 100644 coverage-report/d_6c155ce9dd9f7742___init___py.html
 delete mode 100644 coverage-report/d_6c155ce9dd9f7742_compat_py.html
 delete mode 100644 coverage-report/d_6e57078c9ef7177d___init___py.html
 delete mode 100644 coverage-report/d_6e57078c9ef7177d_context_py.html
 delete mode 100644 coverage-report/d_6e57078c9ef7177d_dc_util_py.html
 delete mode 100644 coverage-report/d_6e57078c9ef7177d_lint_and_lsp_cmds_py.html
 delete mode 100644 coverage-report/d_6e57078c9ef7177d_output_py.html
 delete mode 100644 coverage-report/d_6e57078c9ef7177d_plugin_cmds_py.html
 delete mode 100644 coverage-report/d_7764373ba25ba45b___init___py.html
 delete mode 100644 coverage-report/d_7764373ba25ba45b_lint_impl_py.html
 delete mode 100644 coverage-report/d_7764373ba25ba45b_lint_util_py.html
 delete mode 100644 coverage-report/d_9ae9c81fc31f2694_gnome_py.html
 delete mode 100644 coverage-report/d_9ae9c81fc31f2694_numpy3_py.html
 delete mode 100644 coverage-report/d_9ae9c81fc31f2694_perl_openssl_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e___init___py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_binary_package_rules_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_debputy_plugin_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_discard_rules_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_manifest_root_rules_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_metadata_detectors_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_package_processors_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_paths_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_private_api_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_service_management_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_shlib_metadata_detectors_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_strip_non_determinism_py.html
 delete mode 100644 coverage-report/d_d5d6843b45eec01e_types_py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76___init___py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76__util_py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76_formatter_py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76_locatable_py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76_parsing_py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76_tokens_py.html
 delete mode 100644 coverage-report/d_e9c451f4ae334f76_types_py.html
 delete mode 100644 coverage-report/deb_materialization_py.html
 delete mode 100644 coverage-report/deb_packer_py.html
 delete mode 100644 coverage-report/favicon_32.png
 delete mode 100644 coverage-report/index.html
 delete mode 100644 coverage-report/keybd_closed.png
 delete mode 100644 coverage-report/keybd_open.png
 delete mode 100644 coverage-report/status.json
 delete mode 100644 coverage-report/style.css
 create mode 100644 tests/lsp_tests/test_debpkg_metadata.py
 create mode 100644 tests/plugin_tests/__init__.py
 create mode 100644 typing-stubs/Levenshtein/__init__.pyi
 create mode 100644 typing-stubs/README.md
 create mode 100644 typing-stubs/colored/__init__.pyi
 create mode 100644 typing-stubs/colored/attributes.pyi
 create mode 100644 typing-stubs/colored/background.pyi
 create mode 100644 typing-stubs/colored/colored.pyi
 create mode 100644 typing-stubs/colored/controls.pyi
 create mode 100644 typing-stubs/colored/cprint.pyi
 create mode 100644 typing-stubs/colored/exceptions.pyi
 create mode 100644 typing-stubs/colored/foreground.pyi
 create mode 100644 typing-stubs/colored/hexadecimal.pyi
 create mode 100644 typing-stubs/colored/library.pyi
 create mode 100644 typing-stubs/colored/utilities.pyi

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6be9bf4..565d0d9 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -30,6 +30,27 @@ tests-unstable:
     - apt-get build-dep -Ppkg.debputy.ci -y .
     - dpkg-buildpackage -Ppkg.debputy.ci -us -uc -tc
 
+code-lint-mypy:
+  stage: ci-test
+  image: debian:unstable
+  script:
+    - apt-get update
+    - apt-get build-dep -Ppkg.debputy.ci -y .
+    - apt-get install -y mypy python3-lxml
+    # Remove the `|| true` once we get to a sufficient level of typing where we can set the minimum bar
+    - mypy --html-report mypy-report --junit-format per_file --junit-xml mypy-xunit-report.xml --cobertura-xml-report mypy-cobertura-report src tests || true
+  artifacts:
+    paths:
+      - mypy-report
+    reports:
+      junit: mypy-xunit-report.xml
+      coverage_report:
+        coverage_format: cobertura
+        path: mypy-cobertura-report/cobertura.xml
+  except:
+    variables:
+      - $CI_COMMIT_TAG != null && $SALSA_CI_ENABLE_PIPELINE_ON_TAGS !~ /^(1|yes|true)$/
+
 tests-unstable-coverage-without-optional-bd:
   stage: ci-test
   image: debian:unstable
@@ -111,8 +132,10 @@ pages:
   script:
     - mkdir public
     - mv coverage-report public/
+    - mv mypy-report public/
   dependencies:
     - aggregate-coverage
+    - code-lint-mypy
   artifacts:
     paths:
       - public
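[The new code-lint-mypy job can be reproduced outside GitLab CI. The sketch below is not part of the patch; it drives the same mypy invocation through mypy's documented Python API (mypy.api.run, which accepts the mypy command line as a list and returns stdout, stderr, and the exit status). It assumes mypy and lxml are installed (mypy's HTML/XML report generators need lxml, which is why the job installs python3-lxml) and that it is run from the repository root.

    # Sketch (not from the patch): mirror the code-lint-mypy job locally via
    # mypy's Python API. The report flags are copied verbatim from the job.
    from mypy import api

    stdout, stderr, exit_status = api.run(
        [
            "--html-report", "mypy-report",
            "--junit-format", "per_file",
            "--junit-xml", "mypy-xunit-report.xml",
            "--cobertura-xml-report", "mypy-cobertura-report",
            "src", "tests",
        ]
    )
    print(stdout, end="")
    # The CI job appends `|| true`: typing gaps are published as artifacts but
    # do not (yet) fail the pipeline, so exit_status is deliberately ignored.
]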
diff --git a/coverage-report/coverage_html.js b/coverage-report/coverage_html.js
deleted file mode 100644
index 4c32118..0000000
--- a/coverage-report/coverage_html.js
+++ /dev/null
@@ -1,624 +0,0 @@
 [Hunk body not reproduced: 624 removed lines of coverage.py's stock HTML
 report browser script (Apache License 2.0) -- keyboard shortcuts, index
 filtering and sorting, chunk navigation, and scroll markers -- deleted
 together with the rest of the checked-in coverage-report/ directory.]
-
-

- Coverage for self-hosting-plugins/debputy_self_hosting.py: - 73% -

- -

- 13 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1import textwrap 

-

2 

-

3from debputy.plugin.api import ( 

-

4 DebputyPluginInitializer, 

-

5 VirtualPath, 

-

6 BinaryCtrlAccessor, 

-

7 PackageProcessingContext, 

-

8) 

-

9from debputy.util import POSTINST_DEFAULT_CONDITION 

-

10 

-

11 

-

12def _maintscript_generator( 

-

13 _path: VirtualPath, 

-

14 ctrl: BinaryCtrlAccessor, 

-

15 context: PackageProcessingContext, 

-

16) -> None: 

-

17 maintscript = ctrl.maintscript 

-

18 

-

19 # When `debputy` becomes a stand-alone package, it should have these maintscripts instead of dh-debputy 

-

20 # Admittedly, I hope to get rid of this plugin before then, but ... 

-

21 assert context.binary_package.name != "debputy", "Update the self-hosting plugin" 

-

22 dirname = "/usr/share/debputy" 

-

23 

-

24 if context.binary_package.name == "dh-debputy": 24 ↛ 25line 24 didn't jump to line 25, because the condition on line 24 was never true

-

25 ctrl.dpkg_trigger("interest-noawait", dirname) 

-

26 maintscript.unconditionally_in_script( 

-

27 "postinst", 

-

28 textwrap.dedent( 

-

29 f"""\ 

-

30 if {POSTINST_DEFAULT_CONDITION} || [ "$1" = "triggered" ] ; then 

-

31 # Ensure all plugins are byte-compiled (plus uninstalled plugins are cleaned up) 

-

32 py3clean {dirname} 

-

33 if command -v py3compile >/dev/null 2>&1; then 

-

34 py3compile {dirname} 

-

35 fi 

-

36 if command -v pypy3compile >/dev/null 2>&1; then 

-

37 pypy3compile {dirname} || true 

-

38 fi 

-

39 fi 

-

40 """ 

-

41 ), 

-

42 ) 

-

43 maintscript.unconditionally_in_script( 

-

44 "prerm", 

-

45 textwrap.dedent( 

-

46 f"""\ 

-

47 if command -v py3clean >/dev/null 2>&1; then 

-

48 py3clean {dirname} 

-

49 else 

-

50 find {dirname}/ -type d -name __pycache__ -empty -print0 | xargs --null --no-run-if-empty rmdir 

-

51 fi 

-

52 """ 

-

53 ), 

-

54 ) 

-

55 

-

56 

-

57def initializer(api: DebputyPluginInitializer) -> None: 

-

58 api.metadata_or_maintscript_detector( 

-

59 "debputy-self-hosting", 

-

60 _maintscript_generator, 

-

61 ) 

-
- - - diff --git a/coverage-report/d_08cd1ad648464ded_debputy_self_hosting_test_py.html b/coverage-report/d_08cd1ad648464ded_debputy_self_hosting_test_py.html deleted file mode 100644 index d1ca33e..0000000 --- a/coverage-report/d_08cd1ad648464ded_debputy_self_hosting_test_py.html +++ /dev/null @@ -1,109 +0,0 @@ - - - - - Coverage for self-hosting-plugins/debputy_self_hosting_test.py: 100% - - - - - -
-
-

- Coverage for self-hosting-plugins/debputy_self_hosting_test.py: - 100% -

- -

- 5 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1from debputy.plugin.api.test_api import ( 

-

2 initialize_plugin_under_test, 

-

3 build_virtual_file_system, 

-

4) 

-

5 

-

6 

-

7def test_plugin(): 

-

8 plugin = initialize_plugin_under_test() 

-

9 fs = build_virtual_file_system([]) 

-

10 plugin.run_metadata_detector("debputy-self-hosting", fs) 

-
- - - diff --git a/coverage-report/d_128305113a77411b___init___py.html b/coverage-report/d_128305113a77411b___init___py.html deleted file mode 100644 index a110cd4..0000000 --- a/coverage-report/d_128305113a77411b___init___py.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - - Coverage for src/debputy/package_build/__init__.py: 100% - - - - - -
-
-

- Coverage for src/debputy/package_build/__init__.py: - 100% -

- -

- 0 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-
- - - diff --git a/coverage-report/d_128305113a77411b_assemble_deb_py.html b/coverage-report/d_128305113a77411b_assemble_deb_py.html deleted file mode 100644 index 9e6b9ef..0000000 --- a/coverage-report/d_128305113a77411b_assemble_deb_py.html +++ /dev/null @@ -1,354 +0,0 @@ - - - - - Coverage for src/debputy/package_build/assemble_deb.py: 14% - - - - - -
-
-

- Coverage for src/debputy/package_build/assemble_deb.py: - 14% -

- -

- 98 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1import json 

-

2import os 

-

3import subprocess 

-

4from typing import Optional, Sequence, List, Tuple 

-

5 

-

6from debputy import DEBPUTY_ROOT_DIR 

-

7from debputy.commands.debputy_cmd.context import CommandContext 

-

8from debputy.deb_packaging_support import setup_control_files 

-

9from debputy.debhelper_emulation import dhe_dbgsym_root_dir 

-

10from debputy.filesystem_scan import FSRootDir 

-

11from debputy.highlevel_manifest import HighLevelManifest 

-

12from debputy.intermediate_manifest import IntermediateManifest 

-

13from debputy.plugin.api.impl_types import PackageDataTable 

-

14from debputy.util import ( 

-

15 escape_shell, 

-

16 _error, 

-

17 compute_output_filename, 

-

18 scratch_dir, 

-

19 ensure_dir, 

-

20 _warn, 

-

21 assume_not_none, 

-

22) 

-

23 

-

24 

-

25_RRR_DEB_ASSEMBLY_KEYWORD = "debputy/deb-assembly" 

-

26_WARNED_ABOUT_FALLBACK_ASSEMBLY = False 

-

27 

-

28 

-

29def _serialize_intermediate_manifest(members: IntermediateManifest) -> str: 

-

30 serial_format = [m.to_manifest() for m in members] 

-

31 return json.dumps(serial_format) 

-

32 

-

33 

-

34def determine_assembly_method( 

-

35 package: str, 

-

36 intermediate_manifest: IntermediateManifest, 

-

37) -> Tuple[bool, bool, List[str]]: 

-

38 paths_needing_root = ( 

-

39 tm for tm in intermediate_manifest if tm.owner != "root" or tm.group != "root" 

-

40 ) 

-

41 matched_path = next(paths_needing_root, None) 

-

42 if matched_path is None: 

-

43 return False, False, [] 

-

44 rrr = os.environ.get("DEB_RULES_REQUIRES_ROOT") 

-

45 if rrr and _RRR_DEB_ASSEMBLY_KEYWORD in rrr: 

-

46 gain_root_cmd = os.environ.get("DEB_GAIN_ROOT_CMD") 

-

47 if not gain_root_cmd: 

-

48 _error( 

-

49 "DEB_RULES_REQUIRES_ROOT contains a debputy keyword but DEB_GAIN_ROOT_CMD does not contain a " 

-

50 '"gain root" command' 

-

51 ) 

-

52 return True, False, gain_root_cmd.split() 

-

53 if rrr == "no": 

-

54 global _WARNED_ABOUT_FALLBACK_ASSEMBLY 

-

55 if not _WARNED_ABOUT_FALLBACK_ASSEMBLY: 

-

56 _warn( 

-

57 'Using internal assembly method due to "Rules-Requires-Root" being "no" and dpkg-deb assembly would' 

-

58 " require (fake)root for binary packages that needs it." 

-

59 ) 

-

60 _WARNED_ABOUT_FALLBACK_ASSEMBLY = True 

-

61 return True, True, [] 

-

62 

-

63 _error( 

-

64 f'Due to the path "{matched_path.member_path}" in {package}, the package assembly will require (fake)root.' 

-

65 " However, this command is not run as root nor was debputy requested to use a root command via" 

-

66 f' "Rules-Requires-Root". Please consider adding "{_RRR_DEB_ASSEMBLY_KEYWORD}" to "Rules-Requires-Root"' 

-

67 " in debian/control. Though, due to #1036865, you may have to revert to" 

-

68 ' "Rules-Requires-Root: binary-targets" depending on which version of dpkg you need to support.' 

-

69 ' Alternatively, you can set "Rules-Requires-Root: no" in debian/control and debputy will assemble' 

-

70 " the package anyway. In this case, dpkg-deb will not be used, but the output should be bit-for-bit" 

-

71 " compatible with what debputy would have produced with dpkg-deb (and root/fakeroot)." 

-

72 ) 

-

73 

-

74 

-

75def assemble_debs( 

-

76 context: CommandContext, 

-

77 manifest: HighLevelManifest, 

-

78 package_data_table: PackageDataTable, 

-

79 is_dh_rrr_only_mode: bool, 

-

80) -> None: 

-

81 parsed_args = context.parsed_args 

-

82 output_path = parsed_args.output 

-

83 upstream_args = parsed_args.upstream_args 

-

84 deb_materialize = str(DEBPUTY_ROOT_DIR / "deb_materialization.py") 

-

85 mtime = context.mtime 

-

86 

-

87 for dctrl_bin in manifest.active_packages: 

-

88 package = dctrl_bin.name 

-

89 dbgsym_package_name = f"{package}-dbgsym" 

-

90 dctrl_data = package_data_table[package] 

-

91 fs_root = dctrl_data.fs_root 

-

92 control_output_dir = assume_not_none(dctrl_data.control_output_dir) 

-

93 package_metadata_context = dctrl_data.package_metadata_context 

-

94 if ( 

-

95 dbgsym_package_name in package_data_table 

-

96 or "noautodbgsym" in manifest.build_env.deb_build_options 

-

97 or "noddebs" in manifest.build_env.deb_build_options 

-

98 ): 

-

99 # Discard the dbgsym part if it conflicts with a real package, or 

-

100 # we were asked not to build it. 

-

101 dctrl_data.dbgsym_info.dbgsym_fs_root = FSRootDir() 

-

102 dctrl_data.dbgsym_info.dbgsym_ids.clear() 

-

103 dbgsym_fs_root = dctrl_data.dbgsym_info.dbgsym_fs_root 

-

104 dbgsym_ids = dctrl_data.dbgsym_info.dbgsym_ids 

-

105 intermediate_manifest = manifest.finalize_data_tar_contents( 

-

106 package, fs_root, mtime 

-

107 ) 

-

108 

-

109 setup_control_files( 

-

110 dctrl_data, 

-

111 manifest, 

-

112 dbgsym_fs_root, 

-

113 dbgsym_ids, 

-

114 package_metadata_context, 

-

115 allow_ctrl_file_management=not is_dh_rrr_only_mode, 

-

116 ) 

-

117 

-

118 needs_root, use_fallback_assembly, gain_root_cmd = determine_assembly_method( 

-

119 package, intermediate_manifest 

-

120 ) 

-

121 

-

122 if not dctrl_bin.is_udeb and any( 

-

123 f for f in dbgsym_fs_root.all_paths() if f.is_file 

-

124 ): 

-

125 # We never built udebs due to #797391. We currently do not generate a control 

-

126 # file for it either for the same reason. 

-

127 dbgsym_root = dhe_dbgsym_root_dir(dctrl_bin) 

-

128 if not os.path.isdir(output_path): 

-

129 _error( 

-

130 "Cannot produce a dbgsym package when output path is not a directory." 

-

131 ) 

-

132 dbgsym_intermediate_manifest = manifest.finalize_data_tar_contents( 

-

133 dbgsym_package_name, 

-

134 dbgsym_fs_root, 

-

135 mtime, 

-

136 ) 

-

137 _assemble_deb( 

-

138 dbgsym_package_name, 

-

139 deb_materialize, 

-

140 dbgsym_intermediate_manifest, 

-

141 mtime, 

-

142 os.path.join(dbgsym_root, "DEBIAN"), 

-

143 output_path, 

-

144 upstream_args, 

-

145 is_udeb=dctrl_bin.is_udeb, # Review this if we ever do dbgsyms for udebs 

-

146 use_fallback_assembly=False, 

-

147 needs_root=False, 

-

148 ) 

-

149 

-

150 _assemble_deb( 

-

151 package, 

-

152 deb_materialize, 

-

153 intermediate_manifest, 

-

154 mtime, 

-

155 control_output_dir, 

-

156 output_path, 

-

157 upstream_args, 

-

158 is_udeb=dctrl_bin.is_udeb, 

-

159 use_fallback_assembly=use_fallback_assembly, 

-

160 needs_root=needs_root, 

-

161 gain_root_cmd=gain_root_cmd, 

-

162 ) 

-

163 

-

164 

-

165def _assemble_deb( 

-

166 package: str, 

-

167 deb_materialize_cmd: str, 

-

168 intermediate_manifest: IntermediateManifest, 

-

169 mtime: int, 

-

170 control_output_dir: str, 

-

171 output_path: str, 

-

172 upstream_args: Optional[List[str]], 

-

173 is_udeb: bool = False, 

-

174 use_fallback_assembly: bool = False, 

-

175 needs_root: bool = False, 

-

176 gain_root_cmd: Optional[Sequence[str]] = None, 

-

177) -> None: 

-

178 scratch_root_dir = scratch_dir() 

-

179 materialization_dir = os.path.join( 

-

180 scratch_root_dir, "materialization-dirs", package 

-

181 ) 

-

182 ensure_dir(os.path.dirname(materialization_dir)) 

-

183 materialize_cmd: List[str] = [] 

-

184 assert not use_fallback_assembly or not gain_root_cmd 

-

185 if needs_root and gain_root_cmd: 

-

186 # Only use the gain_root_cmd if we absolutely need it. 

-

187 # Note that gain_root_cmd will be empty unless R³ is set to the relevant keyword 

-

188 # that would make us use targeted promotion. Therefore, we do not need to check other 

-

189 # conditions than the package needing root. (R³: binary-targets implies `needs_root=True` 

-

190 # without a gain_root_cmd) 

-

191 materialize_cmd.extend(gain_root_cmd) 

-

192 materialize_cmd.extend( 

-

193 [ 

-

194 deb_materialize_cmd, 

-

195 "materialize-deb", 

-

196 "--intermediate-package-manifest", 

-

197 "-", 

-

198 "--may-move-control-files", 

-

199 "--may-move-data-files", 

-

200 "--source-date-epoch", 

-

201 str(mtime), 

-

202 "--discard-existing-output", 

-

203 control_output_dir, 

-

204 materialization_dir, 

-

205 ] 

-

206 ) 

-

207 output = output_path 

-

208 if is_udeb: 

-

209 materialize_cmd.append("--udeb") 

-

210 output = os.path.join( 

-

211 output_path, compute_output_filename(control_output_dir, True) 

-

212 ) 

-

213 

-

214 assembly_method = "debputy" if needs_root and use_fallback_assembly else "dpkg-deb" 

-

215 combined_materialization_and_assembly = not needs_root 

-

216 if combined_materialization_and_assembly: 

-

217 materialize_cmd.extend( 

-

218 ["--build-method", assembly_method, "--assembled-deb-output", output] 

-

219 ) 

-

220 

-

221 if upstream_args: 

-

222 materialize_cmd.append("--") 

-

223 materialize_cmd.extend(upstream_args) 

-

224 

-

225 if combined_materialization_and_assembly: 

-

226 print( 

-

227 f"Materializing and assembling {package} via: {escape_shell(*materialize_cmd)}" 

-

228 ) 

-

229 else: 

-

230 print(f"Materializing {package} via: {escape_shell(*materialize_cmd)}") 

-

231 proc = subprocess.Popen(materialize_cmd, stdin=subprocess.PIPE) 

-

232 proc.communicate( 

-

233 _serialize_intermediate_manifest(intermediate_manifest).encode("utf-8") 

-

234 ) 

-

235 if proc.returncode != 0: 

-

236 _error(f"{escape_shell(deb_materialize_cmd)} exited with a non-zero exit code!") 

-

237 

-

238 if not combined_materialization_and_assembly: 

-

239 build_materialization = [ 

-

240 deb_materialize_cmd, 

-

241 "build-materialized-deb", 

-

242 materialization_dir, 

-

243 assembly_method, 

-

244 "--output", 

-

245 output, 

-

246 ] 

-

247 print(f"Assembling {package} via: {escape_shell(*build_materialization)}") 

-

248 try: 

-

249 subprocess.check_call(build_materialization) 

-

250 except subprocess.CalledProcessError as e: 

-

251 exit_code = f" with exit code {e.returncode}" if e.returncode else "" 

-

252 _error( 

-

253 f"Assembly command for {package} failed{exit_code}. Please review the output of the command" 

-

254 f" for more details on the problem." 

-

255 ) 

-
- - - diff --git a/coverage-report/d_23db3c975895bd86___init___py.html b/coverage-report/d_23db3c975895bd86___init___py.html deleted file mode 100644 index 38e3e3e..0000000 --- a/coverage-report/d_23db3c975895bd86___init___py.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - - Coverage for src/debputy/dh_migration/__init__.py: 100% - - - - - -
-
-

- Coverage for src/debputy/dh_migration/__init__.py: - 100% -

- -

- 0 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-
diff --git a/coverage-report/d_23db3c975895bd86_migration_py.html b/coverage-report/d_23db3c975895bd86_migration_py.html
deleted file mode 100644
index 05f9cac..0000000
--- a/coverage-report/d_23db3c975895bd86_migration_py.html
+++ /dev/null
@@ -1,445 +0,0 @@
[445 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/dh_migration/migration.py (192 statements, 7% coverage)]
- - - diff --git a/coverage-report/d_23db3c975895bd86_migrators_impl_py.html b/coverage-report/d_23db3c975895bd86_migrators_impl_py.html deleted file mode 100644 index 34ee9ac..0000000 --- a/coverage-report/d_23db3c975895bd86_migrators_impl_py.html +++ /dev/null @@ -1,1829 +0,0 @@ - - - - - Coverage for src/debputy/dh_migration/migrators_impl.py: 81% - - - - - -
-
-

- Coverage for src/debputy/dh_migration/migrators_impl.py: - 81% -

- -

- 669 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1import collections 

-

2import dataclasses 

-

3import functools 

-

4import json 

-

5import os 

-

6import re 

-

7import subprocess 

-

8from typing import ( 

-

9 Iterable, 

-

10 Optional, 

-

11 Tuple, 

-

12 List, 

-

13 Set, 

-

14 Mapping, 

-

15 Any, 

-

16 Union, 

-

17 Callable, 

-

18 TypeVar, 

-

19 Dict, 

-

20) 

-

21 

-

22from debian.deb822 import Deb822 

-

23 

-

24from debputy import DEBPUTY_DOC_ROOT_DIR 

-

25from debputy.architecture_support import dpkg_architecture_table 

-

26from debputy.deb_packaging_support import dpkg_field_list_pkg_dep 

-

27from debputy.debhelper_emulation import ( 

-

28 dhe_filedoublearray, 

-

29 DHConfigFileLine, 

-

30 dhe_pkgfile, 

-

31 parse_drules_for_addons, 

-

32 extract_dh_addons_from_control, 

-

33) 

-

34from debputy.dh_migration.models import ( 

-

35 ConflictingChange, 

-

36 FeatureMigration, 

-

37 UnsupportedFeature, 

-

38 AcceptableMigrationIssues, 

-

39 DHMigrationSubstitution, 

-

40) 

-

41from debputy.highlevel_manifest import ( 

-

42 MutableYAMLSymlink, 

-

43 HighLevelManifest, 

-

44 MutableYAMLConffileManagementItem, 

-

45 AbstractMutableYAMLInstallRule, 

-

46) 

-

47from debputy.installations import MAN_GUESS_FROM_BASENAME, MAN_GUESS_LANG_FROM_PATH 

-

48from debputy.packages import BinaryPackage 

-

49from debputy.plugin.api import VirtualPath 

-

50from debputy.util import ( 

-

51 _error, 

-

52 PKGVERSION_REGEX, 

-

53 PKGNAME_REGEX, 

-

54 _normalize_path, 

-

55 assume_not_none, 

-

56 has_glob_magic, 

-

57) 

-

58 

-

59MIGRATION_TARGET_DH_DEBPUTY_RRR = "dh-sequence-zz-debputy-rrr" 

-

60MIGRATION_TARGET_DH_DEBPUTY = "dh-sequence-zz-debputy" 

-

61 

-

62 

-

63# Align with debputy.py 

-

64DH_COMMANDS_REPLACED = { 

-

65 MIGRATION_TARGET_DH_DEBPUTY_RRR: frozenset( 

-

66 { 

-

67 "dh_fixperms", 

-

68 "dh_shlibdeps", 

-

69 "dh_gencontrol", 

-

70 "dh_md5sums", 

-

71 "dh_builddeb", 

-

72 } 

-

73 ), 

-

74 MIGRATION_TARGET_DH_DEBPUTY: frozenset( 

-

75 { 

-

76 "dh_install", 

-

77 "dh_installdocs", 

-

78 "dh_installchangelogs", 

-

79 "dh_installexamples", 

-

80 "dh_installman", 

-

81 "dh_installcatalogs", 

-

82 "dh_installcron", 

-

83 "dh_installdebconf", 

-

84 "dh_installemacsen", 

-

85 "dh_installifupdown", 

-

86 "dh_installinfo", 

-

87 "dh_installinit", 

-

88 "dh_installsysusers", 

-

89 "dh_installtmpfiles", 

-

90 "dh_installsystemd", 

-

91 "dh_installsystemduser", 

-

92 "dh_installmenu", 

-

93 "dh_installmime", 

-

94 "dh_installmodules", 

-

95 "dh_installlogcheck", 

-

96 "dh_installlogrotate", 

-

97 "dh_installpam", 

-

98 "dh_installppp", 

-

99 "dh_installudev", 

-

100 "dh_installgsettings", 

-

101 "dh_installinitramfs", 

-

102 "dh_installalternatives", 

-

103 "dh_bugfiles", 

-

104 "dh_ucf", 

-

105 "dh_lintian", 

-

106 "dh_icons", 

-

107 "dh_usrlocal", 

-

108 "dh_perl", 

-

109 "dh_link", 

-

110 "dh_installwm", 

-

111 "dh_installxfonts", 

-

112 "dh_strip_nondeterminism", 

-

113 "dh_compress", 

-

114 "dh_fixperms", 

-

115 "dh_dwz", 

-

116 "dh_strip", 

-

117 "dh_makeshlibs", 

-

118 "dh_shlibdeps", 

-

119 "dh_missing", 

-

120 "dh_installdeb", 

-

121 "dh_gencontrol", 

-

122 "dh_md5sums", 

-

123 "dh_builddeb", 

-

124 } 

-

125 ), 

-

126} 

-

127 

-

128_GS_DOC = f"{DEBPUTY_DOC_ROOT_DIR}/GETTING-STARTED-WITH-dh-debputy.md" 

-

129MIGRATION_AID_FOR_OVERRIDDEN_COMMANDS = { 

-

130 "dh_installinit": f"{_GS_DOC}#covert-your-overrides-for-dh_installsystemd-dh_installinit-if-any", 

-

131 "dh_installsystemd": f"{_GS_DOC}#covert-your-overrides-for-dh_installsystemd-dh_installinit-if-any", 

-

132 "dh_fixperms": f"{_GS_DOC}#convert-your-overrides-or-excludes-for-dh_fixperms-if-any", 

-

133 "dh_gencontrol": f"{_GS_DOC}#convert-your-overrides-for-dh_gencontrol-if-any", 

-

134} 

-

135 

-

136 

-

137@dataclasses.dataclass(frozen=True, slots=True) 

-

138class UnsupportedDHConfig: 

-

139 dh_config_basename: str 

-

140 dh_tool: str 

-

141 bug_950723_prefix_matching: bool = False 

-

142 is_missing_migration: bool = False 

-

143 

-

144 

-

145@dataclasses.dataclass(frozen=True, slots=True) 

-

146class DHSequenceMigration: 

-

147 debputy_plugin: str 

-

148 remove_dh_sequence: bool = True 

-

149 must_use_zz_debputy: bool = False 

-

150 

-

151 

-

152UNSUPPORTED_DH_CONFIGS_AND_TOOLS_FOR_ZZ_DEBPUTY = [ 

-

153 UnsupportedDHConfig("config", "dh_installdebconf"), 

-

154 UnsupportedDHConfig("templates", "dh_installdebconf"), 

-

155 UnsupportedDHConfig("emacsen-compat", "dh_installemacsen"), 

-

156 UnsupportedDHConfig("emacsen-install", "dh_installemacsen"), 

-

157 UnsupportedDHConfig("emacsen-remove", "dh_installemacsen"), 

-

158 UnsupportedDHConfig("emacsen-startup", "dh_installemacsen"), 

-

159 # The `upstart` file should be long dead, but we might as well detect it. 

-

160 UnsupportedDHConfig("upstart", "dh_installinit"), 

-

161 # dh_installsystemduser 

-

162 UnsupportedDHConfig( 

-

163 "user.path", "dh_installsystemduser", bug_950723_prefix_matching=False 

-

164 ), 

-

165 UnsupportedDHConfig( 

-

166 "user.path", "dh_installsystemduser", bug_950723_prefix_matching=True 

-

167 ), 

-

168 UnsupportedDHConfig( 

-

169 "user.service", "dh_installsystemduser", bug_950723_prefix_matching=False 

-

170 ), 

-

171 UnsupportedDHConfig( 

-

172 "user.service", "dh_installsystemduser", bug_950723_prefix_matching=True 

-

173 ), 

-

174 UnsupportedDHConfig( 

-

175 "user.socket", "dh_installsystemduser", bug_950723_prefix_matching=False 

-

176 ), 

-

177 UnsupportedDHConfig( 

-

178 "user.socket", "dh_installsystemduser", bug_950723_prefix_matching=True 

-

179 ), 

-

180 UnsupportedDHConfig( 

-

181 "user.target", "dh_installsystemduser", bug_950723_prefix_matching=False 

-

182 ), 

-

183 UnsupportedDHConfig( 

-

184 "user.target", "dh_installsystemduser", bug_950723_prefix_matching=True 

-

185 ), 

-

186 UnsupportedDHConfig( 

-

187 "user.timer", "dh_installsystemduser", bug_950723_prefix_matching=False 

-

188 ), 

-

189 UnsupportedDHConfig( 

-

190 "user.timer", "dh_installsystemduser", bug_950723_prefix_matching=True 

-

191 ), 

-

192 UnsupportedDHConfig("udev", "dh_installudev"), 

-

193 UnsupportedDHConfig("menu", "dh_installmenu"), 

-

194 UnsupportedDHConfig("menu-method", "dh_installmenu"), 

-

195 UnsupportedDHConfig("ucf", "dh_ucf"), 

-

196 UnsupportedDHConfig("wm", "dh_installwm"), 

-

197 UnsupportedDHConfig("triggers", "dh_installdeb"), 

-

198 UnsupportedDHConfig("postinst", "dh_installdeb"), 

-

199 UnsupportedDHConfig("postrm", "dh_installdeb"), 

-

200 UnsupportedDHConfig("preinst", "dh_installdeb"), 

-

201 UnsupportedDHConfig("prerm", "dh_installdeb"), 

-

202 UnsupportedDHConfig("menutest", "dh_installdeb"), 

-

203 UnsupportedDHConfig("isinstallable", "dh_installdeb"), 

-

204] 

-

205SUPPORTED_DH_ADDONS = frozenset( 

-

206 { 

-

207 # debputy's own 

-

208 "debputy", 

-

209 "zz-debputy", 

-

210 # debhelper provided sequences that should work. 

-

211 "single-binary", 

-

212 } 

-

213) 

-

214DH_ADDONS_TO_REMOVE = frozenset( 

-

215 [ 

-

216 # Sequences debputy directly replaces 

-

217 "dwz", 

-

218 "elf-tools", 

-

219 "installinitramfs", 

-

220 "installsysusers", 

-

221 "doxygen", 

-

222 # Sequences that are embedded fully into debputy 

-

223 "bash-completion", 

-

224 "sodeps", 

-

225 ] 

-

226) 

-

227DH_ADDONS_TO_PLUGINS = { 

-

228 "gnome": DHSequenceMigration( 

-

229 "gnome", 

-

230 # The sequence still provides a command for the clean sequence 

-

231 remove_dh_sequence=False, 

-

232 must_use_zz_debputy=True, 

-

233 ), 

-

234 "numpy3": DHSequenceMigration( 

-

235 "numpy3", 

-

236 # The sequence provides (build-time) dependencies that we cannot provide 

-

237 remove_dh_sequence=False, 

-

238 must_use_zz_debputy=True, 

-

239 ), 

-

240 "perl-openssl": DHSequenceMigration( 

-

241 "perl-openssl", 

-

242 # The sequence provides (build-time) dependencies that we cannot provide 

-

243 remove_dh_sequence=False, 

-

244 must_use_zz_debputy=True, 

-

245 ), 

-

246} 

-

247 

-

248 

-

249def _dh_config_file( 

-

250 debian_dir: VirtualPath, 

-

251 dctrl_bin: BinaryPackage, 

-

252 basename: str, 

-

253 helper_name: str, 

-

254 acceptable_migration_issues: AcceptableMigrationIssues, 

-

255 feature_migration: FeatureMigration, 

-

256 manifest: HighLevelManifest, 

-

257 support_executable_files: bool = False, 

-

258 allow_dh_exec_rename: bool = False, 

-

259 pkgfile_lookup: bool = True, 

-

260 remove_on_migration: bool = True, 

-

261) -> Union[Tuple[None, None], Tuple[VirtualPath, Iterable[DHConfigFileLine]]]: 

-

262 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

263 dh_config_file = ( 

-

264 dhe_pkgfile(debian_dir, dctrl_bin, basename) 

-

265 if pkgfile_lookup 

-

266 else debian_dir.get(basename) 

-

267 ) 

-

268 if dh_config_file is None or dh_config_file.is_dir: 

-

269 return None, None 

-

270 if dh_config_file.is_executable and not support_executable_files: 

-

271 primary_key = f"executable-{helper_name}-config" 

-

272 if ( 

-

273 primary_key in acceptable_migration_issues 

-

274 or "any-executable-dh-configs" in acceptable_migration_issues 

-

275 ): 

-

276 feature_migration.warn( 

-

277 f'TODO: MANUAL MIGRATION of executable dh config "{dh_config_file}" is required.' 

-

278 ) 

-

279 return None, None 

-

280 raise UnsupportedFeature( 

-

281 f"Executable configuration files not supported (found: {dh_config_file}).", 

-

282 [primary_key, "any-executable-dh-configs"], 

-

283 ) 

-

284 

-

285 if remove_on_migration: 

-

286 feature_migration.remove_on_success(dh_config_file.fs_path) 

-

287 substitution = DHMigrationSubstitution( 

-

288 dpkg_architecture_table(), 

-

289 acceptable_migration_issues, 

-

290 feature_migration, 

-

291 mutable_manifest, 

-

292 ) 

-

293 content = dhe_filedoublearray( 

-

294 dh_config_file, 

-

295 substitution, 

-

296 allow_dh_exec_rename=allow_dh_exec_rename, 

-

297 ) 

-

298 return dh_config_file, content 

-

299 

-

300 

-

301def _validate_rm_mv_conffile( 

-

302 package: str, 

-

303 config_line: DHConfigFileLine, 

-

304) -> Tuple[str, str, Optional[str], Optional[str], Optional[str]]: 

-

305 cmd, *args = config_line.tokens 

-

306 if "--" in config_line.tokens: 306 ↛ 307line 306 didn't jump to line 307, because the condition on line 306 was never true

-

307 raise ValueError( 

-

308 f'The maintscripts file "{config_line.config_file.path}" for {package} includes a "--" in line' 

-

309 f" {config_line.line_no}. The offending line is: {config_line.original_line}" 

-

310 ) 

-

311 if cmd == "rm_conffile": 

-

312 min_args = 1 

-

313 max_args = 3 

-

314 else: 

-

315 min_args = 2 

-

316 max_args = 4 

-

317 if len(args) > max_args or len(args) < min_args: 317 ↛ 318line 317 didn't jump to line 318, because the condition on line 317 was never true

-

318 raise ValueError( 

-

319 f'The "{cmd}" command takes at least {min_args} and at most {max_args} arguments. However,' 

-

320 f' in "{config_line.config_file.path}" line {config_line.line_no} (for {package}), there' 

-

321 f" are {len(args)} arguments. The offending line is: {config_line.original_line}" 

-

322 ) 

-

323 

-

324 obsolete_conffile = args[0] 

-

325 new_conffile = args[1] if cmd == "mv_conffile" else None 

-

326 prior_version = args[min_args] if len(args) > min_args else None 

-

327 owning_package = args[min_args + 1] if len(args) > min_args + 1 else None 

-

328 if not obsolete_conffile.startswith("/"): 328 ↛ 329line 328 didn't jump to line 329, because the condition on line 328 was never true

-

329 raise ValueError( 

-

330 f'The (old-)conffile parameter for {cmd} must be absolute (i.e., start with "/"). However,' 

-

331 f' in "{config_line.config_file.path}" line {config_line.line_no} (for {package}), it was specified' 

-

332 f' as "{obsolete_conffile}". The offending line is: {config_line.original_line}' 

-

333 ) 

-

334 if new_conffile is not None and not new_conffile.startswith("/"): 334 ↛ 335line 334 didn't jump to line 335, because the condition on line 334 was never true

-

335 raise ValueError( 

-

336 f'The new-conffile parameter for {cmd} must be absolute (i.e., start with "/"). However,' 

-

337 f' in "{config_line.config_file.path}" line {config_line.line_no} (for {package}), it was specified' 

-

338 f' as "{new_conffile}". The offending line is: {config_line.original_line}' 

-

339 ) 

-

340 if prior_version is not None and not PKGVERSION_REGEX.fullmatch(prior_version): 340 ↛ 341line 340 didn't jump to line 341, because the condition on line 340 was never true

-

341 raise ValueError( 

-

342 f"The prior-version parameter for {cmd} must be a valid package version (i.e., match" 

-

343 f' {PKGVERSION_REGEX}). However, in "{config_line.config_file.path}" line {config_line.line_no}' 

-

344 f' (for {package}), it was specified as "{prior_version}". The offending line is:' 

-

345 f" {config_line.original_line}" 

-

346 ) 

-

347 if owning_package is not None and not PKGNAME_REGEX.fullmatch(owning_package): 347 ↛ 348line 347 didn't jump to line 348, because the condition on line 347 was never true

-

348 raise ValueError( 

-

349 f"The package parameter for {cmd} must be a valid package name (i.e., match {PKGNAME_REGEX})." 

-

350 f' However, in "{config_line.config_file.path}" line {config_line.line_no} (for {package}), it' 

-

351 f' was specified as "{owning_package}". The offending line is: {config_line.original_line}' 

-

352 ) 

-

353 return cmd, obsolete_conffile, new_conffile, prior_version, owning_package 

-

354 

-

355 

-

356_BASH_COMPLETION_RE = re.compile( 

-

357 r""" 

-

358 (^|[|&;])\s*complete.*-[A-Za-z].* 

-

359 | \$\(.*\) 

-

360 | \s*compgen.*-[A-Za-z].* 

-

361 | \s*if.*;.*then/ 

-

362""", 

-

363 re.VERBOSE, 

-

364) 

-

365 

-

366 

-

367def migrate_bash_completion( 

-

368 debian_dir: VirtualPath, 

-

369 manifest: HighLevelManifest, 

-

370 acceptable_migration_issues: AcceptableMigrationIssues, 

-

371 feature_migration: FeatureMigration, 

-

372 _migration_target: str, 

-

373) -> None: 

-

374 feature_migration.tagline = "dh_bash-completion files" 

-

375 is_single_binary = sum(1 for _ in manifest.all_packages) == 1 

-

376 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

377 installations = mutable_manifest.installations(create_if_absent=False) 

-

378 

-

379 for dctrl_bin in manifest.all_packages: 

-

380 dh_file = dhe_pkgfile(debian_dir, dctrl_bin, "bash-completion") 

-

381 if dh_file is None: 

-

382 continue 

-

383 is_bash_completion_file = False 

-

384 with dh_file.open() as fd: 

-

385 for line in fd: 

-

386 line = line.strip() 

-

387 if not line or line[0] == "#": 387 ↛ 388line 387 didn't jump to line 388, because the condition on line 387 was never true

-

388 continue 

-

389 if _BASH_COMPLETION_RE.search(line): 

-

390 is_bash_completion_file = True 

-

391 break 

-

392 if not is_bash_completion_file: 

-

393 _, content = _dh_config_file( 

-

394 debian_dir, 

-

395 dctrl_bin, 

-

396 "bash-completion", 

-

397 "dh_bash-completion", 

-

398 acceptable_migration_issues, 

-

399 feature_migration, 

-

400 manifest, 

-

401 support_executable_files=True, 

-

402 ) 

-

403 else: 

-

404 content = None 

-

405 

-

406 if content: 

-

407 install_dest_sources: List[str] = [] 

-

408 install_as_rules: List[Tuple[str, str]] = [] 

-

409 for dhe_line in content: 

-

410 if len(dhe_line.tokens) > 2: 410 ↛ 411line 410 didn't jump to line 411, because the condition on line 410 was never true

-

411 raise UnsupportedFeature( 

-

412 f"The dh_bash-completion file {dh_file.path} more than two words on" 

-

413 f' line {dhe_line.line_no} (line: "{dhe_line.original_line}").' 

-

414 ) 

-

415 source = dhe_line.tokens[0] 

-

416 dest_basename = ( 

-

417 dhe_line.tokens[1] 

-

418 if len(dhe_line.tokens) > 1 

-

419 else os.path.basename(source) 

-

420 ) 

-

421 if source.startswith("debian/") and not has_glob_magic(source): 

-

422 if dctrl_bin.name != dest_basename: 

-

423 dest_path = ( 

-

424 f"debian/{dctrl_bin.name}.{dest_basename}.bash-completion" 

-

425 ) 

-

426 else: 

-

427 dest_path = f"debian/{dest_basename}.bash-completion" 

-

428 feature_migration.rename_on_success(source, dest_path) 

-

429 elif len(dhe_line.tokens) == 1: 

-

430 install_dest_sources.append(source) 

-

431 else: 

-

432 install_as_rules.append((source, dest_basename)) 

-

433 

-

434 if install_dest_sources: 434 ↛ 448line 434 didn't jump to line 448, because the condition on line 434 was never false

-

435 sources = ( 

-

436 install_dest_sources 

-

437 if len(install_dest_sources) > 1 

-

438 else install_dest_sources[0] 

-

439 ) 

-

440 installations.append( 

-

441 AbstractMutableYAMLInstallRule.install_dest( 

-

442 sources=sources, 

-

443 dest_dir="{{path:BASH_COMPLETION_DIR}}", 

-

444 into=dctrl_bin.name if not is_single_binary else None, 

-

445 ) 

-

446 ) 

-

447 

-

448 for source, dest_basename in install_as_rules: 

-

449 installations.append( 

-

450 AbstractMutableYAMLInstallRule.install_as( 

-

451 source=source, 

-

452 install_as="{{path:BASH_COMPLETION_DIR}}/" + dest_basename, 

-

453 into=dctrl_bin.name if not is_single_binary else None, 

-

454 ) 

-

455 ) 

-

456 

-

457 

-

458def migrate_dh_installsystemd_files( 

-

459 debian_dir: VirtualPath, 

-

460 manifest: HighLevelManifest, 

-

461 _acceptable_migration_issues: AcceptableMigrationIssues, 

-

462 feature_migration: FeatureMigration, 

-

463 _migration_target: str, 

-

464) -> None: 

-

465 feature_migration.tagline = "dh_installsystemd files" 

-

466 for dctrl_bin in manifest.all_packages: 

-

467 for stem in [ 

-

468 "path", 

-

469 "service", 

-

470 "socket", 

-

471 "target", 

-

472 "timer", 

-

473 ]: 

-

474 pkgfile = dhe_pkgfile( 

-

475 debian_dir, dctrl_bin, stem, bug_950723_prefix_matching=True 

-

476 ) 

-

477 if not pkgfile: 

-

478 continue 

-

479 if not pkgfile.name.endswith(f".{stem}") or "@." not in pkgfile.name: 479 ↛ 480line 479 didn't jump to line 480, because the condition on line 479 was never true

-

480 raise UnsupportedFeature( 

-

481 f'Unable to determine the correct name for {pkgfile.fs_path}. It should be a ".@{stem}"' 

-

482 f" file now (foo@.service => foo.@service)" 

-

483 ) 

-

484 newname = pkgfile.name.replace("@.", ".") 

-

485 newname = newname[: -len(stem)] + f"@{stem}" 

-

486 feature_migration.rename_on_success( 

-

487 pkgfile.fs_path, os.path.join(debian_dir.fs_path, newname) 

-

488 ) 

-

489 

-

490 

-

491def migrate_maintscript( 

-

492 debian_dir: VirtualPath, 

-

493 manifest: HighLevelManifest, 

-

494 acceptable_migration_issues: AcceptableMigrationIssues, 

-

495 feature_migration: FeatureMigration, 

-

496 _migration_target: str, 

-

497) -> None: 

-

498 feature_migration.tagline = "dh_installdeb files" 

-

499 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

500 for dctrl_bin in manifest.all_packages: 

-

501 mainscript_file, content = _dh_config_file( 

-

502 debian_dir, 

-

503 dctrl_bin, 

-

504 "maintscript", 

-

505 "dh_installdeb", 

-

506 acceptable_migration_issues, 

-

507 feature_migration, 

-

508 manifest, 

-

509 ) 

-

510 

-

511 if mainscript_file is None: 

-

512 continue 

-

513 assert content is not None 

-

514 

-

515 package_definition = mutable_manifest.package(dctrl_bin.name) 

-

516 conffiles = { 

-

517 it.obsolete_conffile: it 

-

518 for it in package_definition.conffile_management_items() 

-

519 } 

-

520 seen_conffiles = set() 

-

521 

-

522 for dhe_line in content: 

-

523 cmd = dhe_line.tokens[0] 

-

524 if cmd not in {"rm_conffile", "mv_conffile"}: 524 ↛ 525line 524 didn't jump to line 525, because the condition on line 524 was never true

-

525 raise UnsupportedFeature( 

-

526 f"The dh_installdeb file {mainscript_file.path} contains the (currently)" 

-

527 f' unsupported command "{cmd}" on line {dhe_line.line_no}' 

-

528 f' (line: "{dhe_line.original_line}")' 

-

529 ) 

-

530 

-

531 try: 

-

532 ( 

-

533 _, 

-

534 obsolete_conffile, 

-

535 new_conffile, 

-

536 prior_to_version, 

-

537 owning_package, 

-

538 ) = _validate_rm_mv_conffile(dctrl_bin.name, dhe_line) 

-

539 except ValueError as e: 

-

540 _error( 

-

541 f"Validation error in {mainscript_file} on line {dhe_line.line_no}. The error was: {e.args[0]}." 

-

542 ) 

-

543 

-

544 if obsolete_conffile in seen_conffiles: 544 ↛ 545line 544 didn't jump to line 545, because the condition on line 544 was never true

-

545 raise ConflictingChange( 

-

546 f'The {mainscript_file} file defines actions for "{obsolete_conffile}" twice!' 

-

547 f" Please ensure that it is defined at most once in that file." 

-

548 ) 

-

549 seen_conffiles.add(obsolete_conffile) 

-

550 

-

551 if cmd == "rm_conffile": 

-

552 item = MutableYAMLConffileManagementItem.rm_conffile( 

-

553 obsolete_conffile, 

-

554 prior_to_version, 

-

555 owning_package, 

-

556 ) 

-

557 else: 

-

558 assert cmd == "mv_conffile" 

-

559 item = MutableYAMLConffileManagementItem.mv_conffile( 

-

560 obsolete_conffile, 

-

561 assume_not_none(new_conffile), 

-

562 prior_to_version, 

-

563 owning_package, 

-

564 ) 

-

565 

-

566 existing_def = conffiles.get(item.obsolete_conffile) 

-

567 if existing_def is not None: 567 ↛ 568line 567 didn't jump to line 568, because the condition on line 567 was never true

-

568 if not ( 

-

569 item.command == existing_def.command 

-

570 and item.new_conffile == existing_def.new_conffile 

-

571 and item.prior_to_version == existing_def.prior_to_version 

-

572 and item.owning_package == existing_def.owning_package 

-

573 ): 

-

574 raise ConflictingChange( 

-

575 f"The maintscript defines the action {item.command} for" 

-

576 f' "{obsolete_conffile}" in {mainscript_file}, but there is another' 

-

577 f" conffile management definition for same path defined already (in the" 

-

578 f" existing manifest or an migration e.g., inside {mainscript_file})" 

-

579 ) 

-

580 feature_migration.already_present += 1 

-

581 continue 

-

582 

-

583 package_definition.add_conffile_management(item) 

-

584 feature_migration.successful_manifest_changes += 1 

-

585 

-

586 

-

587@dataclasses.dataclass(slots=True) 

-

588class SourcesAndConditional: 

-

589 dest_dir: Optional[str] = None 

-

590 sources: List[str] = dataclasses.field(default_factory=list) 

-

591 conditional: Optional[Union[str, Mapping[str, Any]]] = None 

-

592 

-

593 

-

594def _strip_d_tmp(p: str) -> str: 

-

595 if p.startswith("debian/tmp/") and len(p) > 11: 

-

596 return p[11:] 

-

597 return p 

-

598 

-

599 

-

600def migrate_install_file( 

-

601 debian_dir: VirtualPath, 

-

602 manifest: HighLevelManifest, 

-

603 acceptable_migration_issues: AcceptableMigrationIssues, 

-

604 feature_migration: FeatureMigration, 

-

605 _migration_target: str, 

-

606) -> None: 

-

607 feature_migration.tagline = "dh_install config files" 

-

608 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

609 installations = mutable_manifest.installations(create_if_absent=False) 

-

610 priority_lines = [] 

-

611 remaining_install_lines = [] 

-

612 warn_about_fixmes_in_dest_dir = False 

-

613 

-

614 is_single_binary = sum(1 for _ in manifest.all_packages) == 1 

-

615 

-

616 for dctrl_bin in manifest.all_packages: 

-

617 install_file, content = _dh_config_file( 

-

618 debian_dir, 

-

619 dctrl_bin, 

-

620 "install", 

-

621 "dh_install", 

-

622 acceptable_migration_issues, 

-

623 feature_migration, 

-

624 manifest, 

-

625 support_executable_files=True, 

-

626 allow_dh_exec_rename=True, 

-

627 ) 

-

628 if not install_file or not content: 

-

629 continue 

-

630 current_sources = [] 

-

631 sources_by_destdir: Dict[Tuple[str, Tuple[str, ...]], SourcesAndConditional] = ( 

-

632 {} 

-

633 ) 

-

634 install_as_rules = [] 

-

635 multi_dest = collections.defaultdict(list) 

-

636 seen_sources = set() 

-

637 multi_dest_sources: Set[str] = set() 

-

638 

-

639 for dhe_line in content: 

-

640 special_rule = None 

-

641 if "=>" in dhe_line.tokens: 

-

642 if dhe_line.tokens[0] == "=>" and len(dhe_line.tokens) == 2: 

-

643 # This rule must be as early as possible to retain the semantics 

-

644 path = _strip_d_tmp( 

-

645 _normalize_path(dhe_line.tokens[1], with_prefix=False) 

-

646 ) 

-

647 special_rule = AbstractMutableYAMLInstallRule.install_dest( 

-

648 path, 

-

649 dctrl_bin.name if not is_single_binary else None, 

-

650 dest_dir=None, 

-

651 when=dhe_line.conditional(), 

-

652 ) 

-

653 elif len(dhe_line.tokens) != 3: 653 ↛ 654line 653 didn't jump to line 654, because the condition on line 653 was never true

-

654 _error( 

-

655 f"Validation error in {install_file.path} on line {dhe_line.line_no}. Cannot migrate dh-exec" 

-

656 ' renames that is not exactly "SOURCE => TARGET" or "=> TARGET".' 

-

657 ) 

-

658 else: 

-

659 install_rule = AbstractMutableYAMLInstallRule.install_as( 

-

660 _strip_d_tmp( 

-

661 _normalize_path(dhe_line.tokens[0], with_prefix=False) 

-

662 ), 

-

663 _normalize_path(dhe_line.tokens[2], with_prefix=False), 

-

664 dctrl_bin.name if not is_single_binary else None, 

-

665 when=dhe_line.conditional(), 

-

666 ) 

-

667 install_as_rules.append(install_rule) 

-

668 else: 

-

669 if len(dhe_line.tokens) > 1: 

-

670 sources = list( 

-

671 _strip_d_tmp(_normalize_path(w, with_prefix=False)) 

-

672 for w in dhe_line.tokens[:-1] 

-

673 ) 

-

674 dest_dir = _normalize_path(dhe_line.tokens[-1], with_prefix=False) 

-

675 else: 

-

676 sources = list( 

-

677 _strip_d_tmp(_normalize_path(w, with_prefix=False)) 

-

678 for w in dhe_line.tokens 

-

679 ) 

-

680 dest_dir = None 

-

681 

-

682 multi_dest_sources.update(s for s in sources if s in seen_sources) 

-

683 seen_sources.update(sources) 

-

684 

-

685 if dest_dir is None and dhe_line.conditional() is None: 

-

686 current_sources.extend(sources) 

-

687 continue 

-

688 key = (dest_dir, dhe_line.conditional_key()) 

-

689 ctor = functools.partial( 

-

690 SourcesAndConditional, 

-

691 dest_dir=dest_dir, 

-

692 conditional=dhe_line.conditional(), 

-

693 ) 

-

694 md = _fetch_or_create( 

-

695 sources_by_destdir, 

-

696 key, 

-

697 ctor, 

-

698 ) 

-

699 md.sources.extend(sources) 

-

700 

-

701 if special_rule: 

-

702 priority_lines.append(special_rule) 

-

703 

-

704 remaining_install_lines.extend(install_as_rules) 

-

705 

-

706 for md in sources_by_destdir.values(): 

-

707 if multi_dest_sources: 

-

708 sources = [s for s in md.sources if s not in multi_dest_sources] 

-

709 already_installed = (s for s in md.sources if s in multi_dest_sources) 

-

710 for s in already_installed: 

-

711 # The sources are ignored, so we can reuse the object as-is 

-

712 multi_dest[s].append(md) 

-

713 if not sources: 

-

714 continue 

-

715 else: 

-

716 sources = md.sources 

-

717 install_rule = AbstractMutableYAMLInstallRule.install_dest( 

-

718 sources[0] if len(sources) == 1 else sources, 

-

719 dctrl_bin.name if not is_single_binary else None, 

-

720 dest_dir=md.dest_dir, 

-

721 when=md.conditional, 

-

722 ) 

-

723 remaining_install_lines.append(install_rule) 

-

724 

-

725 if current_sources: 

-

726 if multi_dest_sources: 

-

727 sources = [s for s in current_sources if s not in multi_dest_sources] 

-

728 already_installed = ( 

-

729 s for s in current_sources if s in multi_dest_sources 

-

730 ) 

-

731 for s in already_installed: 

-

732 # The sources are ignored, so we can reuse the object as-is 

-

733 dest_dir = os.path.dirname(s) 

-

734 if has_glob_magic(dest_dir): 

-

735 warn_about_fixmes_in_dest_dir = True 

-

736 dest_dir = f"FIXME: {dest_dir} (could not reliably compute the dest dir)" 

-

737 multi_dest[s].append( 

-

738 SourcesAndConditional( 

-

739 dest_dir=dest_dir, 

-

740 conditional=None, 

-

741 ) 

-

742 ) 

-

743 else: 

-

744 sources = current_sources 

-

745 

-

746 if sources: 

-

747 install_rule = AbstractMutableYAMLInstallRule.install_dest( 

-

748 sources[0] if len(sources) == 1 else sources, 

-

749 dctrl_bin.name if not is_single_binary else None, 

-

750 dest_dir=None, 

-

751 ) 

-

752 remaining_install_lines.append(install_rule) 

-

753 

-

754 if multi_dest: 

-

755 for source, dest_and_conditionals in multi_dest.items(): 

-

756 dest_dirs = [dac.dest_dir for dac in dest_and_conditionals] 

-

757 # We assume the conditional is the same. 

-

758 conditional = next( 

-

759 iter( 

-

760 dac.conditional 

-

761 for dac in dest_and_conditionals 

-

762 if dac.conditional is not None 

-

763 ), 

-

764 None, 

-

765 ) 

-

766 remaining_install_lines.append( 

-

767 AbstractMutableYAMLInstallRule.multi_dest_install( 

-

768 source, 

-

769 dest_dirs, 

-

770 dctrl_bin.name if not is_single_binary else None, 

-

771 when=conditional, 

-

772 ) 

-

773 ) 

-

774 

-

775 if priority_lines: 

-

776 installations.extend(priority_lines) 

-

777 

-

778 if remaining_install_lines: 

-

779 installations.extend(remaining_install_lines) 

-

780 

-

781 feature_migration.successful_manifest_changes += len(priority_lines) + len( 

-

782 remaining_install_lines 

-

783 ) 

-

784 if warn_about_fixmes_in_dest_dir: 

-

785 feature_migration.warn( 

-

786 "TODO: FIXME left in dest-dir(s) of some installation rules." 

-

787 " Please review these and remove the FIXME (plus correct as necessary)" 

-

788 ) 

-

789 

-

790 

-

791def migrate_installdocs_file( 

-

792 debian_dir: VirtualPath, 

-

793 manifest: HighLevelManifest, 

-

794 acceptable_migration_issues: AcceptableMigrationIssues, 

-

795 feature_migration: FeatureMigration, 

-

796 _migration_target: str, 

-

797) -> None: 

-

798 feature_migration.tagline = "dh_installdocs config files" 

-

799 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

800 installations = mutable_manifest.installations(create_if_absent=False) 

-

801 

-

802 is_single_binary = sum(1 for _ in manifest.all_packages) == 1 

-

803 

-

804 for dctrl_bin in manifest.all_packages: 

-

805 install_file, content = _dh_config_file( 

-

806 debian_dir, 

-

807 dctrl_bin, 

-

808 "docs", 

-

809 "dh_installdocs", 

-

810 acceptable_migration_issues, 

-

811 feature_migration, 

-

812 manifest, 

-

813 support_executable_files=True, 

-

814 ) 

-

815 if not install_file: 

-

816 continue 

-

817 assert content is not None 

-

818 docs: List[str] = [] 

-

819 for dhe_line in content: 

-

820 if dhe_line.arch_filter or dhe_line.build_profile_filter: 820 ↛ 821line 820 didn't jump to line 821, because the condition on line 820 was never true

-

821 _error( 

-

822 f"Unable to migrate line {dhe_line.line_no} of {install_file.path}." 

-

823 " Missing support for conditions." 

-

824 ) 

-

825 docs.extend(_normalize_path(w, with_prefix=False) for w in dhe_line.tokens) 

-

826 

-

827 if not docs: 827 ↛ 828line 827 didn't jump to line 828, because the condition on line 827 was never true

-

828 continue 

-

829 feature_migration.successful_manifest_changes += 1 

-

830 install_rule = AbstractMutableYAMLInstallRule.install_docs( 

-

831 docs if len(docs) > 1 else docs[0], 

-

832 dctrl_bin.name if not is_single_binary else None, 

-

833 ) 

-

834 installations.create_definition_if_missing() 

-

835 installations.append(install_rule) 

-

836 

-

837 

-

838def migrate_installexamples_file( 

-

839 debian_dir: VirtualPath, 

-

840 manifest: HighLevelManifest, 

-

841 acceptable_migration_issues: AcceptableMigrationIssues, 

-

842 feature_migration: FeatureMigration, 

-

843 _migration_target: str, 

-

844) -> None: 

-

845 feature_migration.tagline = "dh_installexamples config files" 

-

846 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

847 installations = mutable_manifest.installations(create_if_absent=False) 

-

848 is_single_binary = sum(1 for _ in manifest.all_packages) == 1 

-

849 

-

850 for dctrl_bin in manifest.all_packages: 

-

851 install_file, content = _dh_config_file( 

-

852 debian_dir, 

-

853 dctrl_bin, 

-

854 "examples", 

-

855 "dh_installexamples", 

-

856 acceptable_migration_issues, 

-

857 feature_migration, 

-

858 manifest, 

-

859 support_executable_files=True, 

-

860 ) 

-

861 if not install_file: 

-

862 continue 

-

863 assert content is not None 

-

864 examples: List[str] = [] 

-

865 for dhe_line in content: 

-

866 if dhe_line.arch_filter or dhe_line.build_profile_filter: 866 ↛ 867line 866 didn't jump to line 867, because the condition on line 866 was never true

-

867 _error( 

-

868 f"Unable to migrate line {dhe_line.line_no} of {install_file.path}." 

-

869 " Missing support for conditions." 

-

870 ) 

-

871 examples.extend( 

-

872 _normalize_path(w, with_prefix=False) for w in dhe_line.tokens 

-

873 ) 

-

874 

-

875 if not examples: 875 ↛ 876line 875 didn't jump to line 876, because the condition on line 875 was never true

-

876 continue 

-

877 feature_migration.successful_manifest_changes += 1 

-

878 install_rule = AbstractMutableYAMLInstallRule.install_examples( 

-

879 examples if len(examples) > 1 else examples[0], 

-

880 dctrl_bin.name if not is_single_binary else None, 

-

881 ) 

-

882 installations.create_definition_if_missing() 

-

883 installations.append(install_rule) 

-

884 

-

885 

-

886@dataclasses.dataclass(slots=True) 

-

887class InfoFilesDefinition: 

-

888 sources: List[str] = dataclasses.field(default_factory=list) 

-

889 conditional: Optional[Union[str, Mapping[str, Any]]] = None 

-

890 

-

891 

-

892def migrate_installinfo_file( 

-

893 debian_dir: VirtualPath, 

-

894 manifest: HighLevelManifest, 

-

895 acceptable_migration_issues: AcceptableMigrationIssues, 

-

896 feature_migration: FeatureMigration, 

-

897 _migration_target: str, 

-

898) -> None: 

-

899 feature_migration.tagline = "dh_installinfo config files" 

-

900 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

901 installations = mutable_manifest.installations(create_if_absent=False) 

-

902 is_single_binary = sum(1 for _ in manifest.all_packages) == 1 

-

903 

-

904 for dctrl_bin in manifest.all_packages: 

-

905 info_file, content = _dh_config_file( 

-

906 debian_dir, 

-

907 dctrl_bin, 

-

908 "info", 

-

909 "dh_installinfo", 

-

910 acceptable_migration_issues, 

-

911 feature_migration, 

-

912 manifest, 

-

913 support_executable_files=True, 

-

914 ) 

-

915 if not info_file: 

-

916 continue 

-

917 assert content is not None 

-

918 info_files_by_condition: Dict[Tuple[str, ...], InfoFilesDefinition] = {} 

-

919 for dhe_line in content: 

-

920 key = dhe_line.conditional_key() 

-

921 ctr = functools.partial( 

-

922 InfoFilesDefinition, conditional=dhe_line.conditional() 

-

923 ) 

-

924 info_def = _fetch_or_create( 

-

925 info_files_by_condition, 

-

926 key, 

-

927 ctr, 

-

928 ) 

-

929 info_def.sources.extend( 

-

930 _normalize_path(w, with_prefix=False) for w in dhe_line.tokens 

-

931 ) 

-

932 

-

933 if not info_files_by_condition: 933 ↛ 934line 933 didn't jump to line 934, because the condition on line 933 was never true

-

934 continue 

-

935 feature_migration.successful_manifest_changes += 1 

-

936 installations.create_definition_if_missing() 

-

937 for info_def in info_files_by_condition.values(): 

-

938 info_files = info_def.sources 

-

939 install_rule = AbstractMutableYAMLInstallRule.install_docs( 

-

940 info_files if len(info_files) > 1 else info_files[0], 

-

941 dctrl_bin.name if not is_single_binary else None, 

-

942 dest_dir="{{path:GNU_INFO_DIR}}", 

-

943 when=info_def.conditional, 

-

944 ) 

-

945 installations.append(install_rule) 

-

946 

-

947 

-

948@dataclasses.dataclass(slots=True) 

-

949class ManpageDefinition: 

-

950 sources: List[str] = dataclasses.field(default_factory=list) 

-

951 language: Optional[str] = None 

-

952 conditional: Optional[Union[str, Mapping[str, Any]]] = None 

-

953 

-

954 

-

955DK = TypeVar("DK") 

-

956DV = TypeVar("DV") 

-

957 

-

958 

-

959def _fetch_or_create(d: Dict[DK, DV], key: DK, factory: Callable[[], DV]) -> DV: 

-

960 v = d.get(key) 

-

961 if v is None: 

-

962 v = factory() 

-

963 d[key] = v 

-

964 return v 

-

965 

-

966 

-

967def migrate_installman_file( 

-

968 debian_dir: VirtualPath, 

-

969 manifest: HighLevelManifest, 

-

970 acceptable_migration_issues: AcceptableMigrationIssues, 

-

971 feature_migration: FeatureMigration, 

-

972 _migration_target: str, 

-

973) -> None: 

-

974 feature_migration.tagline = "dh_installman config files" 

-

975 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

976 installations = mutable_manifest.installations(create_if_absent=False) 

-

977 is_single_binary = sum(1 for _ in manifest.all_packages) == 1 

-

978 warn_about_basename = False 

-

979 

-

980 for dctrl_bin in manifest.all_packages: 

-

981 manpages_file, content = _dh_config_file( 

-

982 debian_dir, 

-

983 dctrl_bin, 

-

984 "manpages", 

-

985 "dh_installman", 

-

986 acceptable_migration_issues, 

-

987 feature_migration, 

-

988 manifest, 

-

989 support_executable_files=True, 

-

990 allow_dh_exec_rename=True, 

-

991 ) 

-

992 if not manpages_file: 

-

993 continue 

-

994 assert content is not None 

-

995 

-

996 vanilla_definitions = [] 

-

997 install_as_rules = [] 

-

998 complex_definitions: Dict[ 

-

999 Tuple[Optional[str], Tuple[str, ...]], ManpageDefinition 

-

1000 ] = {} 

-

1001 install_rule: AbstractMutableYAMLInstallRule 

-

1002 for dhe_line in content: 

-

1003 if "=>" in dhe_line.tokens: 1003 ↛ 1006line 1003 didn't jump to line 1006, because the condition on line 1003 was never true

-

1004 # dh-exec allows renaming features. For `debputy`, we degenerate it into an `install` (w. `as`) feature 

-

1005 # without any of the `install-man` features. 

-

1006 if dhe_line.tokens[0] == "=>" and len(dhe_line.tokens) == 2: 

-

1007 _error( 

-

1008 f'Unsupported "=> DEST" rule for error in {manpages_file.path} on line {dhe_line.line_no}."' 

-

1009 f' Cannot migrate dh-exec renames that is not exactly "SOURCE => TARGET" for d/manpages files.' 

-

1010 ) 

-

1011 elif len(dhe_line.tokens) != 3: 

-

1012 _error( 

-

1013 f"Validation error in {manpages_file.path} on line {dhe_line.line_no}. Cannot migrate dh-exec" 

-

1014 ' renames that is not exactly "SOURCE => TARGET" or "=> TARGET".' 

-

1015 ) 

-

1016 else: 

-

1017 install_rule = AbstractMutableYAMLInstallRule.install_doc_as( 

-

1018 _normalize_path(dhe_line.tokens[0], with_prefix=False), 

-

1019 _normalize_path(dhe_line.tokens[2], with_prefix=False), 

-

1020 dctrl_bin.name if not is_single_binary else None, 

-

1021 when=dhe_line.conditional(), 

-

1022 ) 

-

1023 install_as_rules.append(install_rule) 

-

1024 continue 

-

1025 

-

1026 sources = [_normalize_path(w, with_prefix=False) for w in dhe_line.tokens] 

-

1027 needs_basename = any( 

-

1028 MAN_GUESS_FROM_BASENAME.search(x) 

-

1029 and not MAN_GUESS_LANG_FROM_PATH.search(x) 

-

1030 for x in sources 

-

1031 ) 

-

1032 if needs_basename or dhe_line.conditional() is not None: 

-

1033 if needs_basename: 1033 ↛ 1037line 1033 didn't jump to line 1037, because the condition on line 1033 was never false

-

1034 warn_about_basename = True 

-

1035 language = "derive-from-basename" 

-

1036 else: 

-

1037 language = None 

-

1038 key = (language, dhe_line.conditional_key()) 

-

1039 ctor = functools.partial( 

-

1040 ManpageDefinition, 

-

1041 language=language, 

-

1042 conditional=dhe_line.conditional(), 

-

1043 ) 

-

1044 manpage_def = _fetch_or_create( 

-

1045 complex_definitions, 

-

1046 key, 

-

1047 ctor, 

-

1048 ) 

-

1049 manpage_def.sources.extend(sources) 

-

1050 else: 

-

1051 vanilla_definitions.extend(sources) 

-

1052 

-

1053 if not install_as_rules and not vanilla_definitions and not complex_definitions: 1053 ↛ 1054line 1053 didn't jump to line 1054, because the condition on line 1053 was never true

-

1054 continue 

-

1055 feature_migration.successful_manifest_changes += 1 

-

1056 installations.create_definition_if_missing() 

-

1057 installations.extend(install_as_rules) 

-

1058 if vanilla_definitions: 1058 ↛ 1070line 1058 didn't jump to line 1070, because the condition on line 1058 was never false

-

1059 man_source = ( 

-

1060 vanilla_definitions 

-

1061 if len(vanilla_definitions) > 1 

-

1062 else vanilla_definitions[0] 

-

1063 ) 

-

1064 install_rule = AbstractMutableYAMLInstallRule.install_man( 

-

1065 man_source, 

-

1066 dctrl_bin.name if not is_single_binary else None, 

-

1067 None, 

-

1068 ) 

-

1069 installations.append(install_rule) 

-

1070 for manpage_def in complex_definitions.values(): 

-

1071 sources = manpage_def.sources 

-

1072 install_rule = AbstractMutableYAMLInstallRule.install_man( 

-

1073 sources if len(sources) > 1 else sources[0], 

-

1074 dctrl_bin.name if not is_single_binary else None, 

-

1075 manpage_def.language, 

-

1076 when=manpage_def.conditional, 

-

1077 ) 

-

1078 installations.append(install_rule) 

-

1079 

-

1080 if warn_about_basename: 

-

1081 feature_migration.warn( 

-

1082 'Detected man pages that might rely on "derive-from-basename" logic. Please double check' 

-

1083 " that the generated `install-man` rules are correct" 

-

1084 ) 

-

1085 

-

1086 

-

1087def migrate_not_installed_file( 

-

1088 debian_dir: VirtualPath, 

-

1089 manifest: HighLevelManifest, 

-

1090 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1091 feature_migration: FeatureMigration, 

-

1092 _migration_target: str, 

-

1093) -> None: 

-

1094 feature_migration.tagline = "dh_missing's not-installed config file" 

-

1095 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

1096 installations = mutable_manifest.installations(create_if_absent=False) 

-

1097 main_binary = [p for p in manifest.all_packages if p.is_main_package][0] 

-

1098 

-

1099 missing_file, content = _dh_config_file( 

-

1100 debian_dir, 

-

1101 main_binary, 

-

1102 "not-installed", 

-

1103 "dh_missing", 

-

1104 acceptable_migration_issues, 

-

1105 feature_migration, 

-

1106 manifest, 

-

1107 support_executable_files=False, 

-

1108 pkgfile_lookup=False, 

-

1109 ) 

-

1110 discard_rules: List[str] = [] 

-

1111 if missing_file: 

-

1112 assert content is not None 

-

1113 for dhe_line in content: 

-

1114 discard_rules.extend( 

-

1115 _normalize_path(w, with_prefix=False) for w in dhe_line.tokens 

-

1116 ) 

-

1117 

-

1118 if discard_rules: 

-

1119 feature_migration.successful_manifest_changes += 1 

-

1120 install_rule = AbstractMutableYAMLInstallRule.discard( 

-

1121 discard_rules if len(discard_rules) > 1 else discard_rules[0], 

-

1122 ) 

-

1123 installations.create_definition_if_missing() 

-

1124 installations.append(install_rule) 

-

1125 

-

1126 

-

1127def detect_pam_files( 

-

1128 debian_dir: VirtualPath, 

-

1129 manifest: HighLevelManifest, 

-

1130 _acceptable_migration_issues: AcceptableMigrationIssues, 

-

1131 feature_migration: FeatureMigration, 

-

1132 _migration_target: str, 

-

1133) -> None: 

-

1134 feature_migration.tagline = "detect dh_installpam files (min dh compat)" 

-

1135 for dctrl_bin in manifest.all_packages: 

-

1136 dh_config_file = dhe_pkgfile(debian_dir, dctrl_bin, "pam") 

-

1137 if dh_config_file is not None: 

-

1138 feature_migration.assumed_compat = 14 

-

1139 break 

-

1140 

-

1141 

-

1142def migrate_tmpfile( 

-

1143 debian_dir: VirtualPath, 

-

1144 manifest: HighLevelManifest, 

-

1145 _acceptable_migration_issues: AcceptableMigrationIssues, 

-

1146 feature_migration: FeatureMigration, 

-

1147 _migration_target: str, 

-

1148) -> None: 

-

1149 feature_migration.tagline = "dh_installtmpfiles config files" 

-

1150 for dctrl_bin in manifest.all_packages: 

-

1151 dh_config_file = dhe_pkgfile(debian_dir, dctrl_bin, "tmpfile") 

-

1152 if dh_config_file is not None: 

-

1153 target = ( 

-

1154 dh_config_file.name.replace(".tmpfile", ".tmpfiles") 

-

1155 if "." in dh_config_file.name 

-

1156 else "tmpfiles" 

-

1157 ) 

-

1158 _rename_file_if_exists( 

-

1159 debian_dir, 

-

1160 dh_config_file.name, 

-

1161 target, 

-

1162 feature_migration, 

-

1163 ) 

-

1164 

-

1165 
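The rename above only adjusts the basename suffix; a sketch of the computation with illustrative file names:

    # debian/foo.tmpfile becomes debian/foo.tmpfiles,
    # while a bare debian/tmpfile becomes debian/tmpfiles.
    name = "foo.tmpfile"
    target = name.replace(".tmpfile", ".tmpfiles") if "." in name else "tmpfiles"
    assert target == "foo.tmpfiles"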

-

1166def migrate_lintian_overrides_files( 

-

1167 debian_dir: VirtualPath, 

-

1168 manifest: HighLevelManifest, 

-

1169 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1170 feature_migration: FeatureMigration, 

-

1171 _migration_target: str, 

-

1172) -> None: 

-

1173 feature_migration.tagline = "dh_lintian config files" 

-

1174 for dctrl_bin in manifest.all_packages: 

-

1175 # We do not support executable lintian-overrides and `_dh_config_file` handles all of that. 

-

1176 # Therefore, the return value is irrelevant to us. 

-

1177 _dh_config_file( 

-

1178 debian_dir, 

-

1179 dctrl_bin, 

-

1180 "lintian-overrides", 

-

1181 "dh_lintian", 

-

1182 acceptable_migration_issues, 

-

1183 feature_migration, 

-

1184 manifest, 

-

1185 support_executable_files=False, 

-

1186 remove_on_migration=False, 

-

1187 ) 

-

1188 

-

1189 

-

1190def migrate_links_files( 

-

1191 debian_dir: VirtualPath, 

-

1192 manifest: HighLevelManifest, 

-

1193 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1194 feature_migration: FeatureMigration, 

-

1195 _migration_target: str, 

-

1196) -> None: 

-

1197 feature_migration.tagline = "dh_link files" 

-

1198 mutable_manifest = assume_not_none(manifest.mutable_manifest) 

-

1199 for dctrl_bin in manifest.all_packages: 

-

1200 links_file, content = _dh_config_file( 

-

1201 debian_dir, 

-

1202 dctrl_bin, 

-

1203 "links", 

-

1204 "dh_link", 

-

1205 acceptable_migration_issues, 

-

1206 feature_migration, 

-

1207 manifest, 

-

1208 support_executable_files=True, 

-

1209 ) 

-

1210 

-

1211 if links_file is None: 

-

1212 continue 

-

1213 assert content is not None 

-

1214 

-

1215 package_definition = mutable_manifest.package(dctrl_bin.name) 

-

1216 defined_symlink = { 

-

1217 symlink.symlink_path: symlink.symlink_target 

-

1218 for symlink in package_definition.symlinks() 

-

1219 } 

-

1220 

-

1221 seen_symlinks: Set[str] = set() 

-

1222 

-

1223 for dhe_line in content: 

-

1224 if len(dhe_line.tokens) != 2: 1224 ↛ 1225 (line 1224 didn't jump to line 1225, because the condition on line 1224 was never true)

-

1225 raise UnsupportedFeature( 

-

1226 f"The dh_link file {links_file.fs_path} did not have exactly two paths on line" 

-

1227 f' {dhe_line.line_no} (line: "{dhe_line.original_line}")'

-

1228 ) 

-

1229 target, source = dhe_line.tokens 

-

1230 if source in seen_symlinks: 1230 ↛ 1232 (line 1230 didn't jump to line 1232, because the condition on line 1230 was never true)

-

1231 # According to #934499, this has happened in the wild already 

-

1232 raise ConflictingChange( 

-

1233 f"The {links_file.fs_path} file defines the link path {source} twice! Please ensure" 

-

1234 " that it is defined at most once in that file" 

-

1235 ) 

-

1236 seen_symlinks.add(source) 

-

1237 # Symlinks in .links are always considered absolute, but you were not required to have a leading slash. 

-

1238 # However, in the debputy manifest, you can have relative links, so we should ensure it is explicitly 

-

1239 # absolute. 

-

1240 if not target.startswith("/"): 1240 ↛ 1242 (line 1240 didn't jump to line 1242, because the condition on line 1240 was never false)

-

1241 target = "/" + target 

-

1242 existing_target = defined_symlink.get(source) 

-

1243 if existing_target is not None: 1243 ↛ 1244 (line 1243 didn't jump to line 1244, because the condition on line 1243 was never true)

-

1244 if existing_target != target: 

-

1245 raise ConflictingChange( 

-

1246 f'The symlink "{source}" points to "{target}" in {links_file}, but there is' 

-

1247 f' another symlink with the same path pointing to "{existing_target}" defined'

-

1248 " already (in the existing manifest or a migration, e.g., inside"

-

1249 f" {links_file.fs_path})" 

-

1250 ) 

-

1251 feature_migration.already_present += 1 

-

1252 continue 

-

1253 condition = dhe_line.conditional() 

-

1254 package_definition.add_symlink( 

-

1255 MutableYAMLSymlink.new_symlink( 

-

1256 source, 

-

1257 target, 

-

1258 condition, 

-

1259 ) 

-

1260 ) 

-

1261 feature_migration.successful_manifest_changes += 1 

-

1262 

-

1263 
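A worked example of the dh_link handling above (the paths are illustrative): a debian/foo.links line lists the link target first and the link path second, and the target is forced to be absolute before it lands in the manifest:

    # Line: "usr/share/foo/changelog.gz usr/share/doc/foo/changelog.gz"
    target, source = ("usr/share/foo/changelog.gz", "usr/share/doc/foo/changelog.gz")
    if not target.startswith("/"):
        target = "/" + target  # .links targets are implicitly absolute
    # Resulting manifest symlink: path=usr/share/doc/foo/changelog.gz,
    # pointing at /usr/share/foo/changelog.gz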

-

1264def migrate_misspelled_readme_debian_files( 

-

1265 debian_dir: VirtualPath, 

-

1266 manifest: HighLevelManifest, 

-

1267 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1268 feature_migration: FeatureMigration, 

-

1269 _migration_target: str, 

-

1270) -> None: 

-

1271 feature_migration.tagline = "misspelled README.Debian files" 

-

1272 for dctrl_bin in manifest.all_packages: 

-

1273 readme, _ = _dh_config_file( 

-

1274 debian_dir, 

-

1275 dctrl_bin, 

-

1276 "README.debian", 

-

1277 "dh_installdocs", 

-

1278 acceptable_migration_issues, 

-

1279 feature_migration, 

-

1280 manifest, 

-

1281 support_executable_files=False, 

-

1282 remove_on_migration=False, 

-

1283 ) 

-

1284 if readme is None: 

-

1285 continue 

-

1286 new_name = readme.name.replace("README.debian", "README.Debian") 

-

1287 assert readme.name != new_name 

-

1288 _rename_file_if_exists( 

-

1289 debian_dir, 

-

1290 readme.name, 

-

1291 new_name, 

-

1292 feature_migration, 

-

1293 ) 

-

1294 

-

1295 

-

1296def migrate_doc_base_files( 

-

1297 debian_dir: VirtualPath, 

-

1298 manifest: HighLevelManifest, 

-

1299 _: AcceptableMigrationIssues, 

-

1300 feature_migration: FeatureMigration, 

-

1301 _migration_target: str, 

-

1302) -> None: 

-

1303 feature_migration.tagline = "doc-base files" 

-

1304 # ignore the dh_make ".EX" file if one should still be present. The dh_installdocs tool ignores it too. 

-

1305 possible_effected_doc_base_files = [ 

-

1306 f 

-

1307 for f in debian_dir.iterdir 

-

1308 if ( 

-

1309 (".doc-base." in f.name or f.name.startswith("doc-base.")) 

-

1310 and not f.name.endswith("doc-base.EX") 

-

1311 ) 

-

1312 ] 

-

1313 known_packages = {d.name: d for d in manifest.all_packages} 

-

1314 main_package = [d for d in manifest.all_packages if d.is_main_package][0] 

-

1315 for doc_base_file in possible_effected_doc_base_files: 

-

1316 parts = doc_base_file.name.split(".") 

-

1317 owning_package = known_packages.get(parts[0]) 

-

1318 if owning_package is None: 1318 ↛ 1319 (line 1318 didn't jump to line 1319, because the condition on line 1318 was never true)

-

1319 owning_package = main_package 

-

1320 package_part = None 

-

1321 else: 

-

1322 package_part = parts[0] 

-

1323 parts = parts[1:] 

-

1324 

-

1325 if not parts or parts[0] != "doc-base": 1325 ↛ 1327 (line 1325 didn't jump to line 1327, because the condition on line 1325 was never true)

-

1326 # Not a doc-base file after all 

-

1327 continue 

-

1328 

-

1329 if len(parts) > 1: 1329 ↛ 1336 (line 1329 didn't jump to line 1336, because the condition on line 1329 was never false)

-

1330 name_part = ".".join(parts[1:]) 

-

1331 if package_part is None: 1331 ↛ 1333 (line 1331 didn't jump to line 1333, because the condition on line 1331 was never true)

-

1332 # Named files must have a package prefix 

-

1333 package_part = owning_package.name 

-

1334 else: 

-

1335 # No rename needed 

-

1336 continue 

-

1337 

-

1338 new_basename = ".".join(filter(None, (package_part, name_part, "doc-base"))) 

-

1339 _rename_file_if_exists( 

-

1340 debian_dir, 

-

1341 doc_base_file.name, 

-

1342 new_basename, 

-

1343 feature_migration, 

-

1344 ) 

-

1345 

-

1346 
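A worked example of the doc-base renaming above (the file name is illustrative): a named doc-base file keeps its package prefix and moves the "doc-base" marker last:

    parts = "foo.doc-base.api".split(".")      # ['foo', 'doc-base', 'api']
    package_part, parts = parts[0], parts[1:]  # owning package 'foo' found
    name_part = ".".join(parts[1:])            # 'api'
    new_basename = ".".join(filter(None, (package_part, name_part, "doc-base")))
    assert new_basename == "foo.api.doc-base"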

-

1347def migrate_dh_hook_targets( 

-

1348 debian_dir: VirtualPath, 

-

1349 _: HighLevelManifest, 

-

1350 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1351 feature_migration: FeatureMigration, 

-

1352 migration_target: str, 

-

1353) -> None: 

-

1354 feature_migration.tagline = "dh hook targets" 

-

1355 source_root = os.path.dirname(debian_dir.fs_path) 

-

1356 if source_root == "": 

-

1357 source_root = "." 

-

1358 detected_hook_targets = json.loads( 

-

1359 subprocess.check_output( 

-

1360 ["dh_assistant", "detect-hook-targets"], 

-

1361 cwd=source_root, 

-

1362 ).decode("utf-8") 

-

1363 ) 

-

1364 sample_hook_target: Optional[str] = None 

-

1365 replaced_commands = DH_COMMANDS_REPLACED[migration_target] 

-

1366 

-

1367 for hook_target_def in detected_hook_targets["hook-targets"]: 

-

1368 if hook_target_def["is-empty"]: 

-

1369 continue 

-

1370 command = hook_target_def["command"] 

-

1371 if command not in replaced_commands: 

-

1372 continue 

-

1373 hook_target = hook_target_def["target-name"] 

-

1374 advice = MIGRATION_AID_FOR_OVERRIDDEN_COMMANDS.get(command) 

-

1375 if advice is None: 

-

1376 if sample_hook_target is None: 

-

1377 sample_hook_target = hook_target 

-

1378 feature_migration.warn( 

-

1379 f"TODO: MANUAL MIGRATION required for hook target {hook_target}" 

-

1380 ) 

-

1381 else: 

-

1382 feature_migration.warn( 

-

1383 f"TODO: MANUAL MIGRATION required for hook target {hook_target}. Please see {advice}" 

-

1384 f" for migration advice." 

-

1385 ) 

-

1386 if ( 

-

1387 feature_migration.warnings 

-

1388 and "dh-hook-targets" not in acceptable_migration_issues 

-

1389 and sample_hook_target is not None 

-

1390 ): 

-

1391 raise UnsupportedFeature( 

-

1392 f"The debian/rules file contains one or more non empty dh hook targets that will not" 

-

1393 f" be run with the requested debputy dh sequence with no known migration advice. One of these would be" 

-

1394 f" {sample_hook_target}.", 

-

1395 ["dh-hook-targets"], 

-

1396 ) 

-

1397 

-

1398 
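migrate_dh_hook_targets above consumes the JSON printed by `dh_assistant detect-hook-targets`. A sketch of the shape the code reads, with illustrative values:

    detected_hook_targets = {
        "hook-targets": [
            {
                "command": "dh_installsystemd",
                "target-name": "override_dh_installsystemd",
                "is-empty": False,
            },
        ],
    }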

-

1399def detect_unsupported_zz_debputy_features( 

-

1400 debian_dir: VirtualPath, 

-

1401 manifest: HighLevelManifest, 

-

1402 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1403 feature_migration: FeatureMigration, 

-

1404 _migration_target: str, 

-

1405) -> None: 

-

1406 feature_migration.tagline = "Known unsupported features" 

-

1407 

-

1408 for unsupported_config in UNSUPPORTED_DH_CONFIGS_AND_TOOLS_FOR_ZZ_DEBPUTY: 

-

1409 _unsupported_debhelper_config_file( 

-

1410 debian_dir, 

-

1411 manifest, 

-

1412 unsupported_config, 

-

1413 acceptable_migration_issues, 

-

1414 feature_migration, 

-

1415 ) 

-

1416 

-

1417 

-

1418def detect_obsolete_substvars( 

-

1419 debian_dir: VirtualPath, 

-

1420 _manifest: HighLevelManifest, 

-

1421 _acceptable_migration_issues: AcceptableMigrationIssues, 

-

1422 feature_migration: FeatureMigration, 

-

1423 _migration_target: str, 

-

1424) -> None: 

-

1425 feature_migration.tagline = ( 

-

1426 "Check for obsolete ${foo:var} variables in debian/control" 

-

1427 ) 

-

1428 ctrl_file = debian_dir.get("control") 

-

1429 if not ctrl_file: 1429 ↛ 1430 (line 1429 didn't jump to line 1430, because the condition on line 1429 was never true)

-

1430 feature_migration.warn( 

-

1431 "Cannot find debian/control. Detection of obsolete substvars could not be performed." 

-

1432 ) 

-

1433 return 

-

1434 with ctrl_file.open() as fd: 

-

1435 ctrl = list(Deb822.iter_paragraphs(fd)) 

-

1436 

-

1437 relationship_fields = dpkg_field_list_pkg_dep() 

-

1438 relationship_fields_lc = frozenset(x.lower() for x in relationship_fields) 

-

1439 

-

1440 for p in ctrl[1:]: 

-

1441 seen_obsolete_relationship_substvars = set() 

-

1442 obsolete_fields = set() 

-

1443 is_essential = p.get("Essential") == "yes" 

-

1444 for df in relationship_fields: 

-

1445 field: Optional[str] = p.get(df) 

-

1446 if field is None: 

-

1447 continue 

-

1448 df_lc = df.lower() 

-

1449 number_of_relations = 0 

-

1450 obsolete_substvars_in_field = set() 

-

1451 for d in (d.strip() for d in field.strip().split(",")): 

-

1452 if not d: 

-

1453 continue 

-

1454 number_of_relations += 1 

-

1455 if not d.startswith("${"): 

-

1456 continue 

-

1457 try: 

-

1458 end_idx = d.index("}") 

-

1459 except ValueError: 

-

1460 continue 

-

1461 substvar_name = d[2:end_idx] 

-

1462 if ":" not in substvar_name: 1462 ↛ 1463line 1462 didn't jump to line 1463, because the condition on line 1462 was never true

-

1463 continue 

-

1464 _, field = substvar_name.rsplit(":", 1) 

-

1465 field_lc = field.lower() 

-

1466 if field_lc not in relationship_fields_lc: 1466 ↛ 1467 (line 1466 didn't jump to line 1467, because the condition on line 1466 was never true)

-

1467 continue 

-

1468 is_obsolete = field_lc == df_lc 

-

1469 if ( 

-

1470 not is_obsolete 

-

1471 and is_essential 

-

1472 and substvar_name.lower() == "shlibs:depends" 

-

1473 and df_lc == "pre-depends" 

-

1474 ): 

-

1475 is_obsolete = True 

-

1476 

-

1477 if is_obsolete: 

-

1478 obsolete_substvars_in_field.add(d) 

-

1479 

-

1480 if number_of_relations == len(obsolete_substvars_in_field): 

-

1481 obsolete_fields.add(df) 

-

1482 else: 

-

1483 seen_obsolete_relationship_substvars.update(obsolete_substvars_in_field) 

-

1484 

-

1485 package = p.get("Package", "(Missing package name!?)") 

-

1486 if obsolete_fields: 

-

1487 fields = ", ".join(obsolete_fields) 

-

1488 feature_migration.warn( 

-

1489 f"The following relationship fields can be removed from {package}: {fields}." 

-

1490 f" (The content in them would be applied automatically.)" 

-

1491 ) 

-

1492 if seen_obsolete_relationship_substvars: 

-

1493 v = ", ".join(sorted(seen_obsolete_relationship_substvars)) 

-

1494 feature_migration.warn( 

-

1495 f"The following relationship substitution variables can be removed from {package}: {v}" 

-

1496 ) 

-

1497 

-

1498 
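A worked example of the substvar check in detect_obsolete_substvars (the input is illustrative): for `${misc:Depends}` sitting in a Depends field, the suffix after the last colon matches the field it appears in, so the relation is flagged as obsolete:

    d = "${misc:Depends}"
    substvar_name = d[2 : d.index("}")]      # 'misc:Depends'
    _, field = substvar_name.rsplit(":", 1)  # 'Depends'
    assert field.lower() == "depends"        # same field name -> obsolete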

-

1499def read_dh_addon_sequences( 

-

1500 debian_dir: VirtualPath, 

-

1501) -> Optional[Tuple[Set[str], Set[str]]]: 

-

1502 ctrl_file = debian_dir.get("control") 

-

1503 if ctrl_file: 

-

1504 dr_sequences: Set[str] = set() 

-

1505 bd_sequences = set() 

-

1506 

-

1507 drules = debian_dir.get("rules") 

-

1508 if drules and drules.is_file: 1508 ↛ 1509 (line 1508 didn't jump to line 1509, because the condition on line 1508 was never true)

-

1509 with drules.open() as fd: 

-

1510 parse_drules_for_addons(fd, dr_sequences) 

-

1511 

-

1512 with ctrl_file.open() as fd: 

-

1513 ctrl = list(Deb822.iter_paragraphs(fd)) 

-

1514 source_paragraph = ctrl[0] if ctrl else {} 

-

1515 

-

1516 extract_dh_addons_from_control(source_paragraph, bd_sequences) 

-

1517 return bd_sequences, dr_sequences 

-

1518 return None 

-

1519 

-

1520 

-

1521def detect_dh_addons_zz_debputy_rrr( 

-

1522 debian_dir: VirtualPath, 

-

1523 _manifest: HighLevelManifest, 

-

1524 _acceptable_migration_issues: AcceptableMigrationIssues, 

-

1525 feature_migration: FeatureMigration, 

-

1526 _migration_target: str, 

-

1527) -> None: 

-

1528 feature_migration.tagline = "Check for dh-sequence-addons" 

-

1529 r = read_dh_addon_sequences(debian_dir) 

-

1530 if r is None: 

-

1531 feature_migration.warn( 

-

1532 "Cannot find debian/control. Detection of unsupported/missing dh-sequence addon" 

-

1533 " could not be performed. Please ensure the package will Build-Depend on dh-sequence-zz-debputy-rrr." 

-

1534 ) 

-

1535 return 

-

1536 

-

1537 bd_sequences, dr_sequences = r 

-

1538 

-

1539 remaining_sequences = bd_sequences | dr_sequences 

-

1540 saw_dh_debputy = "zz-debputy-rrr" in remaining_sequences 

-

1541 

-

1542 if not saw_dh_debputy: 

-

1543 feature_migration.warn("Missing Build-Depends on dh-sequence-zz-debputy-rrr") 

-

1544 

-

1545 

-

1546def detect_dh_addons( 

-

1547 debian_dir: VirtualPath, 

-

1548 _manifest: HighLevelManifest, 

-

1549 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1550 feature_migration: FeatureMigration, 

-

1551 _migration_target: str, 

-

1552) -> None: 

-

1553 feature_migration.tagline = "Check for dh-sequence-addons" 

-

1554 r = read_dh_addon_sequences(debian_dir) 

-

1555 if r is None: 

-

1556 feature_migration.warn( 

-

1557 "Cannot find debian/control. Detection of unsupported/missing dh-sequence addon" 

-

1558 " could not be performed. Please ensure the package will Build-Depend on dh-sequence-zz-debputy" 

-

1559 " and not rely on any other debhelper sequence addons except those debputy explicitly supports." 

-

1560 ) 

-

1561 return 

-

1562 

-

1563 bd_sequences, dr_sequences = r 

-

1564 

-

1565 remaining_sequences = bd_sequences | dr_sequences 

-

1566 saw_dh_debputy = ( 

-

1567 "debputy" in remaining_sequences or "zz-debputy" in remaining_sequences 

-

1568 ) 

-

1569 saw_zz_debputy = "zz-debputy" in remaining_sequences 

-

1570 must_use_zz_debputy = False 

-

1571 remaining_sequences -= SUPPORTED_DH_ADDONS 

-

1572 for sequence in remaining_sequences & DH_ADDONS_TO_PLUGINS.keys(): 

-

1573 migration = DH_ADDONS_TO_PLUGINS[sequence] 

-

1574 feature_migration.require_plugin(migration.debputy_plugin) 

-

1575 if migration.remove_dh_sequence: 1575 ↛ 1576 (line 1575 didn't jump to line 1576, because the condition on line 1575 was never true)

-

1576 if migration.must_use_zz_debputy: 

-

1577 must_use_zz_debputy = True 

-

1578 if sequence in bd_sequences: 

-

1579 feature_migration.warn( 

-

1580 f"TODO: MANUAL MIGRATION - Remove build-dependency on dh-sequence-{sequence}" 

-

1581 f" (replaced by debputy-plugin-{migration.debputy_plugin})" 

-

1582 ) 

-

1583 else: 

-

1584 feature_migration.warn( 

-

1585 f"TODO: MANUAL MIGRATION - Remove --with {sequence} from dh in d/rules" 

-

1586 f" (replaced by debputy-plugin-{migration.debputy_plugin})" 

-

1587 ) 

-

1588 

-

1589 remaining_sequences -= DH_ADDONS_TO_PLUGINS.keys() 

-

1590 

-

1591 alt_key = "unsupported-dh-sequences" 

-

1592 for sequence in remaining_sequences & DH_ADDONS_TO_REMOVE: 1592 ↛ 1593 (line 1592 didn't jump to line 1593, because the loop on line 1592 never started)

-

1593 if sequence in bd_sequences: 

-

1594 feature_migration.warn( 

-

1595 f"TODO: MANUAL MIGRATION - Remove build dependency on dh-sequence-{sequence}" 

-

1596 ) 

-

1597 else: 

-

1598 feature_migration.warn( 

-

1599 f"TODO: MANUAL MIGRATION - Remove --with {sequence} from dh in d/rules" 

-

1600 ) 

-

1601 

-

1602 remaining_sequences -= DH_ADDONS_TO_REMOVE 

-

1603 

-

1604 for sequence in remaining_sequences: 

-

1605 key = f"unsupported-dh-sequence-{sequence}" 

-

1606 msg = f'The dh addon "{sequence}" is not known to work with dh-debputy and might malfunction' 

-

1607 if ( 

-

1608 key not in acceptable_migration_issues 

-

1609 and alt_key not in acceptable_migration_issues 

-

1610 ): 

-

1611 raise UnsupportedFeature(msg, [key, alt_key]) 

-

1612 feature_migration.warn(msg) 

-

1613 

-

1614 if not saw_dh_debputy: 

-

1615 feature_migration.warn("Missing Build-Depends on dh-sequence-zz-debputy") 

-

1616 elif must_use_zz_debputy and not saw_zz_debputy: 1616 ↛ 1617 (line 1616 didn't jump to line 1617, because the condition on line 1616 was never true)

-

1617 feature_migration.warn( 

-

1618 "Please use the zz-debputy sequence rather than the debputy (needed due to dh add-on load order)" 

-

1619 ) 

-

1620 

-

1621 

-

1622def _rename_file_if_exists( 

-

1623 debian_dir: VirtualPath, 

-

1624 source: str, 

-

1625 dest: str, 

-

1626 feature_migration: FeatureMigration, 

-

1627) -> None: 

-

1628 source_path = debian_dir.get(source) 

-

1629 dest_path = debian_dir.get(dest) 

-

1630 spath = ( 

-

1631 source_path.path 

-

1632 if source_path is not None 

-

1633 else os.path.join(debian_dir.path, source) 

-

1634 ) 

-

1635 dpath = ( 

-

1636 dest_path.path if dest_path is not None else os.path.join(debian_dir.path, dest) 

-

1637 ) 

-

1638 if source_path is not None and source_path.is_file: 

-

1639 if dest_path is not None: 

-

1640 if not dest_path.is_file: 

-

1641 feature_migration.warnings.append( 

-

1642 f'TODO: MANUAL MIGRATION - there is a "{spath}" (file) and "{dpath}" (not a file).' 

-

1643 f' The migration wanted to replace "{spath}" with "{dpath}", but since "{dpath}" is not' 

-

1644 " a file, this step is left as a manual migration." 

-

1645 ) 

-

1646 return 

-

1647 if ( 

-

1648 subprocess.call(["cmp", "-s", source_path.fs_path, dest_path.fs_path]) 

-

1649 != 0 

-

1650 ): 

-

1651 feature_migration.warnings.append( 

-

1652 f'TODO: MANUAL MIGRATION - there is a "{source_path.path}" and "{dest_path.path}"' 

-

1653 f" file. Normally these files are for the same package and there would only be one of" 

-

1654 f" them. In this case, they both exist but their content differs. Be advised that" 

-

1655 f' debputy tool will use the "{dest_path.path}".' 

-

1656 ) 

-

1657 else: 

-

1658 feature_migration.remove_on_success(dest_path.fs_path) 

-

1659 else: 

-

1660 feature_migration.rename_on_success( 

-

1661 source_path.fs_path, 

-

1662 os.path.join(debian_dir.fs_path, dest), 

-

1663 ) 

-

1664 elif source_path is not None: 1664 ↛ exit (line 1664 didn't return from function '_rename_file_if_exists', because the condition on line 1664 was never false)

-

1665 feature_migration.warnings.append( 

-

1666 f'TODO: MANUAL MIGRATION - The migration would normally have renamed "{spath}" to "{dpath}".' 

-

1667 f' However, the migration assumed "{spath}" would be a file and it is not. Therefore, this step'

-

1668 " is left as a manual migration."

-

1669 ) 

-

1670 

-

1671 

-

1672def _find_dh_config_file_for_any_pkg( 

-

1673 debian_dir: VirtualPath, 

-

1674 manifest: HighLevelManifest, 

-

1675 unsupported_config: UnsupportedDHConfig, 

-

1676) -> Iterable[VirtualPath]: 

-

1677 for dctrl_bin in manifest.all_packages: 

-

1678 dh_config_file = dhe_pkgfile( 

-

1679 debian_dir, 

-

1680 dctrl_bin, 

-

1681 unsupported_config.dh_config_basename, 

-

1682 bug_950723_prefix_matching=unsupported_config.bug_950723_prefix_matching, 

-

1683 ) 

-

1684 if dh_config_file is not None: 

-

1685 yield dh_config_file 

-

1686 

-

1687 

-

1688def _unsupported_debhelper_config_file( 

-

1689 debian_dir: VirtualPath, 

-

1690 manifest: HighLevelManifest, 

-

1691 unsupported_config: UnsupportedDHConfig, 

-

1692 acceptable_migration_issues: AcceptableMigrationIssues, 

-

1693 feature_migration: FeatureMigration, 

-

1694) -> None: 

-

1695 dh_config_files = list( 

-

1696 _find_dh_config_file_for_any_pkg(debian_dir, manifest, unsupported_config) 

-

1697 ) 

-

1698 if not dh_config_files: 

-

1699 return 

-

1700 dh_tool = unsupported_config.dh_tool 

-

1701 basename = unsupported_config.dh_config_basename 

-

1702 file_stem = ( 

-

1703 f"@{basename}" if unsupported_config.bug_950723_prefix_matching else basename 

-

1704 ) 

-

1705 dh_config_file = dh_config_files[0] 

-

1706 if unsupported_config.is_missing_migration: 

-

1707 feature_migration.warn( 

-

1708 f'Missing migration support for the "{dh_config_file.path}" debhelper config file' 

-

1709 f" (used by {dh_tool}). Manual migration may be feasible depending on the exact features" 

-

1710 " required." 

-

1711 ) 

-

1712 return 

-

1713 primary_key = f"unsupported-dh-config-file-{file_stem}" 

-

1714 secondary_key = "any-unsupported-dh-config-file" 

-

1715 if ( 

-

1716 primary_key not in acceptable_migration_issues 

-

1717 and secondary_key not in acceptable_migration_issues 

-

1718 ): 

-

1719 msg = ( 

-

1720 f'The "{dh_config_file.path}" debhelper config file (used by {dh_tool} is currently not' 

-

1721 " supported by debputy." 

-

1722 ) 

-

1723 raise UnsupportedFeature( 

-

1724 msg, 

-

1725 [primary_key, secondary_key], 

-

1726 ) 

-

1727 for dh_config_file in dh_config_files: 

-

1728 feature_migration.warn( 

-

1729 f'TODO: MANUAL MIGRATION - Use of unsupported "{dh_config_file.path}" file (used by {dh_tool})' 

-

1730 ) 

-
diff --git a/coverage-report/d_23db3c975895bd86_migrators_py.html b/coverage-report/d_23db3c975895bd86_migrators_py.html
deleted file mode 100644
index c9693b9..0000000
--- a/coverage-report/d_23db3c975895bd86_migrators_py.html
+++ /dev/null
@@ -1,166 +0,0 @@
[deleted coverage report: src/debputy/dh_migration/migrators.py, 100% of 7 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1from typing import Callable, List, Mapping 

-

2 

-

3from debputy.dh_migration.migrators_impl import ( 

-

4 migrate_links_files, 

-

5 migrate_maintscript, 

-

6 migrate_tmpfile, 

-

7 migrate_install_file, 

-

8 migrate_installdocs_file, 

-

9 migrate_installexamples_file, 

-

10 migrate_dh_hook_targets, 

-

11 migrate_misspelled_readme_debian_files, 

-

12 migrate_doc_base_files, 

-

13 migrate_lintian_overrides_files, 

-

14 detect_unsupported_zz_debputy_features, 

-

15 detect_pam_files, 

-

16 detect_dh_addons, 

-

17 migrate_not_installed_file, 

-

18 migrate_installman_file, 

-

19 migrate_bash_completion, 

-

20 migrate_installinfo_file, 

-

21 migrate_dh_installsystemd_files, 

-

22 detect_obsolete_substvars, 

-

23 detect_dh_addons_zz_debputy_rrr, 

-

24 MIGRATION_TARGET_DH_DEBPUTY, 

-

25 MIGRATION_TARGET_DH_DEBPUTY_RRR, 

-

26) 

-

27from debputy.dh_migration.models import AcceptableMigrationIssues, FeatureMigration 

-

28from debputy.highlevel_manifest import HighLevelManifest 

-

29from debputy.plugin.api import VirtualPath 

-

30 

-

31Migrator = Callable[ 

-

32 [VirtualPath, HighLevelManifest, AcceptableMigrationIssues, FeatureMigration, str], 

-

33 None, 

-

34] 

-

35 

-

36 

-

37MIGRATORS: Mapping[str, List[Migrator]] = { 

-

38 MIGRATION_TARGET_DH_DEBPUTY_RRR: [ 

-

39 migrate_dh_hook_targets, 

-

40 migrate_misspelled_readme_debian_files, 

-

41 detect_dh_addons_zz_debputy_rrr, 

-

42 detect_obsolete_substvars, 

-

43 ], 

-

44 MIGRATION_TARGET_DH_DEBPUTY: [ 

-

45 detect_unsupported_zz_debputy_features, 

-

46 detect_pam_files, 

-

47 migrate_dh_hook_targets, 

-

48 migrate_dh_installsystemd_files, 

-

49 migrate_install_file, 

-

50 migrate_installdocs_file, 

-

51 migrate_installexamples_file, 

-

52 migrate_installman_file, 

-

53 migrate_installinfo_file, 

-

54 migrate_misspelled_readme_debian_files, 

-

55 migrate_doc_base_files, 

-

56 migrate_links_files, 

-

57 migrate_maintscript, 

-

58 migrate_tmpfile, 

-

59 migrate_lintian_overrides_files, 

-

60 migrate_bash_completion, 

-

61 detect_dh_addons, 

-

62 detect_obsolete_substvars, 

-

63 # not-installed should go last, so its rules appear after other installations 

-

64 # It is not perfect, but it is a start. 

-

65 migrate_not_installed_file, 

-

66 ], 

-

67} 

-
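The Migrator alias above pins down the call signature shared by every entry in MIGRATORS. A minimal conforming migrator could look like the following sketch (the function name and tagline are illustrative, not part of debputy):

    def migrate_example_file(
        debian_dir: VirtualPath,
        manifest: HighLevelManifest,
        acceptable_migration_issues: AcceptableMigrationIssues,
        feature_migration: FeatureMigration,
        _migration_target: str,
    ) -> None:
        # Every migrator sets the tagline shown in the migration summary.
        feature_migration.tagline = "example config files"
        # ... inspect debian_dir / manifest, then record changes or warnings.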
diff --git a/coverage-report/d_23db3c975895bd86_models_py.html b/coverage-report/d_23db3c975895bd86_models_py.html
deleted file mode 100644
index e78a17b..0000000
--- a/coverage-report/d_23db3c975895bd86_models_py.html
+++ /dev/null
@@ -1,272 +0,0 @@
[deleted coverage report: src/debputy/dh_migration/models.py, 84% of 86 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1import dataclasses 

-

2import re 

-

3from typing import Sequence, Optional, FrozenSet, Tuple, List, cast 

-

4 

-

5from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable 

-

6from debputy.highlevel_manifest import MutableYAMLManifest 

-

7from debputy.substitution import Substitution 

-

8 

-

9_DH_VAR_RE = re.compile(r"([$][{])([A-Za-z0-9][-_:0-9A-Za-z]*)([}])") 

-

10 

-

11 

-

12class AcceptableMigrationIssues: 

-

13 def __init__(self, values: FrozenSet[str]): 

-

14 self._values = values 

-

15 

-

16 def __contains__(self, item: str) -> bool: 

-

17 return item in self._values or "ALL" in self._values 

-

18 

-

19 

-

20class UnsupportedFeature(RuntimeError): 

-

21 @property 

-

22 def message(self) -> str: 

-

23 return cast("str", self.args[0]) 

-

24 

-

25 @property 

-

26 def issue_keys(self) -> Optional[Sequence[str]]: 

-

27 if len(self.args) < 2: 

-

28 return None 

-

29 return cast("Sequence[str]", self.args[1]) 

-

30 

-

31 

-

32class ConflictingChange(RuntimeError): 

-

33 @property 

-

34 def message(self) -> str: 

-

35 return cast("str", self.args[0]) 

-

36 

-

37 

-

38@dataclasses.dataclass(slots=True) 

-

39class FeatureMigration: 

-

40 tagline: str 

-

41 successful_manifest_changes: int = 0 

-

42 already_present: int = 0 

-

43 warnings: List[str] = dataclasses.field(default_factory=list) 

-

44 remove_paths_on_success: List[str] = dataclasses.field(default_factory=list) 

-

45 rename_paths_on_success: List[Tuple[str, str]] = dataclasses.field( 

-

46 default_factory=list 

-

47 ) 

-

48 assumed_compat: Optional[int] = None 

-

49 required_plugins: List[str] = dataclasses.field(default_factory=list) 

-

50 

-

51 def warn(self, msg: str) -> None: 

-

52 self.warnings.append(msg) 

-

53 

-

54 def rename_on_success(self, source: str, dest: str) -> None: 

-

55 self.rename_paths_on_success.append((source, dest)) 

-

56 

-

57 def remove_on_success(self, path: str) -> None: 

-

58 self.remove_paths_on_success.append(path) 

-

59 

-

60 def require_plugin(self, debputy_plugin: str) -> None: 

-

61 self.required_plugins.append(debputy_plugin) 

-

62 

-

63 @property 

-

64 def anything_to_do(self) -> bool: 

-

65 return bool(self.total_changes_involved) 

-

66 

-

67 @property 

-

68 def performed_changes(self) -> int: 

-

69 return ( 

-

70 self.successful_manifest_changes 

-

71 + len(self.remove_paths_on_success) 

-

72 + len(self.rename_paths_on_success) 

-

73 ) 

-

74 

-

75 @property 

-

76 def total_changes_involved(self) -> int: 

-

77 return ( 

-

78 self.successful_manifest_changes 

-

79 + len(self.warnings) 

-

80 + len(self.remove_paths_on_success) 

-

81 + len(self.rename_paths_on_success) 

-

82 ) 

-

83 

-

84 

-

85class DHMigrationSubstitution(Substitution): 

-

86 def __init__( 

-

87 self, 

-

88 dpkg_arch_table: DpkgArchitectureBuildProcessValuesTable, 

-

89 acceptable_migration_issues: AcceptableMigrationIssues, 

-

90 feature_migration: FeatureMigration, 

-

91 mutable_manifest: MutableYAMLManifest, 

-

92 ) -> None: 

-

93 self._acceptable_migration_issues = acceptable_migration_issues 

-

94 self._dpkg_arch_table = dpkg_arch_table 

-

95 self._feature_migration = feature_migration 

-

96 self._mutable_manifest = mutable_manifest 

-

97 # TODO: load 1:1 variables from the real subst instance (less stuff to keep in sync) 

-

98 one2one = [ 

-

99 "DEB_SOURCE", 

-

100 "DEB_VERSION", 

-

101 "DEB_VERSION_EPOCH_UPSTREAM", 

-

102 "DEB_VERSION_UPSTREAM_REVISION", 

-

103 "DEB_VERSION_UPSTREAM", 

-

104 "SOURCE_DATE_EPOCH", 

-

105 ] 

-

106 self._builtin_substs = { 

-

107 "Tab": "{{token:TAB}}", 

-

108 "Space": " ", 

-

109 "Newline": "{{token:NEWLINE}}", 

-

110 "Dollar": "${}", 

-

111 } 

-

112 self._builtin_substs.update((x, "{{" + x + "}}") for x in one2one) 

-

113 

-

114 def _replacement(self, key: str, definition_source: str) -> str: 

-

115 if key in self._builtin_substs: 115 ↛ 116 (line 115 didn't jump to line 116, because the condition on line 115 was never true)

-

116 return self._builtin_substs[key] 

-

117 if key in self._dpkg_arch_table: 

-

118 return "{{" + key + "}}" 

-

119 if key.startswith("env:"): 119 ↛ 120line 119 didn't jump to line 120, because the condition on line 119 was never true

-

120 if "dh-subst-env" not in self._acceptable_migration_issues: 

-

121 raise UnsupportedFeature( 

-

122 "Use of environment based substitution variable {{" 

-

123 + key 

-

124 + "}} is not" 

-

125 f" supported in debputy. The variable was spotted at {definition_source}", 

-

126 ["dh-subst-env"], 

-

127 ) 

-

128 elif "dh-subst-unknown-variable" not in self._acceptable_migration_issues: 128 ↛ 129line 128 didn't jump to line 129, because the condition on line 128 was never true

-

129 raise UnsupportedFeature( 

-

130 "Unknown substitution variable {{" 

-

131 + key 

-

132 + "}}, which does not have a known" 

-

133 f" counter part in debputy. The variable was spotted at {definition_source}", 

-

134 ["dh-subst-unknown-variable"], 

-

135 ) 

-

136 manifest_definitions = self._mutable_manifest.manifest_definitions( 

-

137 create_if_absent=False 

-

138 ) 

-

139 manifest_variables = manifest_definitions.manifest_variables( 

-

140 create_if_absent=False 

-

141 ) 

-

142 if key not in manifest_variables.variables: 142 ↛ 153 (line 142 didn't jump to line 153, because the condition on line 142 was never false)

-

143 manifest_definitions.create_definition_if_missing() 

-

144 manifest_variables[key] = "TODO: Provide variable value for " + key 

-

145 self._feature_migration.warn( 

-

146 "TODO: MANUAL MIGRATION of unresolved substitution variable {{" 

-

147 + key 

-

148 + "}} from" 

-

149 + f" {definition_source}" 

-

150 ) 

-

151 self._feature_migration.successful_manifest_changes += 1 

-

152 

-

153 return "{{" + key + "}}" 

-

154 

-

155 def substitute( 

-

156 self, 

-

157 value: str, 

-

158 definition_source: str, 

-

159 /, 

-

160 escape_glob_characters: bool = False, 

-

161 ) -> str: 

-

162 if "${" not in value: 

-

163 return value 

-

164 replacement = self._apply_substitution( 

-

165 _DH_VAR_RE, 

-

166 value, 

-

167 definition_source, 

-

168 escape_glob_characters=escape_glob_characters, 

-

169 ) 

-

170 return replacement.replace("${}", "$") 

-

171 

-

172 def with_extra_substitutions(self, **extra_substitutions: str) -> "Substitution": 

-

173 return self 

-
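A usage sketch of DHMigrationSubstitution above, in the doctest style used elsewhere in this codebase (construction of the four helper arguments is elided; the expected rewrites follow _replacement and the ${} handling in substitute):

    >>> subst = DHMigrationSubstitution(arch_table, issues, migration, mutable_manifest)
    >>> subst.substitute("${DEB_SOURCE}-doc", "debian/foo.install")
    '{{DEB_SOURCE}}-doc'
    >>> subst.substitute("literal ${Dollar}", "debian/foo.install")
    'literal $'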
diff --git a/coverage-report/d_267b6307937f1878___init___py.html b/coverage-report/d_267b6307937f1878___init___py.html
deleted file mode 100644
index 25e8dea..0000000
--- a/coverage-report/d_267b6307937f1878___init___py.html
+++ /dev/null
@@ -1,115 +0,0 @@
[deleted coverage report: src/debputy/__init__.py, 78% of 7 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1import pathlib 

-

2 

-

3from .version import IS_RELEASE_BUILD, __version__ 

-

4 

-

5# Replaced during install; must be a single line 

-

6# fmt: off 

-

7DEBPUTY_ROOT_DIR = pathlib.Path(__file__).parent.parent.parent 

-

8DEBPUTY_PLUGIN_ROOT_DIR = pathlib.Path(__file__).parent.parent.parent 

-

9# fmt: on 

-

10 

-

11if IS_RELEASE_BUILD: 11 ↛ 12 (line 11 didn't jump to line 12)

-

12 DEBPUTY_DOC_ROOT_DIR = ( 

-

13 f"https://salsa.debian.org/debian/debputy/-/blob/debian/{__version__}" 

-

14 ) 

-

15else: 

-

16 DEBPUTY_DOC_ROOT_DIR = "https://salsa.debian.org/debian/debputy/-/blob/main" 

-
diff --git a/coverage-report/d_267b6307937f1878__deb_options_profiles_py.html b/coverage-report/d_267b6307937f1878__deb_options_profiles_py.html
deleted file mode 100644
index de2dc63..0000000
--- a/coverage-report/d_267b6307937f1878__deb_options_profiles_py.html
+++ /dev/null
@@ -1,190 +0,0 @@
[deleted coverage report: src/debputy/_deb_options_profiles.py, 93% of 27 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1import os 

-

2from functools import lru_cache 

-

3 

-

4from typing import FrozenSet, Optional, Mapping, Dict 

-

5 

-

6 

-

7def _parse_deb_build_options(value: str) -> Mapping[str, Optional[str]]: 

-

8 res: Dict[str, Optional[str]] = {} 

-

9 for kvish in value.split(): 

-

10 if "=" in kvish: 

-

11 key, value = kvish.split("=", 1) 

-

12 res[key] = value 

-

13 else: 

-

14 res[kvish] = None 

-

15 return res 

-

16 

-

17 

-

18class DebBuildOptionsAndProfiles: 

-

19 """Accessor to common environment related values 

-

20 

-

21 >>> env = DebBuildOptionsAndProfiles(environ={'DEB_BUILD_PROFILES': 'noudeb nojava'}) 

-

22 >>> 'noudeb' in env.deb_build_profiles 

-

23 True 

-

24 >>> 'nojava' in env.deb_build_profiles 

-

25 True 

-

26 >>> 'nopython' in env.deb_build_profiles 

-

27 False 

-

28 >>> sorted(env.deb_build_profiles) 

-

29 ['nojava', 'noudeb'] 

-

30 """ 

-

31 

-

32 def __init__(self, *, environ: Optional[Mapping[str, str]] = None) -> None: 

-

33 """Provide a view of the options. Though consider using DebBuildOptionsAndProfiles.instance() instead 

-

34 

-

35 :param environ: Alternative to os.environ. Mostly useful for testing purposes 

-

36 """ 

-

37 if environ is None: 37 ↛ 38 (line 37 didn't jump to line 38, because the condition on line 37 was never true)

-

38 environ = os.environ 

-

39 self._deb_build_profiles = frozenset( 

-

40 x for x in environ.get("DEB_BUILD_PROFILES", "").split() 

-

41 ) 

-

42 self._deb_build_options = _parse_deb_build_options( 

-

43 environ.get("DEB_BUILD_OPTIONS", "") 

-

44 ) 

-

45 

-

46 @staticmethod 

-

47 @lru_cache(1) 

-

48 def instance() -> "DebBuildOptionsAndProfiles": 

-

49 return DebBuildOptionsAndProfiles() 

-

50 

-

51 @property 

-

52 def deb_build_profiles(self) -> FrozenSet[str]: 

-

53 """A set-like view of all build profiles active during the build 

-

54 

-

55 >>> env = DebBuildOptionsAndProfiles(environ={'DEB_BUILD_PROFILES': 'noudeb nojava'}) 

-

56 >>> 'noudeb' in env.deb_build_profiles 

-

57 True 

-

58 >>> 'nojava' in env.deb_build_profiles 

-

59 True 

-

60 >>> 'nopython' in env.deb_build_profiles 

-

61 False 

-

62 >>> sorted(env.deb_build_profiles) 

-

63 ['nojava', 'noudeb'] 

-

64 

-

65 """ 

-

66 return self._deb_build_profiles 

-

67 

-

68 @property 

-

69 def deb_build_options(self) -> Mapping[str, Optional[str]]: 

-

70 """A set-like view of all build profiles active during the build 

-

71 

-

72 >>> env = DebBuildOptionsAndProfiles(environ={'DEB_BUILD_OPTIONS': 'nostrip parallel=4'}) 

-

73 >>> 'nostrip' in env.deb_build_options 

-

74 True 

-

75 >>> 'parallel' in env.deb_build_options 

-

76 True 

-

77 >>> 'noautodbgsym' in env.deb_build_options 

-

78 False 

-

79 >>> env.deb_build_options['nostrip'] is None 

-

80 True 

-

81 >>> env.deb_build_options['parallel'] 

-

82 '4' 

-

83 >>> env.deb_build_options['noautodbgsym'] 

-

84 Traceback (most recent call last): 

-

85 ... 

-

86 KeyError: 'noautodbgsym' 

-

87 >>> sorted(env.deb_build_options) 

-

88 ['nostrip', 'parallel'] 

-

89 

-

90 """ 

-

91 return self._deb_build_options 

-
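The module-private _parse_deb_build_options helper at the top of this file splits the space-separated DEB_BUILD_OPTIONS string into a mapping, keeping valueless flags as None. In the doctest style of this module:

    >>> _parse_deb_build_options("nostrip parallel=4")
    {'nostrip': None, 'parallel': '4'}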
diff --git a/coverage-report/d_267b6307937f1878__manifest_constants_py.html b/coverage-report/d_267b6307937f1878__manifest_constants_py.html
deleted file mode 100644
index eeed0d1..0000000
--- a/coverage-report/d_267b6307937f1878__manifest_constants_py.html
+++ /dev/null
@@ -1,148 +0,0 @@
[deleted coverage report: src/debputy/_manifest_constants.py, 100% of 37 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1from typing import Literal 

-

2 

-

3DEFAULT_MANIFEST_VERSION = "0.1" 

-

4SUPPORTED_MANIFEST_VERSIONS = frozenset(["0.1"]) 

-

5ManifestVersion = Literal["0.1"] 

-

6assert DEFAULT_MANIFEST_VERSION in SUPPORTED_MANIFEST_VERSIONS 

-

7 

-

8MK_MANIFEST_VERSION = "manifest-version" 

-

9MK_PACKAGES = "packages" 

-

10 

-

11MK_INSTALLATIONS = "installations" 

-

12MK_INSTALLATIONS_INSTALL = "install" 

-

13MK_INSTALLATIONS_MULTI_DEST_INSTALL = "multi-dest-install" 

-

14MK_INSTALLATIONS_INSTALL_DOCS = "install-docs" 

-

15MK_INSTALLATIONS_INSTALL_EXAMPLES = "install-examples" 

-

16MK_INSTALLATIONS_INSTALL_MAN = "install-man" 

-

17MK_INSTALLATIONS_DISCARD = "discard" 

-

18 

-

19MK_INSTALLATIONS_INSTALL_SOURCE = "source" 

-

20MK_INSTALLATIONS_INSTALL_SOURCES = "sources" 

-

21MK_INSTALLATIONS_INSTALL_DEST_DIR = "dest-dir" 

-

22MK_INSTALLATIONS_INSTALL_AS = "as" 

-

23MK_INSTALLATIONS_INSTALL_INTO = "into" 

-

24 

-

25MK_INSTALLATIONS_INSTALL_MAN_LANGUAGE = "language" 

-

26 

-

27MK_CONDITION_WHEN = "when" 

-

28MK_CONDITION_ARCH_MATCHES = "arch-matches" 

-

29MK_CONDITION_BUILD_PROFILES_MATCHES = "build-profiles-matches" 

-

30 

-

31MK_TRANSFORMATIONS = "transformations" 

-

32 

-

33MK_TRANSFORMATIONS_CREATE_SYMLINK = "create-symlink" 

-

34MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_PATH = "path" 

-

35MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_TARGET = "target" 

-

36 

-

37MK_CONFFILE_MANAGEMENT = "conffile-management" 

-

38MK_CONFFILE_MANAGEMENT_REMOVE = "remove" 

-

39MK_CONFFILE_MANAGEMENT_RENAME = "rename" 

-

40 

-

41MK_CONFFILE_MANAGEMENT_REMOVE_PATH = "path" 

-

42MK_CONFFILE_MANAGEMENT_RENAME_SOURCE = "source" 

-

43MK_CONFFILE_MANAGEMENT_RENAME_TARGET = "target" 

-

44 

-

45MK_CONFFILE_MANAGEMENT_X_PRIOR_TO_VERSION = "prior-to-version" 

-

46MK_CONFFILE_MANAGEMENT_X_OWNING_PACKAGE = "owning-package" 

-

47 

-

48MK_MANIFEST_DEFINITIONS = "definitions" 

-

49MK_MANIFEST_VARIABLES = "variables" 

-
diff --git a/coverage-report/d_267b6307937f1878_architecture_support_py.html b/coverage-report/d_267b6307937f1878_architecture_support_py.html
deleted file mode 100644
index 1c70df9..0000000
--- a/coverage-report/d_267b6307937f1878_architecture_support_py.html
+++ /dev/null
@@ -1,332 +0,0 @@
[deleted coverage report: src/debputy/architecture_support.py, 95% of 107 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1import os 

-

2import subprocess 

-

3from functools import lru_cache 

-

4from typing import Dict, Optional, Iterator, Tuple 

-

5 

-

6 

-

7class DpkgArchitectureBuildProcessValuesTable: 

-

8 """Dict-like interface to dpkg-architecture values""" 

-

9 

-

10 def __init__(self, *, mocked_answers: Optional[Dict[str, str]] = None) -> None: 

-

11 """Create a new dpkg-architecture table; NO INSTANTIATION 

-

12 

-

13 This object will be created for you; if you need a production instance 

-

14 then call dpkg_architecture_table(). If you need a testing instance, 

-

15 then call mock_arch_table(...) 

-

16 

-

17 :param mocked_answers: Used for testing purposes. Do not use directly; 

-

18 instead use mock_arch_table(...) to create the table you want. 

-

19 """ 

-

20 self._architecture_cache: Dict[str, str] = {} 

-

21 self._has_run_dpkg_architecture = False 

-

22 if mocked_answers is None: 

-

23 self._architecture_cache = {} 

-

24 self._respect_environ: bool = True 

-

25 self._has_run_dpkg_architecture = False 

-

26 else: 

-

27 self._architecture_cache = mocked_answers 

-

28 self._respect_environ = False 

-

29 self._has_run_dpkg_architecture = True 

-

30 

-

31 def __contains__(self, item: str) -> bool: 

-

32 try: 

-

33 self[item] 

-

34 except KeyError: 

-

35 return False 

-

36 else: 

-

37 return True 

-

38 

-

39 def __getitem__(self, item: str) -> str: 

-

40 if item not in self._architecture_cache: 

-

41 if self._respect_environ: 

-

42 value = os.environ.get(item) 

-

43 if value is not None: 43 ↛ 44 (line 43 didn't jump to line 44, because the condition on line 43 was never true)

-

44 self._architecture_cache[item] = value 

-

45 return value 

-

46 if not self._has_run_dpkg_architecture: 

-

47 self._load_dpkg_architecture_values() 

-

48 # Fall through and look it up in the cache 

-

49 return self._architecture_cache[item] 

-

50 

-

51 def __iter__(self) -> Iterator[str]: 

-

52 if not self._has_run_dpkg_architecture: 

-

53 self._load_dpkg_architecture_values() 

-

54 yield from self._architecture_cache 

-

55 

-

56 @property 

-

57 def current_host_arch(self) -> str: 

-

58 """The architecture we are building for 

-

59 

-

60 This is the architecture name you need if you are in doubt. 

-

61 """ 

-

62 return self["DEB_HOST_ARCH"] 

-

63 

-

64 @property 

-

65 def current_host_multiarch(self) -> str: 

-

66 """The multi-arch path basename 

-

67 

-

68 This is the multi-arch basename name you need if you are in doubt. It 

-

69 goes here: 

-

70 

-

71 "/usr/lib/{MA}".format(table.current_host_multiarch) 

-

72 

-

73 """ 

-

74 return self["DEB_HOST_MULTIARCH"] 

-

75 

-

76 @property 

-

77 def is_cross_compiling(self) -> bool: 

-

78 """Whether we are cross-compiling 

-

79 

-

80 This is defined as DEB_BUILD_GNU_TYPE != DEB_HOST_GNU_TYPE and 

-

81 affects whether we can rely on being able to run the binaries 

-

82 that are compiled. 

-

83 """ 

-

84 return self["DEB_BUILD_GNU_TYPE"] != self["DEB_HOST_GNU_TYPE"] 

-

85 

-

86 def _load_dpkg_architecture_values(self) -> None: 

-

87 env = dict(os.environ) 

-

88 # For performance, disable dpkg's translation later 

-

89 env["DPKG_NLS"] = "0" 

-

90 kw_pairs = _parse_dpkg_arch_output( 

-

91 subprocess.check_output( 

-

92 ["dpkg-architecture"], 

-

93 env=env, 

-

94 ) 

-

95 ) 

-

96 for k, v in kw_pairs: 

-

97 self._architecture_cache[k] = os.environ.get(k, v) 

-

98 self._has_run_dpkg_architecture = True 

-

99 

-

100 

-

101def _parse_dpkg_arch_output(output: bytes) -> Iterator[Tuple[str, str]]: 

-

102 text = output.decode("utf-8") 

-

103 for line in text.splitlines(): 

-

104 k, v = line.strip().split("=", 1) 

-

105 yield k, v 

-

106 

-

107 

-

108def _rewrite(value: str, from_pattern: str, to_pattern: str) -> str: 

-

109 assert value.startswith(from_pattern) 

-

110 return to_pattern + value[len(from_pattern) :] 

-

111 

-

112 

-

113def faked_arch_table( 

-

114 host_arch: str, 

-

115 *, 

-

116 build_arch: Optional[str] = None, 

-

117 target_arch: Optional[str] = None, 

-

118) -> DpkgArchitectureBuildProcessValuesTable: 

-

119 """Creates a mocked instance of DpkgArchitectureBuildProcessValuesTable 

-

120 

-

121 

-

122 :param host_arch: The dpkg architecture to mock answers for. This affects 

-

123 DEB_HOST_* values and defines the default for DEB_{BUILD,TARGET}_* if 

-

124 not overridden. 

-

125 :param build_arch: If set and has a different value than host_arch, then 

-

126 pretend this is a cross-build. This value affects the DEB_BUILD_* values. 

-

127 :param target_arch: If set and has a different value than host_arch, then 

-

128 pretend this is a build _of_ a cross-compiler. This value affects the 

-

129 DEB_TARGET_* values. 

-

130 """ 

-

131 

-

132 if build_arch is None: 

-

133 build_arch = host_arch 

-

134 

-

135 if target_arch is None: 

-

136 target_arch = host_arch 

-

137 return _faked_arch_tables(host_arch, build_arch, target_arch) 

-

138 

-

139 

-

140@lru_cache 

-

141def _faked_arch_tables( 

-

142 host_arch: str, build_arch: str, target_arch: str 

-

143) -> DpkgArchitectureBuildProcessValuesTable: 

-

144 mock_table = {} 

-

145 

-

146 env = dict(os.environ) 

-

147 # Set CC to /bin/true avoid a warning from dpkg-architecture 

-

148 env["CC"] = "/bin/true" 

-

149 # For performance, disable dpkg's translation later 

-

150 env["DPKG_NLS"] = "0" 

-

151 # Clear environ variables that might confuse dpkg-architecture 

-

152 for k in os.environ: 

-

153 if k.startswith("DEB_"): 

-

154 del env[k] 

-

155 

-

156 if build_arch == host_arch: 

-

157 # easy / common case - we can handle this with a single call 

-

158 kw_pairs = _parse_dpkg_arch_output( 

-

159 subprocess.check_output( 

-

160 ["dpkg-architecture", "-a", host_arch, "-A", target_arch], 

-

161 env=env, 

-

162 ) 

-

163 ) 

-

164 for k, v in kw_pairs: 

-

165 if k.startswith(("DEB_HOST_", "DEB_TARGET_")): 

-

166 mock_table[k] = v 

-

167 # Clone DEB_HOST_* into DEB_BUILD_* as well 

-

168 if k.startswith("DEB_HOST_"): 

-

169 k2 = _rewrite(k, "DEB_HOST_", "DEB_BUILD_") 

-

170 mock_table[k2] = v 

-

171 elif build_arch != host_arch and host_arch != target_arch: 

-

172 # This will need two dpkg-architecture calls because we cannot set 

-

173 # DEB_BUILD_* directly. But we can set DEB_HOST_* and then rewrite 

-

174 # it 

-

175 # First handle the build arch 

-

176 kw_pairs = _parse_dpkg_arch_output( 

-

177 subprocess.check_output( 

-

178 ["dpkg-architecture", "-a", build_arch], 

-

179 env=env, 

-

180 ) 

-

181 ) 

-

182 for k, v in kw_pairs: 

-

183 if k.startswith("DEB_HOST_"): 

-

184 k = _rewrite(k, "DEB_HOST_", "DEB_BUILD_") 

-

185 mock_table[k] = v 

-

186 

-

187 kw_pairs = _parse_dpkg_arch_output( 

-

188 subprocess.check_output( 

-

189 ["dpkg-architecture", "-a", host_arch, "-A", target_arch], 

-

190 env=env, 

-

191 ) 

-

192 ) 

-

193 for k, v in kw_pairs: 

-

194 if k.startswith(("DEB_HOST_", "DEB_TARGET_")): 

-

195 mock_table[k] = v 

-

196 else: 

-

197 # This is a fun special case. We know that: 

-

198 # * build_arch != host_arch 

-

199 # * host_arch == target_arch 

-

200 # otherwise we would have hit one of the previous cases. 

-

201 # 

-

202 # We can do this in a single call to dpkg-architecture by 

-

203 # a bit of "cleaver" rewriting. 

-

204 # 

-

205 # - Use -a to set DEB_HOST_* and then rewrite that as 

-

206 # DEB_BUILD_* 

-

207 # - use -A to set DEB_TARGET_* and then use that for both 

-

208 # DEB_HOST_* and DEB_TARGET_* 

-

209 

-

210 kw_pairs = _parse_dpkg_arch_output( 

-

211 subprocess.check_output( 

-

212 ["dpkg-architecture", "-a", build_arch, "-A", target_arch], env=env 

-

213 ) 

-

214 ) 

-

215 for k, v in kw_pairs: 

-

216 if k.startswith("DEB_HOST_"): 

-

217 k2 = _rewrite(k, "DEB_HOST_", "DEB_BUILD_") 

-

218 mock_table[k2] = v 

-

219 continue 

-

220 if k.startswith("DEB_TARGET_"): 

-

221 mock_table[k] = v 

-

222 k2 = _rewrite(k, "DEB_TARGET_", "DEB_HOST_") 

-

223 mock_table[k2] = v 

-

224 

-

225 table = DpkgArchitectureBuildProcessValuesTable(mocked_answers=mock_table) 

-

226 return table 

-

227 

-

228 

-

229_ARCH_TABLE = DpkgArchitectureBuildProcessValuesTable() 

-

230 

-

231 

-

232def dpkg_architecture_table() -> DpkgArchitectureBuildProcessValuesTable: 

-

233 return _ARCH_TABLE 

-
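A usage sketch for faked_arch_table above, doctest style. Note that it still shells out to dpkg-architecture under the hood, so the values assume a Debian system with that tool available:

    >>> table = faked_arch_table("arm64", build_arch="amd64")
    >>> table.current_host_arch
    'arm64'
    >>> table.is_cross_compiling  # DEB_BUILD_GNU_TYPE differs from DEB_HOST_GNU_TYPE
    True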
diff --git a/coverage-report/d_267b6307937f1878_builtin_manifest_rules_py.html b/coverage-report/d_267b6307937f1878_builtin_manifest_rules_py.html
deleted file mode 100644
index 4cf18e4..0000000
--- a/coverage-report/d_267b6307937f1878_builtin_manifest_rules_py.html
+++ /dev/null
@@ -1,360 +0,0 @@
[deleted coverage report: src/debputy/builtin_manifest_rules.py, 86% of 79 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1import re 

-

2from typing import Iterable, Tuple, Optional 

-

3 

-

4from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable 

-

5from debputy.exceptions import PureVirtualPathError, TestPathWithNonExistentFSPathError 

-

6from debputy.intermediate_manifest import PathType 

-

7from debputy.manifest_parser.base_types import SymbolicMode, OctalMode, FileSystemMode 

-

8from debputy.manifest_parser.util import AttributePath 

-

9from debputy.packages import BinaryPackage 

-

10from debputy.path_matcher import ( 

-

11 MATCH_ANYTHING, 

-

12 MatchRule, 

-

13 ExactFileSystemPath, 

-

14 DirectoryBasedMatch, 

-

15 MatchRuleType, 

-

16 BasenameGlobMatch, 

-

17) 

-

18from debputy.substitution import Substitution 

-

19from debputy.types import VP 

-

20from debputy.util import _normalize_path, perl_module_dirs 

-

21 

-

22# Imported from dh_fixperms 

-

23_PERMISSION_NORMALIZATION_SOURCE_DEFINITION = "permission normalization" 

-

24attribute_path = AttributePath.builtin_path()[ 

-

25 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION 

-

26] 

-

27_STD_FILE_MODE = OctalMode(0o644) 

-

28_PATH_FILE_MODE = OctalMode(0o755) 

-

29_HAS_BIN_SHBANG_RE = re.compile(rb"^#!\s*/(?:usr/)?s?bin", re.ASCII) 

-

30 

-

31 

-

32class _UsrShareDocMatchRule(DirectoryBasedMatch): 

-

33 def __init__(self) -> None: 

-

34 super().__init__( 

-

35 MatchRuleType.ANYTHING_BENEATH_DIR, 

-

36 _normalize_path("usr/share/doc", with_prefix=True), 

-

37 path_type=PathType.FILE, 

-

38 ) 

-

39 

-

40 def finditer(self, fs_root: VP, *, ignore_paths=None) -> Iterable[VP]: 

-

41 doc_dir = fs_root.lookup(self._directory) 

-

42 if doc_dir is None: 

-

43 return 

-

44 for path_in_doc_dir in doc_dir.iterdir: 

-

45 if ignore_paths is not None and ignore_paths(path_in_doc_dir): 45 ↛ 46 (line 45 didn't jump to line 46, because the condition on line 45 was never true)

-

46 continue 

-

47 if path_in_doc_dir.is_file: 47 ↛ 48 (line 47 didn't jump to line 48, because the condition on line 47 was never true)

-

48 yield path_in_doc_dir 

-

49 for subpath in path_in_doc_dir.iterdir: 

-

50 if subpath.name == "examples" and subpath.is_dir: 50 ↛ 51line 50 didn't jump to line 51, because the condition on line 50 was never true

-

51 continue 

-

52 if ignore_paths is not None: 52 ↛ 59 (line 52 didn't jump to line 59, because the condition on line 52 was never false)

-

53 yield from ( 

-

54 f 

-

55 for f in subpath.all_paths() 

-

56 if f.is_file and not ignore_paths(f) 

-

57 ) 

-

58 else: 

-

59 yield from (f for f in subpath.all_paths() if f.is_file) 

-

60 

-

61 def describe_match_short(self) -> str: 

-

62 return f"All files beneath {self._directory}/ except .../<pkg>/examples" 

-

63 

-

64 def describe_match_exact(self) -> str: 

-

65 return self.describe_match_short() 

-

66 

-

67 

-

68class _ShebangScriptFiles(MatchRule): 

-

69 def __init__(self) -> None: 

-

70 super().__init__(MatchRuleType.GENERIC_GLOB) 

-

71 

-

72 def finditer(self, fs_root: VP, *, ignore_paths=None) -> Iterable[VP]: 

-

73 for p in fs_root.all_paths(): 

-

74 if not p.is_file or (ignore_paths and ignore_paths(p)): 

-

75 continue 

-

76 try: 

-

77 with p.open(byte_io=True) as fd: 

-

78 c = fd.read(32) 

-

79 except (PureVirtualPathError, TestPathWithNonExistentFSPathError): 

-

80 continue 

-

81 if _HAS_BIN_SHBANG_RE.match(c): 

-

82 yield p 

-

83 

-

84 @property 

-

85 def path_type(self) -> Optional[PathType]: 

-

86 return PathType.FILE 

-

87 

-

88 def _full_pattern(self) -> str: 

-

89 return "built-in - not a valid pattern" 

-

90 

-

91 def describe_match_short(self) -> str: 

-

92 return "All scripts with a absolute #!-line for /(s)bin or /usr/(s)bin" 

-

93 

-

94 def describe_match_exact(self) -> str: 

-

95 return self.describe_match_short() 

-

96 

-

97 

-

98USR_SHARE_DOC_MATCH_RULE = _UsrShareDocMatchRule() 

-

99SHEBANG_SCRIPTS = _ShebangScriptFiles() 

-

100del _UsrShareDocMatchRule 

-

101del _ShebangScriptFiles 

-

102 

-

103 

-

104def builtin_mode_normalization_rules( 

-

105 dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable, 

-

106 dctrl_bin: BinaryPackage, 

-

107 substitution: Substitution, 

-

108) -> Iterable[Tuple[MatchRule, FileSystemMode]]: 

-

109 yield from ( 

-

110 ( 

-

111 MatchRule.from_path_or_glob( 

-

112 x, 

-

113 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

114 path_type=PathType.FILE, 

-

115 ), 

-

116 _STD_FILE_MODE, 

-

117 ) 

-

118 for x in ( 

-

119 "*.so.*", 

-

120 "*.so", 

-

121 "*.la", 

-

122 "*.a", 

-

123 "*.js", 

-

124 "*.css", 

-

125 "*.scss", 

-

126 "*.sass", 

-

127 "*.jpeg", 

-

128 "*.jpg", 

-

129 "*.png", 

-

130 "*.gif", 

-

131 "*.cmxs", 

-

132 "*.node", 

-

133 ) 

-

134 ) 

-

135 

-

136 yield from ( 

-

137 ( 

-

138 MatchRule.recursive_beneath_directory( 

-

139 x, 

-

140 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

141 path_type=PathType.FILE, 

-

142 ), 

-

143 _STD_FILE_MODE, 

-

144 ) 

-

145 for x in ( 

-

146 "usr/share/man", 

-

147 "usr/include", 

-

148 "usr/share/applications", 

-

149 "usr/share/lintian/overrides", 

-

150 ) 

-

151 ) 

-

152 

-

153 # The dh_fixperms tool recuses for these directories, but probably should not (see #1006927) 

-

154 yield from ( 

-

155 ( 

-

156 MatchRule.from_path_or_glob( 

-

157 f"{x}/*", 

-

158 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

159 path_type=PathType.FILE, 

-

160 ), 

-

161 _PATH_FILE_MODE, 

-

162 ) 

-

163 for x in ( 

-

164 "usr/bin", 

-

165 "usr/bin/mh", 

-

166 "bin", 

-

167 "usr/sbin", 

-

168 "sbin", 

-

169 "usr/games", 

-

170 "usr/libexec", 

-

171 "etc/init.d", 

-

172 ) 

-

173 ) 

-

174 

-

175 yield ( 

-

176 # Strictly speaking, dh_fixperms does a recursive search but in practice, it does not matter. 

-

177 MatchRule.from_path_or_glob( 

-

178 "etc/sudoers.d/*", 

-

179 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

180 path_type=PathType.FILE, 

-

181 ), 

-

182 OctalMode(0o440), 

-

183 ) 

-

184 

-

185 # The reportbug rule 

-

186 yield ( 

-

187 ExactFileSystemPath( 

-

188 substitution.substitute( 

-

189 _normalize_path("usr/share/bug/{{PACKAGE}}"), 

-

190 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

191 ) 

-

192 ), 

-

193 OctalMode(0o755), 

-

194 ) 

-

195 

-

196 yield ( 

-

197 MatchRule.recursive_beneath_directory( 

-

198 "usr/share/bug/{{PACKAGE}}", 

-

199 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

200 path_type=PathType.FILE, 

-

201 substitution=substitution, 

-

202 ), 

-

203 OctalMode(0o644), 

-

204 ) 

-

205 

-

206 yield ( 

-

207 ExactFileSystemPath( 

-

208 substitution.substitute( 

-

209 _normalize_path("usr/share/bug/{{PACKAGE}}/script"), 

-

210 _PERMISSION_NORMALIZATION_SOURCE_DEFINITION, 

-

211 ) 

-

212 ), 

-

213 OctalMode(0o755), 

-

214 ) 

-

215 

-

216 yield ( 

-

217 USR_SHARE_DOC_MATCH_RULE, 

-

218 OctalMode(0o0644), 

-

219 ) 

-

220 

-

221 yield from ( 

-

222 ( 

-

223 BasenameGlobMatch( 

-

224 "*.pm", 

-

225 only_when_in_directory=perl_dir, 

-

226 path_type=PathType.FILE, 

-

227 recursive_match=True, 

-

228 ), 

-

229 SymbolicMode.parse_filesystem_mode( 

-

230 "a-x", 

-

231 attribute_path['"*.pm'], 

-

232 ), 

-

233 ) 

-

234 for perl_dir in perl_module_dirs(dpkg_architecture_variables, dctrl_bin) 

-

235 ) 

-

236 

-

237 yield ( 

-

238 BasenameGlobMatch( 

-

239 "*.ali", 

-

240 only_when_in_directory=_normalize_path("usr/lib"), 

-

241 path_type=PathType.FILE, 

-

242 recursive_match=True, 

-

243 ), 

-

244 SymbolicMode.parse_filesystem_mode( 

-

245 "a-w", 

-

246 attribute_path['"*.ali"'], 

-

247 ), 

-

248 ) 

-

249 

-

250 yield ( 

-

251 SHEBANG_SCRIPTS, 

-

252 _PATH_FILE_MODE, 

-

253 ) 

-

254 

-

255 yield ( 

-

256 MATCH_ANYTHING, 

-

257 SymbolicMode.parse_filesystem_mode( 

-

258 "go=rX,u+rw,a-s", 

-

259 attribute_path["**/*"], 

-

260 ), 

-

261 ) 

-
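
The rules above are only declarations: each yielded pair couples a MatchRule with the FileSystemMode to apply, leaving the actual chmod work to a separate driver. A minimal sketch of such a consumer follows (not debputy's actual driver; `fs_root` and `chmod_path` are hypothetical stand-ins):

def normalize_modes(rules, fs_root):
    # Walk each built-in rule and apply its mode to every matching path.
    for match_rule, fs_mode in rules:
        for path in match_rule.finditer(fs_root):
            chmod_path(path, fs_mode)  # hypothetical helper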
diff --git a/coverage-report/d_267b6307937f1878_deb_packaging_support_py.html b/coverage-report/d_267b6307937f1878_deb_packaging_support_py.html
deleted file mode 100644
index e2bda35..0000000
--- a/coverage-report/d_267b6307937f1878_deb_packaging_support_py.html
+++ /dev/null
@@ -1,1794 +0,0 @@

Coverage for src/debputy/deb_packaging_support.py: 13% (799 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200)

import collections
import contextlib
import dataclasses
import datetime
import functools
import hashlib
import itertools
import operator
import os
import re
import subprocess
import tempfile
import textwrap
from contextlib import ExitStack
from tempfile import mkstemp
from typing import (
    Iterable,
    List,
    Optional,
    Set,
    Dict,
    Sequence,
    Tuple,
    Iterator,
    Literal,
    TypeVar,
    FrozenSet,
    cast,
    Any,
    Union,
    Mapping,
)

import debian.deb822
from debian.changelog import Changelog
from debian.deb822 import Deb822

from debputy._deb_options_profiles import DebBuildOptionsAndProfiles
from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
from debputy.debhelper_emulation import (
    dhe_install_pkg_file_as_ctrl_file_if_present,
    dhe_dbgsym_root_dir,
)
from debputy.elf_util import find_all_elf_files, ELF_MAGIC
from debputy.exceptions import DebputyDpkgGensymbolsError
from debputy.filesystem_scan import FSPath, FSROOverlay
from debputy.highlevel_manifest import (
    HighLevelManifest,
    PackageTransformationDefinition,
    BinaryPackageData,
)
from debputy.maintscript_snippet import (
    ALL_CONTROL_SCRIPTS,
    MaintscriptSnippetContainer,
    STD_CONTROL_SCRIPTS,
)
from debputy.packages import BinaryPackage, SourcePackage
from debputy.packaging.alternatives import process_alternatives
from debputy.packaging.debconf_templates import process_debconf_templates
from debputy.packaging.makeshlibs import (
    compute_shlibs,
    ShlibsContent,
    generate_shlib_dirs,
)
from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
from debputy.plugin.api.impl import ServiceRegistryImpl
from debputy.plugin.api.impl_types import (
    MetadataOrMaintscriptDetector,
    PackageDataTable,
    ServiceManagerDetails,
)
from debputy.plugin.api.spec import (
    FlushableSubstvars,
    VirtualPath,
    PackageProcessingContext,
    ServiceDefinition,
)
from debputy.plugin.debputy.binary_package_rules import ServiceRule
from debputy.util import (
    _error,
    ensure_dir,
    assume_not_none,
    perl_module_dirs,
    perlxs_api_dependency,
    detect_fakeroot,
    grouper,
    _info,
    xargs,
    escape_shell,
    generated_content_dir,
    print_command,
    _warn,
)

VP = TypeVar("VP", bound=VirtualPath, covariant=True)

_T64_REGEX = re.compile("^lib.*t64(?:-nss)?$")
_T64_PROVIDES = "t64:Provides"


def generate_md5sums_file(control_output_dir: str, fs_root: VirtualPath) -> None:
    conffiles = os.path.join(control_output_dir, "conffiles")
    md5sums = os.path.join(control_output_dir, "md5sums")
    exclude = set()
    if os.path.isfile(conffiles):
        with open(conffiles, "rt") as fd:
            for line in fd:
                if not line.startswith("/"):
                    continue
                exclude.add("." + line.rstrip("\n"))
    had_content = False
    files = sorted(
        (
            path
            for path in fs_root.all_paths()
            if path.is_file and path.path not in exclude
        ),
        # Sort in the same order as dh_md5sums, which is not quite the same as dpkg/`all_paths()`.
        # Compare `.../doc/...` vs `.../doc-base/...` if you want to see the difference between
        # the two approaches.
        key=lambda p: p.path,
    )
    with open(md5sums, "wt") as md5fd:
        for member in files:
            path = member.path
            assert path.startswith("./")
            path = path[2:]
            with member.open(byte_io=True) as f:
                file_hash = hashlib.md5()
                while chunk := f.read(8192):
                    file_hash.update(chunk)
            had_content = True
            md5fd.write(f"{file_hash.hexdigest()}  {path}\n")
    if not had_content:
        os.unlink(md5sums)


def install_or_generate_conffiles(
    binary_package: BinaryPackage,
    root_dir: str,
    fs_root: VirtualPath,
    debian_dir: VirtualPath,
) -> None:
    conffiles_dest = os.path.join(root_dir, "conffiles")
    dhe_install_pkg_file_as_ctrl_file_if_present(
        debian_dir,
        binary_package,
        "conffiles",
        root_dir,
        0o0644,
    )
    etc_dir = fs_root.lookup("etc")
    if etc_dir:
        _add_conffiles(conffiles_dest, (p for p in etc_dir.all_paths() if p.is_file))
    if os.path.isfile(conffiles_dest):
        os.chmod(conffiles_dest, 0o0644)
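
For reference, a self-contained sketch of the chunked MD5 hashing used by generate_md5sums_file above; the path is a hypothetical example, and the two-space separator is the md5sums/`md5sum -c` convention:

import hashlib

h = hashlib.md5()
with open("usr/share/doc/foo/copyright", "rb") as f:  # hypothetical path
    while chunk := f.read(8192):
        h.update(chunk)
print(f"{h.hexdigest()}  usr/share/doc/foo/copyright")
# An empty file would produce the well-known empty-input digest:
# d41d8cd98f00b204e9800998ecf8427e  usr/share/doc/foo/copyright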


PERL_DEP_PROGRAM = 1
PERL_DEP_INDEP_PM_MODULE = 2
PERL_DEP_XS_MODULE = 4
PERL_DEP_ARCH_PM_MODULE = 8
PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES = ~(PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE)


@functools.lru_cache(2)  # In practice, param will be "perl" or "perl-base"
def _dpkg_perl_version(package: str) -> str:
    dpkg_version = None
    lines = (
        subprocess.check_output(["dpkg", "-s", package])
        .decode("utf-8")
        .splitlines(keepends=False)
    )
    for line in lines:
        if line.startswith("Version: "):
            dpkg_version = line[8:].strip()
            break
    assert dpkg_version is not None
    return dpkg_version


def handle_perl_code(
    dctrl_bin: BinaryPackage,
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    fs_root: FSPath,
    substvars: FlushableSubstvars,
) -> None:
    known_perl_inc_dirs = perl_module_dirs(dpkg_architecture_variables, dctrl_bin)
    detected_dep_requirements = 0

    # MakeMaker always makes lib and share dirs, but typically only one directory is actually used.
    for perl_inc_dir in known_perl_inc_dirs:
        p = fs_root.lookup(perl_inc_dir)
        if p and p.is_dir:
            p.prune_if_empty_dir()

    # FIXME: 80% of this belongs in a metadata detector, but that requires us to expose .walk() in the public API,
    # which will not be today.
    for d, pm_mode in [
        (known_perl_inc_dirs.vendorlib, PERL_DEP_INDEP_PM_MODULE),
        (known_perl_inc_dirs.vendorarch, PERL_DEP_ARCH_PM_MODULE),
    ]:
        inc_dir = fs_root.lookup(d)
        if not inc_dir:
            continue
        for path in inc_dir.all_paths():
            if not path.is_file:
                continue
            if path.name.endswith(".so"):
                detected_dep_requirements |= PERL_DEP_XS_MODULE
            elif path.name.endswith(".pm"):
                detected_dep_requirements |= pm_mode

    for path, children in fs_root.walk():
        if path.path == "./usr/share/doc":
            children.clear()
            continue
        if (
            not path.is_file
            or not path.has_fs_path
            or not (path.is_executable or path.name.endswith(".pl"))
        ):
            continue

        interpreter = path.interpreter()
        if interpreter is not None and interpreter.command_full_basename == "perl":
            detected_dep_requirements |= PERL_DEP_PROGRAM

    if not detected_dep_requirements:
        return
    dpackage = "perl"
    # FIXME: Currently, dh_perl supports perl-base via manual toggle.

    dependency = dpackage
    if not (detected_dep_requirements & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES):
        dependency += ":any"

    if detected_dep_requirements & PERL_DEP_XS_MODULE:
        dpkg_version = _dpkg_perl_version(dpackage)
        dependency += f" (>= {dpkg_version})"
    substvars.add_dependency("perl:Depends", dependency)

    if detected_dep_requirements & (PERL_DEP_XS_MODULE | PERL_DEP_ARCH_PM_MODULE):
        substvars.add_dependency("perl:Depends", perlxs_api_dependency())
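
A short worked example of how the perl dependency bit flags above combine (names from the listing; the comments are our reading of the logic):

detected = PERL_DEP_PROGRAM | PERL_DEP_INDEP_PM_MODULE
# Programs and arch-indep .pm modules are Multi-Arch "any" compatible:
assert not (detected & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES)  # yields "perl:any"

detected |= PERL_DEP_XS_MODULE
# An XS module is not, and it additionally forces a versioned dependency:
assert detected & PERL_DEP_MA_ANY_INCOMPATIBLE_TYPES  # yields "perl (>= <dpkg version>)"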


def usr_local_transformation(dctrl: BinaryPackage, fs_root: VirtualPath) -> None:
    path = fs_root.lookup("./usr/local")
    if path and any(path.iterdir):
        # There are two key issues:
        #  1) Getting the generated maintscript carried on to the final maintscript
        #  2) Making sure that manifest created directories do not trigger the "unused error".
        _error(
            f"Replacement of /usr/local paths is currently not supported in debputy (triggered by: {dctrl.name})."
        )


def _find_and_analyze_systemd_service_files(
    fs_root: VirtualPath,
    systemd_service_dir: Literal["system", "user"],
) -> Iterable[VirtualPath]:
    service_dirs = [
        f"./usr/lib/systemd/{systemd_service_dir}",
        f"./lib/systemd/{systemd_service_dir}",
    ]
    aliases: Dict[str, List[str]] = collections.defaultdict(list)
    seen = set()
    all_files = []

    for d in service_dirs:
        system_dir = fs_root.lookup(d)
        if not system_dir:
            continue
        for child in system_dir.iterdir:
            if child.is_symlink:
                dest = os.path.basename(child.readlink())
                aliases[dest].append(child.name)
            elif child.is_file and child.name not in seen:
                seen.add(child.name)
                all_files.append(child)

    return all_files


def detect_systemd_user_service_files(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
) -> None:
    for service_file in _find_and_analyze_systemd_service_files(fs_root, "user"):
        _error(
            f'Sorry, systemd user service files are not supported at the moment (saw "{service_file.path}"'
            f" in {dctrl.name})"
        )


# Generally, this should match the release date of oldstable or oldoldstable
_DCH_PRUNE_CUT_OFF_DATE = datetime.date(2019, 7, 6)
_DCH_MIN_NUM_OF_ENTRIES = 4


def _prune_dch_file(
    package: BinaryPackage,
    path: VirtualPath,
    is_changelog: bool,
    keep_versions: Optional[Set[str]],
    *,
    trim: bool = True,
) -> Tuple[bool, Optional[Set[str]]]:
    # TODO: Process `d/changelog` once.
    # Note we cannot assume that changelog_file is always `d/changelog` as you can have
    # per-package changelogs.
    with path.open() as fd:
        dch = Changelog(fd)
    shortened = False
    important_entries = 0
    binnmu_entries = []
    if is_changelog:
        kept_entries = []
        for block in dch:
            if block.other_pairs.get("binary-only", "no") == "yes":
                # Always keep binNMU entries (they are always in the top) and they do not count
                # towards our kept_entries limit
                binnmu_entries.append(block)
                continue
            block_date = block.date
            if block_date is None:
                _error(f"The Debian changelog was missing date in sign off line")
            entry_date = datetime.datetime.strptime(
                block_date, "%a, %d %b %Y %H:%M:%S %z"
            ).date()
            if (
                trim
                and entry_date < _DCH_PRUNE_CUT_OFF_DATE
                and important_entries >= _DCH_MIN_NUM_OF_ENTRIES
            ):
                shortened = True
                break
            # Match debhelper in incrementing after the check.
            important_entries += 1
            kept_entries.append(block)
    else:
        assert keep_versions is not None
        # The NEWS files should match the version for the dch to avoid lintian warnings.
        # If that means we remove all entries in the NEWS file, then we delete the NEWS
        # file (see #1021607)
        kept_entries = [b for b in dch if b.version in keep_versions]
        shortened = len(dch) > len(kept_entries)
        if shortened and not kept_entries:
            path.unlink()
            return True, None

    if not shortened and not binnmu_entries:
        return False, None

    parent_dir = assume_not_none(path.parent_dir)

    with path.replace_fs_path_content() as fs_path, open(
        fs_path, "wt", encoding="utf-8"
    ) as fd:
        for entry in kept_entries:
            fd.write(str(entry))

        if is_changelog and shortened:
            # For changelog (rather than NEWS) files, add a note about how to
            # get the full version.
            msg = textwrap.dedent(
                f"""\
                # Older entries have been removed from this changelog.
                # To read the complete changelog use `apt changelog {package.name}`.
                """
            )
            fd.write(msg)

    if binnmu_entries:
        if package.is_arch_all:
            _error(
                f"The package {package.name} is architecture all, but it is built during a binNMU. A binNMU build"
                " must not include architecture all packages"
            )

        with parent_dir.add_file(
            f"{path.name}.{package.resolved_architecture}"
        ) as binnmu_changelog, open(
            binnmu_changelog.fs_path,
            "wt",
            encoding="utf-8",
        ) as binnmu_fd:
            for entry in binnmu_entries:
                binnmu_fd.write(str(entry))

    if not shortened:
        return False, None
    return True, {b.version for b in kept_entries}


def fixup_debian_changelog_and_news_file(
    dctrl: BinaryPackage,
    fs_root: VirtualPath,
    is_native: bool,
    build_env: DebBuildOptionsAndProfiles,
) -> None:
    doc_dir = fs_root.lookup(f"./usr/share/doc/{dctrl.name}")
    if not doc_dir:
        return
    changelog = doc_dir.get("changelog.Debian")
    if changelog and is_native:
        changelog.name = "changelog"
    elif is_native:
        changelog = doc_dir.get("changelog")

    trim = False if "notrimdch" in build_env.deb_build_options else True

    kept_entries = None
    pruned_changelog = False
    if changelog and changelog.has_fs_path:
        pruned_changelog, kept_entries = _prune_dch_file(
            dctrl, changelog, True, None, trim=trim
        )

    if not trim:
        return

    news_file = doc_dir.get("NEWS.Debian")
    if news_file and news_file.has_fs_path and pruned_changelog:
        _prune_dch_file(dctrl, news_file, False, kept_entries)
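
A condensed restatement of the keep/drop decision implemented in _prune_dch_file above (illustrative only; the real loop also special-cases binNMU entries and NEWS files):

import datetime

def keeps_entry(entry_date: datetime.date, kept_so_far: int, trim: bool = True) -> bool:
    # Keep at least _DCH_MIN_NUM_OF_ENTRIES (4) entries; after that,
    # drop anything older than _DCH_PRUNE_CUT_OFF_DATE (2019-07-06).
    return not (
        trim
        and entry_date < datetime.date(2019, 7, 6)
        and kept_so_far >= 4
    )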


_UPSTREAM_CHANGELOG_SOURCE_DIRS = [
    ".",
    "doc",
    "docs",
]
_UPSTREAM_CHANGELOG_NAMES = {
    # The value is a priority to match the debhelper order.
    # - The suffix weighs heavier than the basename (because that is what debhelper did)
    #
    # We list the name/suffix in order of priority in the code. That makes it easier to
    # see the priority directly, but it gives the "lowest" value to the most important items
    f"{n}{s}": (sw, nw)
    for (nw, n), (sw, s) in itertools.product(
        enumerate(["changelog", "changes", "history"], start=1),
        enumerate(["", ".txt", ".md", ".rst"], start=1),
    )
}
_NONE_TUPLE = (None, (0, 0))


def _detect_upstream_changelog(names: Iterable[str]) -> Optional[str]:
    matches = []
    for name in names:
        match_priority = _UPSTREAM_CHANGELOG_NAMES.get(name.lower())
        if match_priority is not None:
            matches.append((name, match_priority))
    return min(matches, default=_NONE_TUPLE, key=operator.itemgetter(1))[0]


def install_upstream_changelog(
    dctrl_bin: BinaryPackage,
    fs_root: FSPath,
    source_fs_root: VirtualPath,
) -> None:
    doc_dir = f"./usr/share/doc/{dctrl_bin.name}"
    bdir = fs_root.lookup(doc_dir)
    if bdir and not bdir.is_dir:
        # "/usr/share/doc/foo -> bar" symlink. Avoid croaking on those per:
        # https://salsa.debian.org/debian/debputy/-/issues/49
        return

    if bdir:
        if bdir.get("changelog") or bdir.get("changelog.gz"):
            # Upstream's build system already provided the changelog with the correct name.
            # Accept that as the canonical one.
            return
        upstream_changelog = _detect_upstream_changelog(
            p.name for p in bdir.iterdir if p.is_file and p.has_fs_path and p.size > 0
        )
        if upstream_changelog:
            p = bdir.lookup(upstream_changelog)
            assert p is not None  # Mostly as a typing hint
            p.name = "changelog"
            return
    for dirname in _UPSTREAM_CHANGELOG_SOURCE_DIRS:
        dir_path = source_fs_root.lookup(dirname)
        if not dir_path or not dir_path.is_dir:
            continue
        changelog_name = _detect_upstream_changelog(
            p.name
            for p in dir_path.iterdir
            if p.is_file and p.has_fs_path and p.size > 0
        )
        if changelog_name:
            if bdir is None:
                bdir = fs_root.mkdirs(doc_dir)
            bdir.insert_file_from_fs_path(
                "changelog",
                dir_path[changelog_name].fs_path,
            )
            break
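
A small worked example of the priority tuples produced by the comprehension above (values follow directly from the enumerate/product as written):

# (suffix_weight, basename_weight); min() picks the smallest tuple.
assert _UPSTREAM_CHANGELOG_NAMES["changelog"] == (1, 1)
assert _UPSTREAM_CHANGELOG_NAMES["changes"] == (1, 2)
assert _UPSTREAM_CHANGELOG_NAMES["changelog.txt"] == (2, 1)
# Hence a plain "changes" outranks "changelog.txt" - the suffix weighs
# heavier than the basename:
assert _detect_upstream_changelog(["changelog.txt", "changes"]) == "changes"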


@dataclasses.dataclass(slots=True)
class _ElfInfo:
    path: VirtualPath
    fs_path: str
    is_stripped: Optional[bool] = None
    build_id: Optional[str] = None
    dbgsym: Optional[FSPath] = None


def _elf_static_lib_walk_filter(
    fs_path: VirtualPath,
    children: List[VP],
) -> bool:
    if (
        fs_path.name == ".build-id"
        and assume_not_none(fs_path.parent_dir).name == "debug"
    ):
        children.clear()
        return False
    # Deal with some special cases, where certain files are not supposed to be stripped in a given directory
    if "debug/" in fs_path.path or fs_path.name.endswith("debug/"):
        # FIXME: We need a way to opt out of this per #468333/#1016122
        for so_file in (f for f in list(children) if f.name.endswith(".so")):
            children.remove(so_file)
    if "/guile/" in fs_path.path or fs_path.name == "guile":
        for go_file in (f for f in list(children) if f.name.endswith(".go")):
            children.remove(go_file)
    return True


@contextlib.contextmanager
def _all_elf_files(fs_root: VirtualPath) -> Iterator[Dict[str, _ElfInfo]]:
    all_elf_files = find_all_elf_files(
        fs_root,
        walk_filter=_elf_static_lib_walk_filter,
    )
    if not all_elf_files:
        yield {}
        return
    with ExitStack() as cm_stack:
        resolved = (
            (p, cm_stack.enter_context(p.replace_fs_path_content()))
            for p in all_elf_files
        )
        elf_info = {
            fs_path: _ElfInfo(
                path=assume_not_none(fs_root.lookup(detached_path.path)),
                fs_path=fs_path,
            )
            for detached_path, fs_path in resolved
        }
        _resolve_build_ids(elf_info)
        yield elf_info


def _find_all_static_libs(
    fs_root: FSPath,
) -> Iterator[FSPath]:
    for path, children in fs_root.walk():
        # Matching the logic of dh_strip for now.
        if not _elf_static_lib_walk_filter(path, children):
            continue
        if not path.is_file:
            continue
        if path.name.startswith("lib") and path.name.endswith("_g.a"):
            # _g.a are historically ignored. I do not remember why, but guessing the "_g" is
            # an encoding of gcc's -g parameter into the filename (with -g meaning "I want debug
            # symbols")
            continue
        if not path.has_fs_path:
            continue
        with path.open(byte_io=True) as fd:
            magic = fd.read(8)
            if magic not in (b"!<arch>\n", b"!<thin>\n"):
                continue
            # Maybe we should see if the first file looks like an index file.
            # Three random .a samples suggests the index file is named "/"
            # Not sure if we should skip past it and then do the ELF check or just assume
            # that "index => static lib".
            data = fd.read(1024 * 1024)
            if b"\0" not in data and ELF_MAGIC not in data:
                continue
        yield path


@contextlib.contextmanager
def _all_static_libs(fs_root: FSPath) -> Iterator[List[str]]:
    all_static_libs = list(_find_all_static_libs(fs_root))
    if not all_static_libs:
        yield []
        return
    with ExitStack() as cm_stack:
        resolved: List[str] = [
            cm_stack.enter_context(p.replace_fs_path_content()) for p in all_static_libs
        ]
        yield resolved


_FILE_BUILD_ID_RE = re.compile(rb"BuildID(?:\[\S+\])?=([A-Fa-f0-9]+)")


def _resolve_build_ids(elf_info: Dict[str, _ElfInfo]) -> None:
    static_cmd = ["file", "-00", "-N"]
    if detect_fakeroot():
        static_cmd.append("--no-sandbox")

    for cmd in xargs(static_cmd, (i.fs_path for i in elf_info.values())):
        _info(f"Looking up build-ids via: {escape_shell(*cmd)}")
        output = subprocess.check_output(cmd)

        # Trailing "\0" gives an empty element in the end when splitting, so strip it out
        lines = output.rstrip(b"\0").split(b"\0")

        for fs_path_b, verdict in grouper(lines, 2, incomplete="strict"):
            fs_path = fs_path_b.decode("utf-8")
            info = elf_info[fs_path]
            info.is_stripped = b"not stripped" not in verdict
            m = _FILE_BUILD_ID_RE.search(verdict)
            if m:
                info.build_id = m.group(1).decode("utf-8")


def _make_debug_file(
    objcopy: str, fs_path: str, build_id: str, dbgsym_fs_root: FSPath
) -> FSPath:
    dbgsym_dirname = f"./usr/lib/debug/.build-id/{build_id[0:2]}/"
    dbgsym_basename = f"{build_id[2:]}.debug"
    dbgsym_dir = dbgsym_fs_root.mkdirs(dbgsym_dirname)
    if dbgsym_basename in dbgsym_dir:
        return dbgsym_dir[dbgsym_basename]
    # objcopy is a pain and includes the basename verbatim when you do `--add-gnu-debuglink` without having an option
    # to overwrite the physical basename. So we have to ensure that the physical basename matches the installed
    # basename.
    with dbgsym_dir.add_file(
        dbgsym_basename,
        unlink_if_exists=False,
        fs_basename_matters=True,
        subdir_key="dbgsym-build-ids",
    ) as dbgsym:
        try:
            subprocess.check_call(
                [
                    objcopy,
                    "--only-keep-debug",
                    "--compress-debug-sections",
                    fs_path,
                    dbgsym.fs_path,
                ]
            )
        except subprocess.CalledProcessError:
            full_command = (
                f"{objcopy} --only-keep-debug --compress-debug-sections"
                f" {escape_shell(fs_path, dbgsym.fs_path)}"
            )
            _error(
                f"Attempting to create a .debug file failed. Please review the error message from {objcopy} to"
                f" understand what went wrong. Full command was: {full_command}"
            )
    return dbgsym


def _strip_binary(strip: str, options: List[str], paths: Iterable[str]) -> None:
    # We assume the paths are obtained via `p.replace_fs_path_content()`,
    # which is the case at the time of writing and should remain so forever.
    it = iter(paths)
    first = next(it, None)
    if first is None:
        return
    static_cmd = [strip]
    static_cmd.extend(options)

    for cmd in xargs(static_cmd, itertools.chain((first,), (f for f in it))):
        _info(f"Removing unnecessary ELF debug info via: {escape_shell(*cmd)}")
        try:
            subprocess.check_call(
                cmd,
                stdin=subprocess.DEVNULL,
                restore_signals=True,
            )
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to remove ELF debug info failed. Please review the error from {strip} above"
                f" to understand what went wrong."
            )


def _attach_debug(objcopy: str, elf_binary: VirtualPath, dbgsym: FSPath) -> None:
    dbgsym_fs_path: str
    with dbgsym.replace_fs_path_content() as dbgsym_fs_path:
        cmd = [objcopy, "--add-gnu-debuglink", dbgsym_fs_path, elf_binary.fs_path]
        print_command(*cmd)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            _error(
                f"Attempting to attach ELF debug link to ELF binary failed. Please review the error from {objcopy}"
                f" above to understand what went wrong."
            )


def _run_dwz(
    dctrl: BinaryPackage,
    dbgsym_fs_root: FSPath,
    unstripped_elf_info: List[_ElfInfo],
) -> None:
    if not unstripped_elf_info or dctrl.is_udeb:
        return
    dwz_cmd = ["dwz"]
    dwz_ma_dir_name = f"usr/lib/debug/.dwz/{dctrl.deb_multiarch}"
    dwz_ma_basename = f"{dctrl.name}.debug"
    multifile = f"{dwz_ma_dir_name}/{dwz_ma_basename}"
    build_time_multifile = None
    if len(unstripped_elf_info) > 1:
        fs_content_dir = generated_content_dir()
        fd, build_time_multifile = mkstemp(suffix=dwz_ma_basename, dir=fs_content_dir)
        os.close(fd)
        dwz_cmd.append(f"-m{build_time_multifile}")
        dwz_cmd.append(f"-M/{multifile}")

    # TODO: configuration for disabling multi-file and tweaking memory limits

    dwz_cmd.extend(e.fs_path for e in unstripped_elf_info)

    _info(f"Deduplicating ELF debug info via: {escape_shell(*dwz_cmd)}")
    try:
        subprocess.check_call(dwz_cmd)
    except subprocess.CalledProcessError:
        _error(
            "Attempting to deduplicate ELF info via dwz failed. Please review the output from dwz above"
            " to understand what went wrong."
        )
    if build_time_multifile is not None and os.stat(build_time_multifile).st_size > 0:
        dwz_dir = dbgsym_fs_root.mkdirs(dwz_ma_dir_name)
        dwz_dir.insert_file_from_fs_path(
            dwz_ma_basename,
            build_time_multifile,
            mode=0o644,
            require_copy_on_write=False,
            follow_symlinks=False,
        )


def relocate_dwarves_into_dbgsym_packages(
    dctrl: BinaryPackage,
    package_fs_root: FSPath,
    dbgsym_fs_root: VirtualPath,
) -> List[str]:
    # FIXME: hardlinks
    with _all_static_libs(package_fs_root) as all_static_files:
        if all_static_files:
            strip = dctrl.cross_command("strip")
            _strip_binary(
                strip,
                [
                    "--strip-debug",
                    "--remove-section=.comment",
                    "--remove-section=.note",
                    "--enable-deterministic-archives",
                    "-R",
                    ".gnu.lto_*",
                    "-R",
                    ".gnu.debuglto_*",
                    "-N",
                    "__gnu_lto_slim",
                    "-N",
                    "__gnu_lto_v1",
                ],
                all_static_files,
            )

    with _all_elf_files(package_fs_root) as all_elf_files:
        if not all_elf_files:
            return []
        objcopy = dctrl.cross_command("objcopy")
        strip = dctrl.cross_command("strip")
        unstripped_elf_info = list(
            e for e in all_elf_files.values() if not e.is_stripped
        )

        _run_dwz(dctrl, dbgsym_fs_root, unstripped_elf_info)

        for elf_info in unstripped_elf_info:
            elf_info.dbgsym = _make_debug_file(
                objcopy,
                elf_info.fs_path,
                assume_not_none(elf_info.build_id),
                dbgsym_fs_root,
            )

        # Note: When running strip, we do so also on already stripped ELF binaries because that is what debhelper does!
        # Executables (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note"],
            (i.fs_path for i in all_elf_files.values() if i.path.is_executable),
        )

        # Libraries (defined by mode)
        _strip_binary(
            strip,
            ["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"],
            (i.fs_path for i in all_elf_files.values() if not i.path.is_executable),
        )

        for elf_info in unstripped_elf_info:
            _attach_debug(
                objcopy,
                assume_not_none(elf_info.path),
                assume_not_none(elf_info.dbgsym),
            )

        # Set for uniqueness
        all_debug_info = sorted(
            {assume_not_none(i.build_id) for i in unstripped_elf_info}
        )

        dbgsym_doc_dir = dbgsym_fs_root.mkdirs("./usr/share/doc/")
        dbgsym_doc_dir.add_symlink(f"{dctrl.name}-dbgsym", dctrl.name)
        return all_debug_info
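
For orientation, the .build-id layout used by _make_debug_file above splits the hex build-id after its first two characters (the build-id value below is just an example):

build_id = "4f3a9c0e6d2b815740faceb00c5d1e2f3a4b5c6d"  # example value
dbgsym_path = f"usr/lib/debug/.build-id/{build_id[0:2]}/{build_id[2:]}.debug"
# -> usr/lib/debug/.build-id/4f/3a9c0e6d2b815740faceb00c5d1e2f3a4b5c6d.debug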


def run_package_processors(
    manifest: HighLevelManifest,
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
) -> None:
    pppps = manifest.plugin_provided_feature_set.package_processors_in_order()
    binary_package = package_metadata_context.binary_package
    for pppp in pppps:
        if not pppp.applies_to(binary_package):
            continue
        pppp.run_package_processor(fs_root, None, package_metadata_context)


def cross_package_control_files(
    package_data_table: PackageDataTable,
    manifest: HighLevelManifest,
) -> None:
    errors = []
    combined_shlibs = ShlibsContent()
    shlibs_dir = None
    shlib_dirs: List[str] = []
    shlibs_local = manifest.debian_dir.get("shlibs.local")
    if shlibs_local and shlibs_local.is_file:
        with shlibs_local.open() as fd:
            combined_shlibs.add_entries_from_shlibs_file(fd)

    debputy_plugin_metadata = manifest.plugin_provided_feature_set.plugin_data[
        "debputy"
    ]

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        control_output_dir = assume_not_none(binary_package_data.control_output_dir)
        fs_root = binary_package_data.fs_root
        package_state = manifest.package_state_for(binary_package.name)
        related_udeb_package = (
            binary_package_data.package_metadata_context.related_udeb_package
        )

        udeb_package_name = related_udeb_package.name if related_udeb_package else None
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            debputy_plugin_metadata,
            "compute_shlibs",
        )
        try:
            soname_info_list = compute_shlibs(
                binary_package,
                control_output_dir,
                fs_root,
                manifest,
                udeb_package_name,
                ctrl,
                package_state.reserved_packager_provided_files,
                combined_shlibs,
            )
        except DebputyDpkgGensymbolsError as e:
            errors.append(e.message)
        else:
            if soname_info_list:
                if shlibs_dir is None:
                    shlibs_dir = generated_content_dir(
                        subdir_key="_shlibs_materialization_dir"
                    )
                generate_shlib_dirs(
                    binary_package,
                    shlibs_dir,
                    soname_info_list,
                    shlib_dirs,
                )
    if errors:
        for error in errors:
            _warn(error)
        _error("Stopping due to the errors above")

    generated_shlibs_local = None
    if combined_shlibs:
        if shlibs_dir is None:
            shlibs_dir = generated_content_dir(subdir_key="_shlibs_materialization_dir")
        generated_shlibs_local = os.path.join(shlibs_dir, "shlibs.local")
        with open(generated_shlibs_local, "wt", encoding="utf-8") as fd:
            combined_shlibs.write_to(fd)
        _info(f"Generated {generated_shlibs_local} for dpkg-shlibdeps")

    for binary_package_data in package_data_table:
        binary_package = binary_package_data.binary_package
        if binary_package.is_arch_all or not binary_package.should_be_acted_on:
            continue
        binary_package_data.ctrl_creator.shlibs_details = (
            generated_shlibs_local,
            shlib_dirs,
        )


def _relevant_service_definitions(
    service_rule: ServiceRule,
    service_managers: Union[List[str], FrozenSet[str]],
    by_service_manager_key: Mapping[
        Tuple[str, str, str, str], Tuple[ServiceManagerDetails, ServiceDefinition[Any]]
    ],
    aliases: Mapping[str, Sequence[Tuple[str, str, str, str]]],
) -> Iterable[Tuple[Tuple[str, str, str, str], ServiceDefinition[Any]]]:
    as_keys = (key for key in aliases[service_rule.service])

    pending_queue = {
        key
        for key in as_keys
        if key in by_service_manager_key
        and service_rule.applies_to_service_manager(key[-1])
    }
    relevant_names = {}
    seen_keys = set()

    if not pending_queue:
        service_manager_names = ", ".join(sorted(service_managers))
        _error(
            f"None of the service managers ({service_manager_names}) detected a service named"
            f" {service_rule.service} (type: {service_rule.type_of_service}, scope: {service_rule.service_scope}),"
            f" but the manifest definition at {service_rule.definition_source} requested that."
        )

    while pending_queue:
        next_key = pending_queue.pop()
        seen_keys.add(next_key)
        _, definition = by_service_manager_key[next_key]
        yield next_key, definition
        for name in definition.names:
            for target_key in aliases[name]:
                if (
                    target_key not in seen_keys
                    and service_rule.applies_to_service_manager(target_key[-1])
                ):
                    pending_queue.add(target_key)

    return relevant_names

def handle_service_management(
    binary_package_data: BinaryPackageData,
    manifest: HighLevelManifest,
    package_metadata_context: PackageProcessingContext,
    fs_root: VirtualPath,
    feature_set: PluginProvidedFeatureSet,
) -> None:

    by_service_manager_key = {}
    aliases_by_name = collections.defaultdict(list)

    state = manifest.package_state_for(binary_package_data.binary_package.name)
    all_service_managers = list(feature_set.service_managers)
    requested_service_rules = state.requested_service_rules
    for requested_service_rule in requested_service_rules:
        if not requested_service_rule.service_managers:
            continue
        for manager in requested_service_rule.service_managers:
            if manager not in feature_set.service_managers:
                # FIXME: Missing definition source; move to parsing.
                _error(
                    f"Unknown service manager {manager} used at {requested_service_rule.definition_source}"
                )

    for service_manager_details in feature_set.service_managers.values():
        service_registry = ServiceRegistryImpl(service_manager_details)
        service_manager_details.service_detector(
            fs_root,
            service_registry,
            package_metadata_context,
        )

        service_definitions = service_registry.detected_services
        if not service_definitions:
            continue

        for plugin_provided_definition in service_definitions:
            key = (
                plugin_provided_definition.name,
                plugin_provided_definition.type_of_service,
                plugin_provided_definition.service_scope,
                service_manager_details.service_manager,
            )
            by_service_manager_key[key] = (
                service_manager_details,
                plugin_provided_definition,
            )

            for name in plugin_provided_definition.names:
                aliases_by_name[name].append(key)

    for requested_service_rule in requested_service_rules:
        explicit_service_managers = requested_service_rule.service_managers is not None
        related_service_managers = (
            requested_service_rule.service_managers or all_service_managers
        )
        seen_service_managers = set()
        for service_key, service_definition in _relevant_service_definitions(
            requested_service_rule,
            related_service_managers,
            by_service_manager_key,
            aliases_by_name,
        ):
            sm = service_key[-1]
            seen_service_managers.add(sm)
            by_service_manager_key[service_key] = (
                by_service_manager_key[service_key][0],
                requested_service_rule.apply_to_service_definition(service_definition),
            )
        if (
            explicit_service_managers
            and seen_service_managers != related_service_managers
        ):
            missing_sms = ", ".join(
                sorted(related_service_managers - seen_service_managers)
            )
            _error(
                f"The rule {requested_service_rule.definition_source} explicitly requested which service managers"
                f" it should apply to. However, the following service managers did not provide a service of that"
                f" name, type and scope: {missing_sms}. Please check the rule is correct and either provide the"
                f" missing service or update the definition to match the relevant services."
            )

    per_service_manager = {}

    for (
        service_manager_details,
        plugin_provided_definition,
    ) in by_service_manager_key.values():
        service_manager = service_manager_details.service_manager
        if service_manager not in per_service_manager:
            per_service_manager[service_manager] = (
                service_manager_details,
                [plugin_provided_definition],
            )
        else:
            per_service_manager[service_manager][1].append(plugin_provided_definition)

    for (
        service_manager_details,
        final_service_definitions,
    ) in per_service_manager.values():
        ctrl = binary_package_data.ctrl_creator.for_plugin(
            service_manager_details.plugin_metadata,
            service_manager_details.service_manager,
            default_snippet_order="service",
        )
        _info(f"Applying {final_service_definitions}")
        service_manager_details.service_integrator(
            final_service_definitions,
            ctrl,
            package_metadata_context,
        )


def setup_control_files(
    binary_package_data: BinaryPackageData,
    manifest: HighLevelManifest,
    dbgsym_fs_root: VirtualPath,
    dbgsym_ids: List[str],
    package_metadata_context: PackageProcessingContext,
    *,
    allow_ctrl_file_management: bool = True,
) -> None:
    binary_package = package_metadata_context.binary_package
    control_output_dir = assume_not_none(binary_package_data.control_output_dir)
    fs_root = binary_package_data.fs_root
    package_state = manifest.package_state_for(binary_package.name)

    feature_set: PluginProvidedFeatureSet = manifest.plugin_provided_feature_set
    metadata_maintscript_detectors = feature_set.metadata_maintscript_detectors
    substvars = binary_package_data.substvars

    snippets = STD_CONTROL_SCRIPTS
    generated_triggers = list(binary_package_data.ctrl_creator.generated_triggers())

    if binary_package.is_udeb:
        # FIXME: Add missing udeb scripts
        snippets = ["postinst"]

    if allow_ctrl_file_management:
        process_alternatives(
            binary_package,
            fs_root,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
        )
        process_debconf_templates(
            binary_package,
            package_state.reserved_packager_provided_files,
            package_state.maintscript_snippets,
            substvars,
            control_output_dir,
        )

        handle_service_management(
            binary_package_data,
            manifest,
            package_metadata_context,
            fs_root,
            feature_set,
        )

        plugin_detector_definition: MetadataOrMaintscriptDetector
        for plugin_detector_definition in itertools.chain.from_iterable(
            metadata_maintscript_detectors.values()
        ):
            if not plugin_detector_definition.applies_to(binary_package):
                continue
            ctrl = binary_package_data.ctrl_creator.for_plugin(
                plugin_detector_definition.plugin_metadata,
                plugin_detector_definition.detector_id,
            )
            plugin_detector_definition.run_detector(
                fs_root, ctrl, package_metadata_context
            )

        for script in snippets:
            _generate_snippet(
                control_output_dir,
                script,
                package_state.maintscript_snippets,
            )

    else:
        state = manifest.package_state_for(binary_package_data.binary_package.name)
        if state.requested_service_rules:
            service_source = state.requested_service_rules[0].definition_source
            _error(
                f"Use of service definitions (such as {service_source}) is not supported in this integration mode"
            )
        for script, snippet_container in package_state.maintscript_snippets.items():
            for snippet in snippet_container.all_snippets():
                source = snippet.definition_source
                _error(
                    f"This integration mode cannot use maintscript snippets"
                    f' (since dh_installdeb has already been called). However, "{source}" triggered'
                    f" a snippet for {script}. Please remove the offending definition if it is from"
                    f" the manifest or file a bug if it is caused by a built-in rule."
                )

        for trigger in generated_triggers:
            source = f"{trigger.provider.plugin_name}:{trigger.provider_source_id}"
            _error(
                f"This integration mode must not generate triggers"
                f' (since dh_installdeb has already been called). However, "{source}" created'
                f" a trigger. Please remove the offending definition if it is from"
                f" the manifest or file a bug if it is caused by a built-in rule."
            )

        shlibdeps_definition = [
            d
            for d in metadata_maintscript_detectors["debputy"]
            if d.detector_id == "dpkg-shlibdeps"
        ][0]

        ctrl = binary_package_data.ctrl_creator.for_plugin(
            shlibdeps_definition.plugin_metadata,
            shlibdeps_definition.detector_id,
        )
        shlibdeps_definition.run_detector(fs_root, ctrl, package_metadata_context)

        dh_staging_dir = os.path.join("debian", binary_package.name, "DEBIAN")
        try:
            with os.scandir(dh_staging_dir) as it:
                existing_control_files = [
                    f.path
                    for f in it
                    if f.is_file(follow_symlinks=False)
                    and f.name not in ("control", "md5sums")
                ]
        except FileNotFoundError:
            existing_control_files = []

        if existing_control_files:
            cmd = ["cp", "-a"]
            cmd.extend(existing_control_files)
            cmd.append(control_output_dir)
            print_command(*cmd)
            subprocess.check_call(cmd)

    if binary_package.is_udeb:
        _generate_control_files(
            binary_package_data.source_package,
            binary_package,
            package_state,
            control_output_dir,
            fs_root,
            substvars,
            # We never built udebs due to #797391, so skip over this information,
            # when creating the udeb
            None,
            None,
        )
        return

    if generated_triggers:
        assert not allow_ctrl_file_management
        dest_file = os.path.join(control_output_dir, "triggers")
        with open(dest_file, "at", encoding="utf-8") as fd:
            fd.writelines(
                textwrap.dedent(
                    f"""\
                    # Added by {t.provider_source_id} from {t.provider.plugin_name}
                    {t.dpkg_trigger_type} {t.dpkg_trigger_target}
                    """
                )
                for t in generated_triggers
            )
            os.chmod(fd.fileno(), 0o644)

    if allow_ctrl_file_management:
        install_or_generate_conffiles(
            binary_package,
            control_output_dir,
            fs_root,
            manifest.debian_dir,
        )

    _generate_control_files(
        binary_package_data.source_package,
        binary_package,
        package_state,
        control_output_dir,
        fs_root,
        substvars,
        dbgsym_fs_root,
        dbgsym_ids,
    )


def _generate_snippet(
    control_output_dir: str,
    script: str,
    maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
) -> None:
    debputy_snippets = maintscript_snippets.get(script)
    if debputy_snippets is None:
        return
    reverse = script in ("prerm", "postrm")
    snippets = [
        debputy_snippets.generate_snippet(reverse=reverse),
        debputy_snippets.generate_snippet(snippet_order="service", reverse=reverse),
    ]
    if reverse:
        snippets = reversed(snippets)
    full_content = "".join(f"{s}\n" for s in filter(None, snippets))
    if not full_content:
        return
    filename = os.path.join(control_output_dir, script)
    with open(filename, "wt") as fd:
        fd.write("#!/bin/sh\nset -e\n\n")
        fd.write(full_content)
        os.chmod(fd.fileno(), 0o755)


def _add_conffiles(
    conffiles_dest: str,
    conffile_matches: Iterable[VirtualPath],
) -> None:
    with open(conffiles_dest, "at") as fd:
        for conffile_match in conffile_matches:
            conffile = conffile_match.absolute
            assert conffile_match.is_file
            fd.write(f"{conffile}\n")
    if os.stat(conffiles_dest).st_size == 0:
        os.unlink(conffiles_dest)


def _ensure_base_substvars_defined(substvars: FlushableSubstvars) -> None:
    for substvar in ("misc:Depends", "misc:Pre-Depends"):
        if substvar not in substvars:
            substvars[substvar] = ""


def _compute_installed_size(fs_root: VirtualPath) -> int:
    """Emulate dpkg-gencontrol's code for computing the default Installed-Size"""
    size_in_kb = 0
    hard_links = set()
    for path in fs_root.all_paths():
        if not path.is_dir and path.has_fs_path:
            st = path.stat()
            if st.st_nlink > 1:
                hl_key = (st.st_dev, st.st_ino)
                if hl_key in hard_links:
                    continue
                hard_links.add(hl_key)
            path_size = (st.st_size + 1023) // 1024
        elif path.is_symlink:
            path_size = (len(path.readlink()) + 1023) // 1024
        else:
            path_size = 1
        size_in_kb += path_size
    return size_in_kb
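
A self-contained approximation of the same Installed-Size arithmetic over a real directory tree (hard-link deduplication omitted for brevity; `root` is any staged package tree):

import os

def installed_size_kib(root: str) -> int:
    size = 0
    for dirpath, _dirnames, filenames in os.walk(root):
        size += 1  # each directory counts as 1 KiB
        for name in filenames:
            p = os.path.join(dirpath, name)
            if os.path.islink(p):
                # Symlinks count the length of their target, rounded up.
                size += (len(os.readlink(p)) + 1023) // 1024
            else:
                # Regular files are rounded up to whole KiB.
                size += (os.lstat(p).st_size + 1023) // 1024
    return size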

1315 

-

1316def _generate_dbgsym_control_file_if_relevant( 

-

1317 binary_package: BinaryPackage, 

-

1318 dbgsym_fs_root: VirtualPath, 

-

1319 dbgsym_root_dir: str, 

-

1320 dbgsym_ids: str, 

-

1321 multi_arch: Optional[str], 

-

1322 dctrl: str, 

-

1323 extra_common_params: Sequence[str], 

-

1324) -> None: 

-

1325 section = binary_package.archive_section 

-

1326 component = "" 

-

1327 extra_params = [] 

-

1328 if section is not None and "/" in section and not section.startswith("main/"): 

-

1329 component = section.split("/", 1)[1] + "/" 

-

1330 if multi_arch != "same": 

-

1331 extra_params.append("-UMulti-Arch") 

-

1332 extra_params.append("-UReplaces") 

-

1333 extra_params.append("-UBreaks") 

-

1334 dbgsym_control_dir = os.path.join(dbgsym_root_dir, "DEBIAN") 

-

1335 ensure_dir(dbgsym_control_dir) 

-

1336 # Pass it via cmd-line to make it more visible that we are providing the 

-

1337 # value. It also prevents the dbgsym package from picking up this value. 

-

1338 ctrl_fs_root = FSROOverlay.create_root_dir("DEBIAN", dbgsym_control_dir) 

-

1339 total_size = _compute_installed_size(dbgsym_fs_root) + _compute_installed_size( 

-

1340 ctrl_fs_root 

-

1341 ) 

-

1342 extra_params.append(f"-VInstalled-Size={total_size}") 

-

1343 extra_params.extend(extra_common_params) 

-

1344 

-

1345 package = binary_package.name 

-

1346 package_selector = ( 

-

1347 binary_package.name 

-

1348 if dctrl == "debian/control" 

-

1349 else f"{binary_package.name}-dbgsym" 

-

1350 ) 

-

1351 dpkg_cmd = [ 

-

1352 "dpkg-gencontrol", 

-

1353 f"-p{package_selector}", 

-

1354 # FIXME: Support d/<pkg>.changelog at some point. 

-

1355 "-ldebian/changelog", 

-

1356 "-T/dev/null", 

-

1357 f"-c{dctrl}", 

-

1358 f"-P{dbgsym_root_dir}", 

-

1359 f"-DPackage={package}-dbgsym", 

-

1360 "-DDepends=" + package + " (= ${binary:Version})", 

-

1361 f"-DDescription=debug symbols for {package}", 

-

1362 f"-DSection={component}debug", 

-

1363 f"-DBuild-Ids={dbgsym_ids}", 

-

1364 "-UPre-Depends", 

-

1365 "-URecommends", 

-

1366 "-USuggests", 

-

1367 "-UEnhances", 

-

1368 "-UProvides", 

-

1369 "-UEssential", 

-

1370 "-UConflicts", 

-

1371 "-DPriority=optional", 

-

1372 "-UHomepage", 

-

1373 "-UImportant", 

-

1374 "-UBuilt-Using", 

-

1375 "-UStatic-Built-Using", 

-

1376 "-DAuto-Built-Package=debug-symbols", 

-

1377 "-UProtected", 

-

1378 *extra_params, 

-

1379 ] 

-

1380 print_command(*dpkg_cmd) 

-

1381 try: 

-

1382 subprocess.check_call(dpkg_cmd) 

-

1383 except subprocess.CalledProcessError: 

-

1384 _error( 

-

1385 f"Attempting to generate DEBIAN/control file for {package}-dbgsym failed. Please review the output from " 

-

1386 " dpkg-gencontrol above to understand what went wrong." 

-

1387 ) 

-

1388 os.chmod(os.path.join(dbgsym_root_dir, "DEBIAN", "control"), 0o644) 

-

1389 

-

1390 

-

1391def _all_parent_directories_of(directories: Iterable[str]) -> Set[str]: 

-

1392 result = {"."} 

-

1393 for path in directories: 

-

1394 current = os.path.dirname(path) 

-

1395 while current and current not in result: 

-

1396 result.add(current) 

-

1397 current = os.path.dirname(current) 

-

1398 return result 

-

1399 

-

1400 

-

1401 def _auto_compute_multi_arch(
1402     binary_package: BinaryPackage,
1403     control_output_dir: str,
1404     fs_root: FSPath,
1405 ) -> Optional[str]:
1406     resolved_arch = binary_package.resolved_architecture
1407     if resolved_arch == "all":
1408         return None
1409     if any(
1410         script
1411         for script in ALL_CONTROL_SCRIPTS
1412         if os.path.isfile(os.path.join(control_output_dir, script))
1413     ):
1414         return None
1415
1416     resolved_multiarch = binary_package.deb_multiarch
1417     assert resolved_arch != "all"
1418     acceptable_no_descend_paths = {
1419         f"./usr/lib/{resolved_multiarch}",
1420         f"./usr/include/{resolved_multiarch}",
1421     }
1422     acceptable_files = {
1423         f"./usr/share/doc/{binary_package.name}/{basename}"
1424         for basename in (
1425             "copyright",
1426             "changelog.gz",
1427             "changelog.Debian.gz",
1428             f"changelog.Debian.{resolved_arch}.gz",
1429             "NEWS.Debian",
1430             "NEWS.Debian.gz",
1431             "README.Debian",
1432             "README.Debian.gz",
1433         )
1434     }
1435     acceptable_intermediate_dirs = _all_parent_directories_of(
1436         itertools.chain(acceptable_no_descend_paths, acceptable_files)
1437     )
1438
1439     for fs_path, children in fs_root.walk():
1440         path = fs_path.path
1441         if path in acceptable_no_descend_paths:
1442             children.clear()
1443             continue
1444         if path in acceptable_intermediate_dirs or path in acceptable_files:
1445             continue
1446         return None
1447
1448     return "same"
1449
1450
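As a hedged illustration of the heuristic above: a package whose contents are confined to the multiarch-qualified directories plus its own doc directory is treated as co-installable. The tree below is hypothetical:

# Hypothetical amd64 package content that the walk above would classify
# as "Multi-Arch: same":
#   ./usr                               (intermediate dir)
#   ./usr/lib                           (intermediate dir)
#   ./usr/lib/x86_64-linux-gnu          (no-descend dir; contents not inspected)
#   ./usr/share/doc/libfoo1/copyright   (acceptable file)
# Any path outside these sets (e.g. ./usr/bin/foo) makes the function
# return None, leaving Multi-Arch unset.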

1451 @functools.lru_cache()
1452 def _has_t64_enabled() -> bool:
1453     try:
1454         output = subprocess.check_output(
1455             ["dpkg-buildflags", "--query-features", "abi"]
1456         ).decode()
1457     except (subprocess.CalledProcessError, FileNotFoundError):
1458         return False
1459
1460     for stanza in Deb822.iter_paragraphs(output):
1461         if stanza.get("Feature") == "time64" and stanza.get("Enabled") == "yes":
1462             return True
1463     return False
1464
1465
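The helper parses deb822 stanzas out of the `dpkg-buildflags --query-features abi` output; on a system with the time64 ABI feature enabled, the queried output contains a stanza of roughly this shape (illustrative, not captured from a real run):

# Feature: time64
# Enabled: yes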

1466 def _t64_migration_substvar(
1467     binary_package: BinaryPackage,
1468     control_output_dir: str,
1469     substvars: FlushableSubstvars,
1470 ) -> None:
1471     name = binary_package.name
1472     compat_name = binary_package.fields.get("X-Time64-Compat")
1473     if compat_name is None and not _T64_REGEX.match(name):
1474         return
1475
1476     if not any(
1477         os.path.isfile(os.path.join(control_output_dir, n))
1478         for n in ["symbols", "shlibs"]
1479     ):
1480         return
1481
1482     if compat_name is None:
1483         compat_name = name.replace("t64", "", 1)
1484         if compat_name == name:
1485             raise AssertionError(
1486                 f"Failed to derive a t64 compat name for {name}. Please file a bug against debputy."
1487                 " As a workaround, you can explicitly provide an X-Time64-Compat header in debian/control"
1488                 " where you specify the desired compat name."
1489             )
1490
1491     arch_bits = binary_package.package_deb_architecture_variable("ARCH_BITS")
1492
1493     if arch_bits != "32" or not _has_t64_enabled():
1494         substvars.add_dependency(
1495             _T64_PROVIDES,
1496             f"{compat_name} (= ${{binary:Version}})",
1497         )
1498     elif _T64_PROVIDES not in substvars:
1499         substvars[_T64_PROVIDES] = ""
1500
1501
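A hypothetical walk-through of the function above, using an invented package name:

# name = "libfoo1t64"            (matches _T64_REGEX)
# compat_name = "libfoo1"        (first "t64" occurrence removed)
# On a 64-bit arch (ARCH_BITS == "64"), the t64 rename changed nothing,
# so the substvar gains: "libfoo1 (= ${binary:Version})".
# On a 32-bit arch with time64 enabled, the ABI actually differs, so the
# substvar is only ensured to exist (empty).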

1502 @functools.lru_cache()
1503 def dpkg_field_list_pkg_dep() -> Sequence[str]:
1504     try:
1505         output = subprocess.check_output(
1506             [
1507                 "perl",
1508                 "-MDpkg::Control::Fields",
1509                 "-e",
1510                 r'print "$_\n" for field_list_pkg_dep',
1511             ]
1512         )
1513     except (FileNotFoundError, subprocess.CalledProcessError):
1514         _error("Could not run perl -MDpkg::Control::Fields to get a list of fields")
1515     return output.decode("utf-8").splitlines(keepends=False)
1516
1517
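The perl one-liner prints one field name per line; the result is the set of binary package relationship fields known to dpkg, roughly:

# Depends
# Pre-Depends
# Recommends
# Suggests
# Breaks
# Conflicts
# Provides
# Replaces
# Enhances
# (the exact list depends on the installed libdpkg-perl version)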

1518 def _handle_relationship_substvars(
1519     source: SourcePackage,
1520     dctrl_file: BinaryPackage,
1521     substvars: FlushableSubstvars,
1522     has_dbgsym: bool,
1523 ) -> Optional[str]:
1524     relationship_fields = dpkg_field_list_pkg_dep()
1525     relationship_fields_lc = frozenset(x.lower() for x in relationship_fields)
1526     substvar_fields = collections.defaultdict(list)
1527     needs_dbgsym_stanza = False
1528     for substvar_name, substvar in substvars.as_substvar.items():
1529         if ":" not in substvar_name:
1530             continue
1531         if substvar.assignment_operator in ("$=", "!="):
1532             # Will create incorrect results if there is a dbgsym and we do nothing
1533             needs_dbgsym_stanza = True
1534
1535         if substvar.assignment_operator == "$=":
1536             # Automatically handled; no need for manual merging.
1537             continue
1538         _, field = substvar_name.rsplit(":", 1)
1539         field_lc = field.lower()
1540         if field_lc not in relationship_fields_lc:
1541             continue
1542         substvar_fields[field_lc].append("${" + substvar_name + "}")
1543
1544     if not has_dbgsym:
1545         needs_dbgsym_stanza = False
1546
1547     if not substvar_fields and not needs_dbgsym_stanza:
1548         return None
1549
1550     replacement_stanza = debian.deb822.Deb822(dctrl_file.fields)
1551
1552     for field_name in relationship_fields:
1553         field_name_lc = field_name.lower()
1554         addendum = substvar_fields.get(field_name_lc)
1555         if addendum is None:
1556             # No merging required
1557             continue
1558         substvars_part = ", ".join(addendum)
1559         existing_value = replacement_stanza.get(field_name)
1560
1561         if existing_value is None or existing_value.isspace():
1562             final_value = substvars_part
1563         else:
1564             existing_value = existing_value.rstrip().rstrip(",")
1565             final_value = f"{existing_value}, {substvars_part}"
1566         replacement_stanza[field_name] = final_value
1567
1568     tmpdir = generated_content_dir(package=dctrl_file)
1569     with tempfile.NamedTemporaryFile(
1570         mode="wb",
1571         dir=tmpdir,
1572         suffix="__DEBIAN_control",
1573         delete=False,
1574     ) as fd:
1575         try:
1576             cast("Any", source.fields).dump(fd)
1577         except AttributeError:
1578             debian.deb822.Deb822(source.fields).dump(fd)
1579         fd.write(b"\n")
1580         replacement_stanza.dump(fd)
1581
1582         if has_dbgsym:
1583             # Minimal stanza to avoid substvars warnings. Most fields are still set
1584             # via -D.
1585             dbgsym_stanza = Deb822()
1586             dbgsym_stanza["Package"] = f"{dctrl_file.name}-dbgsym"
1587             dbgsym_stanza["Architecture"] = dctrl_file.fields["Architecture"]
1588             dbgsym_stanza["Description"] = f"debug symbols for {dctrl_file.name}"
1589             fd.write(b"\n")
1590             dbgsym_stanza.dump(fd)
1591
1592     return fd.name
1593
1594
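A sketch of the merge performed above, with hypothetical field and substvar values: a relationship substvar that dpkg-gencontrol would not expand on its own gets appended to the matching field in the replacement stanza.

# Before (debian/control):     Depends: libbar1, ${misc:Depends}
# Substvar (operator "!="):    my:Depends=libextra1 (>= 2)
# After (replacement stanza):  Depends: libbar1, ${misc:Depends}, ${my:Depends}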

1595 def _generate_control_files(
1596     source_package: SourcePackage,
1597     binary_package: BinaryPackage,
1598     package_state: PackageTransformationDefinition,
1599     control_output_dir: str,
1600     fs_root: FSPath,
1601     substvars: FlushableSubstvars,
1602     dbgsym_root_fs: Optional[VirtualPath],
1603     dbgsym_build_ids: Optional[List[str]],
1604 ) -> None:
1605     package = binary_package.name
1606     extra_common_params = []
1607     extra_params_specific = []
1608     _ensure_base_substvars_defined(substvars)
1609     if "Installed-Size" not in substvars:
1610         # Pass it via cmd-line to make it more visible that we are providing the
1611         # value. It also prevents the dbgsym package from picking up this value.
1612         ctrl_fs_root = FSROOverlay.create_root_dir("DEBIAN", control_output_dir)
1613         total_size = _compute_installed_size(fs_root) + _compute_installed_size(
1614             ctrl_fs_root
1615         )
1616         extra_params_specific.append(f"-VInstalled-Size={total_size}")
1617
1618     ma_value = binary_package.fields.get("Multi-Arch")
1619     if not binary_package.is_udeb and ma_value is None:
1620         ma_value = _auto_compute_multi_arch(binary_package, control_output_dir, fs_root)
1621         if ma_value is not None:
1622             _info(
1623                 f'The package "{binary_package.name}" looks like it should be "Multi-Arch: {ma_value}" based'
1624                 ' on the contents and there is no explicit "Multi-Arch" field. Setting the Multi-Arch field'
1625                 ' accordingly in the binary. If this auto-correction is wrong, please add "Multi-Arch: no" to the'
1626                 ' relevant part of "debian/control" to disable this feature.'
1627             )
1628             # We want this to apply to the `-dbgsym` package as well to avoid
1629             # lintian `debug-package-for-multi-arch-same-pkg-not-coinstallable`
1630             extra_common_params.append(f"-DMulti-Arch={ma_value}")
1631     elif ma_value == "no":
1632         extra_common_params.append("-UMulti-Arch")
1633
1634     dbgsym_root_dir = dhe_dbgsym_root_dir(binary_package)
1635     dbgsym_ids = " ".join(dbgsym_build_ids) if dbgsym_build_ids else ""
1636     if package_state.binary_version is not None:
1637         extra_common_params.append(f"-v{package_state.binary_version}")
1638
1639     _t64_migration_substvar(binary_package, control_output_dir, substvars)
1640
1641     with substvars.flush() as flushed_substvars:
1642         has_dbgsym = dbgsym_root_fs is not None and any(
1643             f for f in dbgsym_root_fs.all_paths() if f.is_file
1644         )
1645         dctrl_file = _handle_relationship_substvars(
1646             source_package,
1647             binary_package,
1648             substvars,
1649             has_dbgsym,
1650         )
1651         if dctrl_file is None:
1652             dctrl_file = "debian/control"
1653
1654         if has_dbgsym:
1655             _generate_dbgsym_control_file_if_relevant(
1656                 binary_package,
1657                 dbgsym_root_fs,
1658                 dbgsym_root_dir,
1659                 dbgsym_ids,
1660                 ma_value,
1661                 dctrl_file,
1662                 extra_common_params,
1663             )
1664             generate_md5sums_file(
1665                 os.path.join(dbgsym_root_dir, "DEBIAN"),
1666                 dbgsym_root_fs,
1667             )
1668         elif dbgsym_ids:
1669             extra_common_params.append(f"-DBuild-Ids={dbgsym_ids}")
1670
1671         ctrl_file = os.path.join(control_output_dir, "control")
1672         dpkg_cmd = [
1673             "dpkg-gencontrol",
1674             f"-p{package}",
1675             # FIXME: Support d/<pkg>.changelog at some point.
1676             "-ldebian/changelog",
1677             f"-c{dctrl_file}",
1678             f"-T{flushed_substvars}",
1679             f"-O{ctrl_file}",
1680             f"-P{control_output_dir}",
1681             *extra_common_params,
1682             *extra_params_specific,
1683         ]
1684         print_command(*dpkg_cmd)
1685         try:
1686             subprocess.check_call(dpkg_cmd)
1687         except subprocess.CalledProcessError:
1688             _error(
1689                 f"Attempting to generate DEBIAN/control file for {package} failed. Please review the output"
1690                 " from dpkg-gencontrol above to understand what went wrong."
1691             )
1692         os.chmod(ctrl_file, 0o644)
1693
1694     if not binary_package.is_udeb:
1695         generate_md5sums_file(control_output_dir, fs_root)
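Putting it together, the emitted command looks roughly like this for a hypothetical package `foo` (the substvars path is illustrative):

# dpkg-gencontrol -pfoo -ldebian/changelog -cdebian/control \
#     -T<scratch-dir>/substvars \
#     -Odebian/foo/DEBIAN/control -Pdebian/foo \
#     -DMulti-Arch=same -VInstalled-Size=1234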
diff --git a/coverage-report/d_267b6307937f1878_debhelper_emulation_py.html b/coverage-report/d_267b6307937f1878_debhelper_emulation_py.html
deleted file mode 100644
index 735f581..0000000
--- a/coverage-report/d_267b6307937f1878_debhelper_emulation_py.html
+++ /dev/null
@@ -1,368 +0,0 @@
[Deleted coverage page for src/debputy/debhelper_emulation.py: 73% of 143 statements covered (coverage.py v7.2.7, created 2024-04-07 12:14 +0200). Embedded source listing follows.]
1 import dataclasses
2 import os.path
3 import re
4 import shutil
5 from re import Match
6 from typing import (
7     Optional,
8     Callable,
9     Union,
10     Iterable,
11     Tuple,
12     Sequence,
13     cast,
14     Mapping,
15     Any,
16     Set,
17     List,
18 )
19
20 from debputy.packages import BinaryPackage
21 from debputy.plugin.api import VirtualPath
22 from debputy.substitution import Substitution
23 from debputy.util import ensure_dir, print_command, _error
24
25 SnippetReplacement = Union[str, Callable[[str], str]]
26 MAINTSCRIPT_TOKEN_NAME_PATTERN = r"[A-Za-z0-9_.+]+"
27 MAINTSCRIPT_TOKEN_NAME_REGEX = re.compile(MAINTSCRIPT_TOKEN_NAME_PATTERN)
28 MAINTSCRIPT_TOKEN_REGEX = re.compile(f"#({MAINTSCRIPT_TOKEN_NAME_PATTERN})#")
29 _ARCH_FILTER_START = re.compile(r"^\s*(\[([^]]*)])[ \t]+")
30 _ARCH_FILTER_END = re.compile(r"\s+(\[([^]]*)])\s*$")
31 _BUILD_PROFILE_FILTER = re.compile(r"(<([^>]*)>(?:\s+<([^>]*)>)*)")
32
33
34 class CannotEmulateExecutableDHConfigFile(Exception):
35     def message(self) -> str:
36         return cast("str", self.args[0])
37
38     def config_file(self) -> VirtualPath:
39         return cast("VirtualPath", self.args[1])
40
41
42 @dataclasses.dataclass(slots=True, frozen=True)
43 class DHConfigFileLine:
44     config_file: VirtualPath
45     line_no: int
46     executable_config: bool
47     original_line: str
48     tokens: Sequence[str]
49     arch_filter: Optional[str]
50     build_profile_filter: Optional[str]
51
52     def conditional_key(self) -> Tuple[str, ...]:
53         k = []
54         if self.arch_filter is not None:
55             k.append("arch")
56             k.append(self.arch_filter)
57         if self.build_profile_filter is not None:
58             k.append("build-profiles")
59             k.append(self.build_profile_filter)
60         return tuple(k)
61
62     def conditional(self) -> Optional[Mapping[str, Any]]:
63         filters = []
64         if self.arch_filter is not None:
65             filters.append({"arch-matches": self.arch_filter})
66         if self.build_profile_filter is not None:
67             filters.append({"build-profiles-matches": self.build_profile_filter})
68         if not filters:
69             return None
70         if len(filters) == 1:
71             return filters[0]
72         return {"all-of": filters}
73
74
75 def dhe_dbgsym_root_dir(binary_package: BinaryPackage) -> str:
76     return os.path.join("debian", ".debhelper", binary_package.name, "dbgsym-root")
77
78
79 def read_dbgsym_file(binary_package: BinaryPackage) -> List[str]:
80     dbgsym_id_file = os.path.join(
81         "debian", ".debhelper", binary_package.name, "dbgsym-build-ids"
82     )
83     try:
84         with open(dbgsym_id_file, "rt", encoding="utf-8") as fd:
85             return fd.read().split()
86     except FileNotFoundError:
87         return []
88
89
90 def assert_no_dbgsym_migration(binary_package: BinaryPackage) -> None:
91     dbgsym_migration_file = os.path.join(
92         "debian", ".debhelper", binary_package.name, "dbgsym-migration"
93     )
94     if os.path.lexists(dbgsym_migration_file):
95         _error(
96             "Sorry, debputy does not support the dh_strip --dbgsym-migration feature. Please either finish the"
97             " migration first or migrate to debputy later"
98         )
99
100
101 def _prune_match(
102     line: str,
103     match: Optional[Match[str]],
104     match_mapper: Optional[Callable[[Match[str]], str]] = None,
105 ) -> Tuple[str, Optional[str]]:
106     if match is None:
107         return line, None
108     s, e = match.span()
109     if match_mapper:
110         matched_part = match_mapper(match)
111     else:
112         matched_part = line[s:e]
113     # We prune exactly the matched part and assume the regexes leave behind spaces if they were important.
114     line = line[:s] + line[e:]
115     # One special-case, if the match is at the beginning or end, then we can safely discard left
116     # over whitespace.
117     return line.strip(), matched_part
118
119
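An illustrative trace of the pruning helper above, using a hypothetical install-file line:

# line = "usr/bin/tool [linux-any]"
# match = _ARCH_FILTER_END.search(line)    # matches " [linux-any]"
# _prune_match(line, match, lambda m: m.group(1)[1:-1].strip())
#   -> ("usr/bin/tool", "linux-any")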

120 def dhe_filedoublearray(
121     config_file: VirtualPath,
122     substitution: Substitution,
123     *,
124     allow_dh_exec_rename: bool = False,
125 ) -> Iterable[DHConfigFileLine]:
126     with config_file.open() as fd:
127         is_executable = config_file.is_executable
128         for line_no, orig_line in enumerate(fd, start=1):
129             arch_filter = None
130             build_profile_filter = None
131             if (  # coverage: 131 ↛ 138
132                 line_no == 1
133                 and is_executable
134                 and not orig_line.startswith(
135                     ("#!/usr/bin/dh-exec", "#! /usr/bin/dh-exec")
136                 )
137             ):
138                 raise CannotEmulateExecutableDHConfigFile(
139                     "Only #!/usr/bin/dh-exec based executables can be emulated",
140                     config_file,
141                 )
142             orig_line = orig_line.rstrip("\n")
143             line = orig_line.strip()
144             if not line or line.startswith("#"):
145                 continue
146             if is_executable:
147                 if "=>" in line and not allow_dh_exec_rename:  # coverage: 147 ↛ 148 (condition never true)
148                     raise CannotEmulateExecutableDHConfigFile(
149                         'Cannot emulate dh-exec\'s "=>" feature to rename files for the concrete file',
150                         config_file,
151                     )
152                 line, build_profile_filter = _prune_match(
153                     line,
154                     _BUILD_PROFILE_FILTER.search(line),
155                 )
156             line, arch_filter = _prune_match(
157                 line,
158                 _ARCH_FILTER_START.search(line) or _ARCH_FILTER_END.search(line),
159                 # Remove the enclosing []
160                 lambda m: m.group(1)[1:-1].strip(),
161             )
162
163             parts = tuple(
164                 substitution.substitute(
165                     w, f'{config_file.path} line {line_no} token "{w}"'
166                 )
167                 for w in line.split()
168             )
169             yield DHConfigFileLine(
170                 config_file,
171                 line_no,
172                 is_executable,
173                 orig_line,
174                 parts,
175                 arch_filter,
176                 build_profile_filter,
177             )
178
179
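For instance, a hypothetical dh-exec style `debian/foo.install` line combining filters would parse as follows:

# "usr/lib/libfoo.so* [linux-any] <!noinsttest>" yields a DHConfigFileLine with
#   tokens               = ("usr/lib/libfoo.so*",)
#   arch_filter          = "linux-any"
#   build_profile_filter = "<!noinsttest>"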

180 def dhe_pkgfile(
181     debian_dir: VirtualPath,
182     binary_package: BinaryPackage,
183     basename: str,
184     always_fallback_to_packageless_variant: bool = False,
185     bug_950723_prefix_matching: bool = False,
186 ) -> Optional[VirtualPath]:
187     # TODO: Architecture specific files
188     maybe_at_suffix = "@" if bug_950723_prefix_matching else ""
189     possible_names = [f"{binary_package.name}{maybe_at_suffix}.{basename}"]
190     if binary_package.is_main_package or always_fallback_to_packageless_variant:  # coverage: 190 ↛ 195 (condition never false)
191         possible_names.append(
192             f"{basename}@" if bug_950723_prefix_matching else basename
193         )
194
195     for name in possible_names:
196         match = debian_dir.get(name)
197         if match is not None and not match.is_dir:
198             return match
199     return None
200
201
202 def dhe_pkgdir(
203     debian_dir: VirtualPath,
204     binary_package: BinaryPackage,
205     basename: str,
206 ) -> Optional[VirtualPath]:
207     possible_names = [f"{binary_package.name}.{basename}"]
208     if binary_package.is_main_package:
209         possible_names.append(basename)
210
211     for name in possible_names:
212         match = debian_dir.get(name)
213         if match is not None and match.is_dir:
214             return match
215     return None
216
217
218 def dhe_install_pkg_file_as_ctrl_file_if_present(
219     debian_dir: VirtualPath,
220     binary_package: BinaryPackage,
221     basename: str,
222     control_output_dir: str,
223     mode: int,
224 ) -> None:
225     source = dhe_pkgfile(debian_dir, binary_package, basename)
226     if source is None:
227         return
228     ensure_dir(control_output_dir)
229     dhe_install_path(source.fs_path, os.path.join(control_output_dir, basename), mode)
230
231
232 def dhe_install_path(source: str, dest: str, mode: int) -> None:
233     # TODO: "install -p -mXXXX foo bar" silently discards broken
234     # symlinks to install the file in place. (#868204)
235     print_command("install", "-p", f"-m{oct(mode)[2:]}", source, dest)
236     shutil.copyfile(source, dest)
237     os.chmod(dest, mode)
238
239
240 _FIND_DH_WITH = re.compile(r"--with(?:\s+|=)(\S+)")
241 _DEP_REGEX = re.compile("^([a-z0-9][-+.a-z0-9]+)", re.ASCII)
242
243
244 def parse_drules_for_addons(lines: Iterable[str], sequences: Set[str]) -> None:
245     for line in lines:
246         if not line.startswith("\tdh "):
247             continue
248         for match in _FIND_DH_WITH.finditer(line):
249             sequence_def = match.group(1)
250             sequences.update(sequence_def.split(","))
251
252
253 def extract_dh_addons_from_control(
254     source_paragraph: Mapping[str, str],
255     sequences: Set[str],
256 ) -> None:
257     for f in ("Build-Depends", "Build-Depends-Indep", "Build-Depends-Arch"):
258         field = source_paragraph.get(f)
259         if not field:
260             continue
261
262         for dep_clause in (d.strip() for d in field.split(",")):
263             match = _DEP_REGEX.match(dep_clause.strip())
264             if not match:
265                 continue
266             dep = match.group(1)
267             if not dep.startswith("dh-sequence-"):
268                 continue
269             sequences.add(dep[12:])
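A quick sketch of the addon extraction above with a hypothetical source stanza:

# source_paragraph = {"Build-Depends": "debhelper-compat (= 13), dh-sequence-python3"}
# sequences: Set[str] = set()
# extract_dh_addons_from_control(source_paragraph, sequences)
# assert sequences == {"python3"}   # "dh-sequence-" prefix (12 chars) stripped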
diff --git a/coverage-report/d_267b6307937f1878_elf_util_py.html b/coverage-report/d_267b6307937f1878_elf_util_py.html
deleted file mode 100644
index 31c29a0..0000000
--- a/coverage-report/d_267b6307937f1878_elf_util_py.html
+++ /dev/null
@@ -1,307 +0,0 @@
[Deleted coverage page for src/debputy/elf_util.py: 76% of 100 statements covered (coverage.py v7.2.7, created 2024-04-07 12:14 +0200). Embedded source listing follows.]
1 import io
2 import os
3 import struct
4 from typing import List, Optional, Callable, Tuple, Iterable
5
6 from debputy.filesystem_scan import FSPath
7 from debputy.plugin.api import VirtualPath
8
9 ELF_HEADER_SIZE32 = 136
10 ELF_HEADER_SIZE64 = 232
11 ELF_MAGIC = b"\x7fELF"
12 ELF_VERSION = 0x00000001
13 ELF_ENDIAN_LE = 0x01
14 ELF_ENDIAN_BE = 0x02
15 ELF_TYPE_EXECUTABLE = 0x0002
16 ELF_TYPE_SHARED_OBJECT = 0x0003
17
18 ELF_LINKING_TYPE_ANY = None
19 ELF_LINKING_TYPE_DYNAMIC = True
20 ELF_LINKING_TYPE_STATIC = False
21
22 ELF_EI_ELFCLASS32 = 1
23 ELF_EI_ELFCLASS64 = 2
24
25 ELF_PT_DYNAMIC = 2
26
27 ELF_EI_NIDENT = 0x10
28
29 # ELF header format:
30 # typedef struct {
31 #     unsigned char e_ident[EI_NIDENT];  # <-- 16 / 0x10 bytes
32 #     uint16_t      e_type;
33 #     uint16_t      e_machine;
34 #     uint32_t      e_version;
35 #     ElfN_Addr     e_entry;
36 #     ElfN_Off      e_phoff;
37 #     ElfN_Off      e_shoff;
38 #     uint32_t      e_flags;
39 #     uint16_t      e_ehsize;
40 #     uint16_t      e_phentsize;
41 #     uint16_t      e_phnum;
42 #     uint16_t      e_shentsize;
43 #     uint16_t      e_shnum;
44 #     uint16_t      e_shstrndx;
45 # } ElfN_Ehdr;
46
47
48 class IncompleteFileError(RuntimeError):
49     pass
50
51
52 def is_so_or_exec_elf_file(
53     path: VirtualPath,
54     *,
55     assert_linking_type: Optional[bool] = ELF_LINKING_TYPE_ANY,
56 ) -> bool:
57     is_elf, linking_type = _read_elf_file(
58         path,
59         determine_linking_type=assert_linking_type is not None,
60     )
61     return is_elf and (
62         assert_linking_type is ELF_LINKING_TYPE_ANY
63         or assert_linking_type == linking_type
64     )
65
66
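The magic and type probe above can be mimicked standalone; a minimal sketch, assuming a regular file path rather than debputy's VirtualPath and a little-endian ELF:

import struct

def quick_elf_probe(fs_path: str) -> bool:
    # Read e_ident (16 bytes) plus the e_type field that follows it.
    with open(fs_path, "rb") as fd:
        header = fd.read(0x14)
    if len(header) < 0x14 or not header.startswith(b"\x7fELF"):
        return False
    (e_type,) = struct.unpack_from("<H", header, 0x10)
    return e_type in (0x0002, 0x0003)  # ET_EXEC or ET_DYN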

67 def _read_elf_file(
68     path: VirtualPath,
69     *,
70     determine_linking_type: bool = False,
71 ) -> Tuple[bool, Optional[bool]]:
72     buffer_size = 4096
73     fd_buffer = bytearray(buffer_size)
74     linking_type = None
75     fd: io.BufferedReader
76     with path.open(byte_io=True, buffering=io.DEFAULT_BUFFER_SIZE) as fd:
77         len_elf_header_raw = fd.readinto(fd_buffer)
78         if (
79             not fd_buffer
80             or len_elf_header_raw < ELF_HEADER_SIZE32
81             or not fd_buffer.startswith(ELF_MAGIC)
82         ):
83             return False, None
84
85         elf_ei_class = fd_buffer[4]
86         endian_raw = fd_buffer[5]
87         if endian_raw == ELF_ENDIAN_LE:  # coverage: 87 ↛ 89 (condition never false)
88             endian = "<"
89         elif endian_raw == ELF_ENDIAN_BE:
90             endian = ">"
91         else:
92             return False, None
93
94         if elf_ei_class == ELF_EI_ELFCLASS64:  # coverage: 94 ↛ 100 (condition never false)
95             offset_size = "Q"
96             # We know it needs to be a 64bit ELF, then the header must be
97             # large enough for that.
98             if len_elf_header_raw < ELF_HEADER_SIZE64:  # coverage: 98 ↛ 99 (condition never true)
99                 return False, None
100         elif elf_ei_class == ELF_EI_ELFCLASS32:
101             offset_size = "L"
102         else:
103             return False, None
104
105         elf_type, _elf_machine, elf_version = struct.unpack_from(
106             f"{endian}HHL", fd_buffer, offset=ELF_EI_NIDENT
107         )
108         if elf_version != ELF_VERSION:  # coverage: 108 ↛ 109 (condition never true)
109             return False, None
110         if elf_type not in (ELF_TYPE_EXECUTABLE, ELF_TYPE_SHARED_OBJECT):  # coverage: 110 ↛ 111 (condition never true)
111             return False, None
112
113         if determine_linking_type:  # coverage: 113 ↛ 76
114             linking_type = _determine_elf_linking_type(
115                 fd, fd_buffer, endian, offset_size
116             )
117             if linking_type is None:  # coverage: 117 ↛ 118 (condition never true)
118                 return False, None
119
120     return True, linking_type
121
122
123 def _determine_elf_linking_type(fd, fd_buffer, endian, offset_size) -> Optional[bool]:
124     # To check the linking, we look for a PT_DYNAMIC program header.
125     # In other words, we assume static linking by default.
126
127     linking_type = ELF_LINKING_TYPE_STATIC
128     # To do that, we need to read a bit more of the ELF header to
129     # locate the Program header table.
130     #
131     # Reading - in order at offset 0x18:
132     #  * e_entry (ignored)
133     #  * e_phoff
134     #  * e_shoff (ignored)
135     #  * e_flags (ignored)
136     #  * e_ehsize (ignored)
137     #  * e_phentsize
138     #  * e_phnum
139     _, e_phoff, _, _, _, e_phentsize, e_phnum = struct.unpack_from(
140         f"{endian}{offset_size}{offset_size}{offset_size}LHHH",
141         fd_buffer,
142         offset=ELF_EI_NIDENT + 8,
143     )
144
145     # man 5 elf suggests that Program headers can be absent. If so,
146     # e_phnum will be zero - but we assume the same for e_phentsize.
147     if e_phnum == 0:  # coverage: 147 ↛ 148 (condition never true)
148         return linking_type
149
150     # Program headers must be at least 4 bytes for this code to do
151     # anything sanely. In practice, it must be larger than that
152     # as well. Accordingly, at best this is a corrupted ELF file.
153     if e_phentsize < 4:  # coverage: 153 ↛ 154 (condition never true)
154         return None
155
156     fd.seek(e_phoff, os.SEEK_SET)
157     unpack_format = f"{endian}L"
158     try:
159         for program_header_raw in _read_bytes_iteratively(fd, e_phentsize, e_phnum):  # coverage: 159 ↛ 167 (loop never completed)
160             p_type = struct.unpack_from(unpack_format, program_header_raw)[0]
161             if p_type == ELF_PT_DYNAMIC:
162                 linking_type = ELF_LINKING_TYPE_DYNAMIC
163                 break
164     except IncompleteFileError:
165         return None
166
167     return linking_type
168
169
170 def _read_bytes_iteratively(
171     fd: io.BufferedReader,
172     object_size: int,
173     object_count: int,
174 ) -> Iterable[bytes]:
175     total_size = object_size * object_count
176     bytes_remaining = total_size
177     # FIXME: improve this to read larger chunks and yield them one-by-one
178     byte_buffer = bytearray(object_size)
179
180     while bytes_remaining > 0:  # coverage: 180 ↛ 187 (condition never false)
181         n = fd.readinto(byte_buffer)
182         if n != object_size:  # coverage: 182 ↛ 183 (condition never true)
183             break
184         bytes_remaining -= n
185         yield byte_buffer
186
187     if bytes_remaining:
188         raise IncompleteFileError()
189
190
191 def find_all_elf_files(
192     fs_root: VirtualPath,
193     *,
194     walk_filter: Optional[Callable[[VirtualPath, List[VirtualPath]], bool]] = None,
195     with_linking_type: Optional[bool] = ELF_LINKING_TYPE_ANY,
196 ) -> List[VirtualPath]:
197     matches: List[VirtualPath] = []
198     # FIXME: Implementation detail that fs_root is always `FSPath` and has `.walk()`
199     assert isinstance(fs_root, FSPath)
200     for path, children in fs_root.walk():
201         if walk_filter is not None and not walk_filter(path, children):
202             continue
203         if not path.is_file or path.size < ELF_HEADER_SIZE32:
204             continue
205         if not is_so_or_exec_elf_file(path, assert_linking_type=with_linking_type):
206             continue
207         matches.append(path)
208     return matches
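Typical usage of the scanner above, sketched under the assumption of an existing debputy fs_root:

# matches = find_all_elf_files(fs_root, with_linking_type=ELF_LINKING_TYPE_DYNAMIC)
# -> every dynamically linked ELF executable or shared object below fs_root.
# A walk_filter can veto individual paths; it may also clear the mutable
# children list to stop descent into a subtree entirely.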
diff --git a/coverage-report/d_267b6307937f1878_exceptions_py.html b/coverage-report/d_267b6307937f1878_exceptions_py.html
deleted file mode 100644
index 7d38b8d..0000000
--- a/coverage-report/d_267b6307937f1878_exceptions_py.html
+++ /dev/null
@@ -1,189 +0,0 @@
[Deleted coverage page for src/debputy/exceptions.py: 92% of 50 statements covered (coverage.py v7.2.7, created 2024-04-07 12:14 +0200). Embedded source listing follows.]
1 from typing import cast, TYPE_CHECKING
2
3 if TYPE_CHECKING:
4     from debputy.plugin.api.impl_types import DebputyPluginMetadata
5
6
7 class DebputyRuntimeError(RuntimeError):
8     @property
9     def message(self) -> str:
10         return cast("str", self.args[0])
11
12
13 class DebputySubstitutionError(DebputyRuntimeError):
14     pass
15
16
17 class DebputyManifestVariableRequiresDebianDirError(DebputySubstitutionError):
18     pass
19
20
21 class DebputyDpkgGensymbolsError(DebputyRuntimeError):
22     pass
23
24
25 class SymlinkLoopError(ValueError):
26     @property
27     def message(self) -> str:
28         return cast("str", self.args[0])
29
30
31 class PureVirtualPathError(TypeError):
32     @property
33     def message(self) -> str:
34         return cast("str", self.args[0])
35
36
37 class TestPathWithNonExistentFSPathError(TypeError):
38     @property
39     def message(self) -> str:
40         return cast("str", self.args[0])
41
42
43 class DebputyFSError(DebputyRuntimeError):
44     pass
45
46
47 class DebputyFSIsROError(DebputyFSError):
48     pass
49
50
51 class PluginBaseError(DebputyRuntimeError):
52     pass
53
54
55 class DebputyPluginRuntimeError(PluginBaseError):
56     pass
57
58
59 class PluginNotFoundError(PluginBaseError):
60     pass
61
62
63 class PluginInitializationError(PluginBaseError):
64     pass
65
66
67 class PluginMetadataError(PluginBaseError):
68     pass
69
70
71 class PluginConflictError(PluginBaseError):
72     @property
73     def plugin_a(self) -> "DebputyPluginMetadata":
74         return cast("DebputyPluginMetadata", self.args[1])
75
76     @property
77     def plugin_b(self) -> "DebputyPluginMetadata":
78         return cast("DebputyPluginMetadata", self.args[2])
79
80
81 class PluginAPIViolationError(PluginBaseError):
82     pass
83
84
85 class UnhandledOrUnexpectedErrorFromPluginError(PluginBaseError):
86     pass
87
88
89 class DebputyMetadataAccessError(DebputyPluginRuntimeError):
90     pass
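The accessors on PluginConflictError mirror how it is raised with both plugin metadata objects as positional arguments; a minimal sketch of consuming it (the call site and the plugin_name attribute are hypothetical):

# try:
#     load_plugins(...)   # hypothetical call site that may raise
# except PluginConflictError as e:
#     print(f"Conflict between {e.plugin_a.plugin_name} and {e.plugin_b.plugin_name}")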
diff --git a/coverage-report/d_267b6307937f1878_filesystem_scan_py.html b/coverage-report/d_267b6307937f1878_filesystem_scan_py.html
deleted file mode 100644
index 5c580d4..0000000
--- a/coverage-report/d_267b6307937f1878_filesystem_scan_py.html
+++ /dev/null
@@ -1,2020 +0,0 @@
[Deleted coverage page for src/debputy/filesystem_scan.py: 74% of 1104 statements covered (coverage.py v7.2.7, created 2024-04-07 12:14 +0200). Embedded source listing follows.]
1 import atexit
2 import contextlib
3 import dataclasses
4 import errno
5 import io
6 import operator
7 import os
8 import stat
9 import subprocess
10 import tempfile
11 import time
12 from abc import ABC
13 from contextlib import suppress
14 from typing import (
15     List,
16     Iterable,
17     Dict,
18     Optional,
19     Tuple,
20     Union,
21     Iterator,
22     Mapping,
23     cast,
24     Any,
25     ContextManager,
26     TextIO,
27     BinaryIO,
28     NoReturn,
29     Type,
30     Generic,
31 )
32 from weakref import ref, ReferenceType
33
34 from debputy.exceptions import (
35     PureVirtualPathError,
36     DebputyFSIsROError,
37     DebputyMetadataAccessError,
38     TestPathWithNonExistentFSPathError,
39     SymlinkLoopError,
40 )
41 from debputy.intermediate_manifest import PathType
42 from debputy.manifest_parser.base_types import (
43     ROOT_DEFINITION,
44     StaticFileSystemOwner,
45     StaticFileSystemGroup,
46 )
47 from debputy.plugin.api.spec import (
48     VirtualPath,
49     PathDef,
50     PathMetadataReference,
51     PMT,
52 )
53 from debputy.types import VP
54 from debputy.util import (
55     generated_content_dir,
56     _error,
57     escape_shell,
58     assume_not_none,
59     _normalize_path,
60 )
61
62 BY_BASENAME = operator.attrgetter("name")
63
64
65 class AlwaysEmptyReadOnlyMetadataReference(PathMetadataReference[PMT]):
66     __slots__ = ("_metadata_type", "_owning_plugin", "_current_plugin")
67
68     def __init__(
69         self,
70         owning_plugin: str,
71         current_plugin: str,
72         metadata_type: Type[PMT],
73     ) -> None:
74         self._owning_plugin = owning_plugin
75         self._current_plugin = current_plugin
76         self._metadata_type = metadata_type
77
78     @property
79     def is_present(self) -> bool:
80         return False
81
82     @property
83     def can_read(self) -> bool:
84         return self._owning_plugin == self._current_plugin
85
86     @property
87     def can_write(self) -> bool:
88         return False
89
90     @property
91     def value(self) -> Optional[PMT]:
92         if self.can_read:  # coverage: 92 ↛ 94 (condition never false)
93             return None
94         raise DebputyMetadataAccessError(
95             f"Cannot read the metadata {self._metadata_type.__name__} owned by"
96             f" {self._owning_plugin} as the metadata has not been made"
97             f" readable to the plugin {self._current_plugin}."
98         )
99
100     @value.setter
101     def value(self, new_value: PMT) -> None:
102         if self._is_owner:
103             raise DebputyFSIsROError(
104                 f"Cannot set the metadata {self._metadata_type.__name__} as the path is read-only"
105             )
106         raise DebputyMetadataAccessError(
107             f"Cannot set the metadata {self._metadata_type.__name__} owned by"
108             f" {self._owning_plugin} as the metadata has not been made"
109             f" read-write to the plugin {self._current_plugin}."
110         )
111
112     @property
113     def _is_owner(self) -> bool:
114         return self._owning_plugin == self._current_plugin
115
116
117 @dataclasses.dataclass(slots=True)
118 class PathMetadataValue(Generic[PMT]):
119     owning_plugin: str
120     metadata_type: Type[PMT]
121     value: Optional[PMT] = None
122
123     def can_read_value(self, current_plugin: str) -> bool:
124         return self.owning_plugin == current_plugin
125
126     def can_write_value(self, current_plugin: str) -> bool:
127         return self.owning_plugin == current_plugin
128
129
130 class PathMetadataReferenceImplementation(PathMetadataReference[PMT]):
131     __slots__ = ("_owning_path", "_current_plugin", "_path_metadata_value")
132
133     def __init__(
134         self,
135         owning_path: VirtualPath,
136         current_plugin: str,
137         path_metadata_value: PathMetadataValue[PMT],
138     ) -> None:
139         self._owning_path = owning_path
140         self._current_plugin = current_plugin
141         self._path_metadata_value = path_metadata_value
142
143     @property
144     def is_present(self) -> bool:
145         if not self.can_read:  # coverage: 145 ↛ 146 (condition never true)
146             return False
147         return self._path_metadata_value.value is not None
148
149     @property
150     def can_read(self) -> bool:
151         return self._path_metadata_value.can_read_value(self._current_plugin)
152
153     @property
154     def can_write(self) -> bool:
155         if not self._path_metadata_value.can_write_value(self._current_plugin):  # coverage: 155 ↛ 156 (condition never true)
156             return False
157         owning_path = self._owning_path
158         return owning_path.is_read_write and not owning_path.is_detached
159
160     @property
161     def value(self) -> Optional[PMT]:
162         if self.can_read:  # coverage: 162 ↛ 164 (condition never false)
163             return self._path_metadata_value.value
164         raise DebputyMetadataAccessError(
165             f"Cannot read the metadata {self._metadata_type_name} owned by"
166             f" {self._owning_plugin} as the metadata has not been made"
167             f" readable to the plugin {self._current_plugin}."
168         )
169
170     @value.setter
171     def value(self, new_value: PMT) -> None:
172         if not self.can_write:  # coverage: 172 ↛ 173 (condition never true)
173             m = "set" if new_value is not None else "delete"
174             raise DebputyMetadataAccessError(
175                 f"Cannot {m} the metadata {self._metadata_type_name} owned by"
176                 f" {self._owning_plugin} as the metadata has not been made"
177                 f" read-write to the plugin {self._current_plugin}."
178             )
179         owning_path = self._owning_path
180         if not owning_path.is_read_write:  # coverage: 180 ↛ 181 (condition never true)
181             raise DebputyFSIsROError(
182                 f"Cannot set the metadata {self._metadata_type_name} as the path is read-only"
183             )
184         if owning_path.is_detached:  # coverage: 184 ↛ 185 (condition never true)
185             raise TypeError(
186                 f"Cannot set the metadata {self._metadata_type_name} as the path is detached"
187             )
188         self._path_metadata_value.value = new_value
189
190     @property
191     def _is_owner(self) -> bool:
192         return self._owning_plugin == self._current_plugin
193
194     @property
195     def _owning_plugin(self) -> str:
196         return self._path_metadata_value.owning_plugin
197
198     @property
199     def _metadata_type_name(self) -> str:
200         return self._path_metadata_value.metadata_type.__name__
201
202
203 def _cp_a(source: str, dest: str) -> None:
204     cmd = ["cp", "-a", source, dest]
205     try:
206         subprocess.check_call(cmd)
207     except subprocess.CalledProcessError:
208         full_command = escape_shell(*cmd)
209         _error(
210             f"The attempt to make an internal copy of {escape_shell(source)} failed. Please review the output of cp"
211             f" above to understand what went wrong. The full command was: {full_command}"
212         )
213
214
215 def _split_path(path: str) -> Tuple[bool, bool, List[str]]:
216     must_be_dir = True if path.endswith("/") else False
217     absolute = False
218     if path.startswith("/"):
219         absolute = True
220         path = "." + path
221     path_parts = path.rstrip("/").split("/")
222     if must_be_dir:
223         path_parts.append(".")
224     return absolute, must_be_dir, path_parts
225
226
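A concrete trace of the path splitter above:

# _split_path("/usr/share/")  -> (True, True, [".", "usr", "share", "."])
# _split_path("bin/ls")       -> (False, False, ["bin", "ls"])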

227 def _root(path: VP) -> VP:
228     current = path
229     while True:
230         parent = current.parent_dir
231         if parent is None:
232             return current
233         current = parent
234
235
236 def _check_fs_path_is_file(
237     fs_path: str,
238     unlink_on_error: Optional["FSPath"] = None,
239 ) -> None:
240     had_issue = False
241     try:
242         # FIXME: Check mode, and use the Virtual Path to cache the result as a side-effect
243         st = os.lstat(fs_path)
244     except FileNotFoundError:
245         had_issue = True
246     else:
247         if not stat.S_ISREG(st.st_mode) or st.st_nlink > 1:  # coverage: 247 ↛ 248 (condition never true)
248             had_issue = True
249     if not had_issue:  # coverage: 249 ↛ 252 (condition never false)
250         return
251
252     if unlink_on_error:
253         with suppress(FileNotFoundError):
254             os.unlink(fs_path)
255     raise TypeError(
256         "The provided FS backing file was deleted, replaced with a non-file entry or it was hard"
257         " linked to another file. The entry has been disconnected."
258     )
259
260
261 class CurrentPluginContextManager:
262     __slots__ = ("_plugin_names",)
263
264     def __init__(self, initial_plugin_name: str) -> None:
265         self._plugin_names = [initial_plugin_name]
266
267     @property
268     def current_plugin_name(self) -> str:
269         return self._plugin_names[-1]
270
271     @contextlib.contextmanager
272     def change_plugin_context(self, new_plugin_name: str) -> Iterator[str]:
273         self._plugin_names.append(new_plugin_name)
274         yield new_plugin_name
275         self._plugin_names.pop()
276
277
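Usage sketch for the context manager above (plugin names hypothetical):

# ctx = CurrentPluginContextManager("debputy")
# with ctx.change_plugin_context("my-plugin"):
#     assert ctx.current_plugin_name == "my-plugin"
# assert ctx.current_plugin_name == "debputy"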

278 class VirtualPathBase(VirtualPath, ABC):
279     __slots__ = ()
280
281     def _orphan_safe_path(self) -> str:
282         return self.path
283
284     def _rw_check(self) -> None:
285         if not self.is_read_write:
286             raise DebputyFSIsROError(
287                 f'Attempt to write to "{self._orphan_safe_path()}" failed:'
288                 " Debputy Virtual File system is R/O."
289             )
290
291     def lookup(self, path: str) -> Optional["VirtualPathBase"]:
292         match, missing = self.attempt_lookup(path)
293         if missing:
294             return None
295         return match
296
297     def attempt_lookup(self, path: str) -> Tuple["VirtualPathBase", List[str]]:
298         if self.is_detached:  # coverage: 298 ↛ 299 (condition never true)
299             raise ValueError(
300                 f'Cannot perform lookup via "{self._orphan_safe_path()}": The path is detached'
301             )
302         absolute, must_be_dir, path_parts = _split_path(path)
303         current = _root(self) if absolute else self
304         path_parts.reverse()
305         link_expansions = set()
306         while path_parts:
307             dir_part = path_parts.pop()
308             if dir_part == ".":
309                 continue
310             if dir_part == "..":
311                 p = current.parent_dir
312                 if p is None:  # coverage: 312 ↛ 313 (condition never true)
313                     raise ValueError(f'The path "{path}" escapes the root dir')
314                 current = p
315                 continue
316             try:
317                 current = current[dir_part]
318             except KeyError:
319                 path_parts.append(dir_part)
320                 path_parts.reverse()
321                 if must_be_dir:
322                     path_parts.pop()
323                 return current, path_parts
324             if current.is_symlink and path_parts:
325                 if current.path in link_expansions:
326                     # This is our loop detection for now. It might have some false positives where you
327                     # could safely resolve the same symlink twice. However, given that this use-case is
328                     # basically non-existent in practice for packaging, we just stop here for now.
329                     raise SymlinkLoopError(
330                         f'The path "{path}" traversed the symlink "{current.path}" multiple'
331                         " times. Currently, traversing the same symlink twice is considered"
332                         " a loop by `debputy` even if the path would eventually resolve."
333                         " Consider filing a feature request if you have a benign case that"
334                         " triggers this error."
335                     )
336                 link_expansions.add(current.path)
337                 link_target = current.readlink()
338                 link_absolute, _, link_path_parts = _split_path(link_target)
339                 if link_absolute:
340                     current = _root(current)
341                 else:
342                     current = assume_not_none(current.parent_dir)
343                 link_path_parts.reverse()
344                 path_parts.extend(link_path_parts)
345         return current, []
346
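The return convention of attempt_lookup is "(deepest existing path, missing segments)"; for example, under a hypothetical tree that only contains ./usr:

# current, missing = root.attempt_lookup("usr/bin/foo")
# current.path == "./usr"; missing == ["bin", "foo"]
# A full match returns (match, []), which is what lookup() checks for.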

347     def mkdirs(self, path: str) -> "VirtualPath":
348         current: VirtualPath
349         current, missing_parts = self.attempt_lookup(
350             f"{path}/" if not path.endswith("/") else path
351         )
352         if not current.is_dir:  # coverage: 352 ↛ 353 (condition never true)
353             raise ValueError(
354                 f'mkdirs of "{path}" failed: This would require {current.path} to not exist OR be'
355                 " a directory. However, that path exists AND is not a directory."
356             )
357         for missing_part in missing_parts:
358             assert missing_part not in (".", "..")
359             current = current.mkdir(missing_part)
360         return current
361
362     def prune_if_empty_dir(self) -> None:
363         """Remove this and all (now) empty parent directories
364
365         Same as: `rmdir --ignore-fail-on-non-empty --parents`
366
367         This operation may cause the path (and any of its parent directories) to become "detached"
368         and therefore unsafe to use in further operations.
369         """
370         self._rw_check()
371
372         if not self.is_dir:  # coverage: 372 ↛ 373 (condition never true)
373             raise TypeError(f"{self._orphan_safe_path()} is not a directory")
374         if any(self.iterdir):
375             return
376         parent_dir = assume_not_none(self.parent_dir)
377
378         # Recursive does not matter; we already know the directory is empty.
379         self.unlink()
380
381         # Note: The root dir must never be deleted. This works because when delegating it to the root
382         # directory, its implementation of this method is a no-op. If this is later rewritten to an
383         # inline loop (rather than recursion), be sure to preserve this feature.
384         parent_dir.prune_if_empty_dir()
385
386     def _current_plugin(self) -> str:
387         if self.is_detached:  # coverage: 387 ↛ 388 (condition never true)
388             raise TypeError("Cannot resolve the current plugin; path is detached")
389         current = self
390         while True:
391             next_parent = current.parent_dir
392             if next_parent is None:
393                 break
394             current = next_parent
395         assert current is not None
396         return cast("FSRootDir", current)._current_plugin()
397
398
399 class FSPath(VirtualPathBase, ABC):
400     __slots__ = (
401         "_basename",
402         "_parent_dir",
403         "_children",
404         "_path_cache",
405         "_parent_path_cache",
406         "_last_known_parent_path",
407         "_mode",
408         "_owner",
409         "_group",
410         "_mtime",
411         "_stat_cache",
412         "_metadata",
413         "__weakref__",
414     )
415
416     def __init__(
417         self,
418         basename: str,
419         parent: Optional["FSPath"],
420         children: Optional[Dict[str, "FSPath"]] = None,
421         initial_mode: Optional[int] = None,
422         mtime: Optional[float] = None,
423         stat_cache: Optional[os.stat_result] = None,
424     ) -> None:
425         self._basename = basename
426         self._path_cache: Optional[str] = None
427         self._parent_path_cache: Optional[str] = None
428         self._children = children
429         self._last_known_parent_path: Optional[str] = None
430         self._mode = initial_mode
431         self._mtime = mtime
432         self._stat_cache = stat_cache
433         self._metadata: Dict[Tuple[str, Type[Any]], PathMetadataValue[Any]] = {}
434         self._owner = ROOT_DEFINITION
435         self._group = ROOT_DEFINITION
436
437         # The self._parent_dir = None is to create `_parent_dir` because the parent_dir setter calls
438         # is_orphaned, which assumes self._parent_dir is an attribute.
439         self._parent_dir: Optional[ReferenceType["FSPath"]] = None
440         if parent is not None:
441             self.parent_dir = parent
442
443     def __repr__(self) -> str:
444         return (
445             f"{self.__class__.__name__}({self._orphan_safe_path()!r},"
446             f" is_file={self.is_file},"
447             f" is_dir={self.is_dir},"
448             f" is_symlink={self.is_symlink},"
449             f" has_fs_path={self.has_fs_path},"
450             f" children_len={len(self._children) if self._children else 0})"
451         )
452
453     @property
454     def name(self) -> str:
455         return self._basename
456
457     @name.setter
458     def name(self, new_name: str) -> None:
459         self._rw_check()
460         if new_name == self._basename:  # coverage: 460 ↛ 461 (condition never true)
461             return
462         if self.is_detached:  # coverage: 462 ↛ 463 (condition never true)
463             self._basename = new_name
464             return
465         self._rw_check()
466         parent = self.parent_dir
467         # This little parent_dir dance ensures the parent dir detects the rename properly
468         self.parent_dir = None
469         self._basename = new_name
470         self.parent_dir = parent
471
472     @property
473     def iterdir(self) -> Iterable["FSPath"]:
474         if self._children is not None:
475             yield from self._children.values()
476
477     def all_paths(self) -> Iterable["FSPath"]:
478         yield self
479         if not self.is_dir:
480             return
481         by_basename = BY_BASENAME
482         stack = sorted(self.iterdir, key=by_basename, reverse=True)
483         while stack:
484             current = stack.pop()
485             yield current
486             if current.is_dir and not current.is_detached:
487                 stack.extend(sorted(current.iterdir, key=by_basename, reverse=True))
488
489     def walk(self) -> Iterable[Tuple["FSPath", List["FSPath"]]]:
490         # FIXME: can this be more "os.walk"-like without making it harder to implement?
491         if not self.is_dir:  # coverage: 491 ↛ 492 (condition never true)
492             yield self, []
493             return
494         by_basename = BY_BASENAME
495         stack = [self]
496         while stack:
497             current = stack.pop()
498             children = sorted(current.iterdir, key=by_basename)
499             assert not children or current.is_dir
500             yield current, children
501             # Removing the directory counts as discarding the children.
502             if not current.is_detached:  # coverage: 502 ↛ 496 (condition never false)
503                 stack.extend(reversed(children))
504
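The yielded children list is the same list used to continue the traversal, so callers can prune descent in place; this is exactly how _auto_compute_multi_arch earlier in this patch skips the multiarch directories. A minimal sketch with a hypothetical prune condition:

# for path, children in fs_root.walk():
#     if path.name == "__pycache__":   # hypothetical prune condition
#         children.clear()             # stops descent into this subtree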

505     def _orphan_safe_path(self) -> str:
506         if not self.is_detached or self._last_known_parent_path is not None:  # coverage: 506 ↛ 508 (condition never false)
507             return self.path
508         return f"<orphaned>/{self.name}"
509
510     @property
511     def is_detached(self) -> bool:
512         parent = self._parent_dir
513         if parent is None:
514             return True
515         resolved_parent = parent()
516         if resolved_parent is None:  # coverage: 516 ↛ 517 (condition never true)
517             return True
518         return resolved_parent.is_detached
519
520     # The __getitem__ behaves like __getitem__ from Dict but __iter__ would ideally work like a Sequence.
521     # However, that does not feel compatible, so let's force people to use .children instead for the Sequence
522     # behaviour to avoid surprises for now.
523     # (Maybe it is a non-issue, but it is easier to add the API later than to remove it once we have committed
524     # to using it)
525     __iter__ = None
526
527     def __getitem__(self, key) -> "FSPath":
528         if self._children is None:
529             raise KeyError(
530                 f"{key} (note: {self._orphan_safe_path()!r} has no children)"
531             )
532         if isinstance(key, FSPath):  # coverage: 532 ↛ 533 (condition never true)
533             key = key.name
534         return self._children[key]
535
536     def __delitem__(self, key) -> None:
537         self._rw_check()
538         children = self._children
539         if children is None:  # coverage: 539 ↛ 540 (condition never true)
540             raise KeyError(key)
541         del children[key]
542
543     def get(self, key: str) -> "Optional[FSPath]":
544         try:
545             return self[key]
546         except KeyError:
547             return None
548
549     def __contains__(self, item: object) -> bool:
550         if isinstance(item, VirtualPath):  # coverage: 550 ↛ 551 (condition never true)
551             return item.parent_dir is self
552         if not isinstance(item, str):  # coverage: 552 ↛ 553 (condition never true)
553             return False
554         m = self.get(item)
555         return m is not None
556
557     def _add_child(self, child: "FSPath") -> None:
558         self._rw_check()
559         if not self.is_dir:  # coverage: 559 ↛ 560 (condition never true)
560             raise TypeError(f"{self._orphan_safe_path()!r} is not a directory")
561         if self._children is None:
562             self._children = {}
563
564         conflict_child = self.get(child.name)
565         if conflict_child is not None:  # coverage: 565 ↛ 566 (condition never true)
566             conflict_child.unlink(recursive=True)
567         self._children[child.name] = child
568
569     @property
570     def tar_path(self) -> str:
571         path = self.path
572         if self.is_dir:
573             return path + "/"
574         return path
575
576     @property
577     def path(self) -> str:
578         parent_path = self.parent_dir_path
579         if (
580             self._parent_path_cache is not None
581             and self._parent_path_cache == parent_path
582         ):
583             return assume_not_none(self._path_cache)
584         if parent_path is None:  # coverage: 584 ↛ 585 (condition never true)
585             raise ReferenceError(
586                 f"The path {self.name} is detached! {self.__class__.__name__}"
587             )
588         self._parent_path_cache = parent_path
589         ret = os.path.join(parent_path, self.name)
590         self._path_cache = ret
591         return ret
592
593     @property
594     def parent_dir(self) -> Optional["FSPath"]:
595         p_ref = self._parent_dir
596         p = p_ref() if p_ref is not None else None
597         if p is None:  # coverage: 597 ↛ 598 (condition never true)
598             raise ReferenceError(
599                 f"The path {self.name} is detached! {self.__class__.__name__}"
600             )
601         return p
602
603     @parent_dir.setter
604     def parent_dir(self, new_parent: Optional["FSPath"]) -> None:
605         self._rw_check()
606         if new_parent is not None:
607             if not new_parent.is_dir:  # coverage: 607 ↛ 608 (condition never true)
608                 raise ValueError(
609                     f"The parent {new_parent._orphan_safe_path()} must be a directory"
610                 )
611             new_parent._rw_check()
612         old_parent = None
613         self._last_known_parent_path = None
614         if not self.is_detached:
615             old_parent = self.parent_dir
616             old_parent_children = assume_not_none(assume_not_none(old_parent)._children)
617             del old_parent_children[self.name]
618         if new_parent is not None:
619             self._parent_dir = ref(new_parent)
620             new_parent._add_child(self)
621         else:
622             if old_parent is not None and not old_parent.is_detached:  # coverage: 622 ↛ 624 (condition never false)
623                 self._last_known_parent_path = old_parent.path
624             self._parent_dir = None
625         self._parent_path_cache = None
626
627     @property
628     def parent_dir_path(self) -> Optional[str]:
629         if self.is_detached:  # coverage: 629 ↛ 630 (condition never true)
630             return self._last_known_parent_path
631         return assume_not_none(self.parent_dir).path
632
633     def chown(
634         self,
635         owner: Optional[StaticFileSystemOwner],
636         group: Optional[StaticFileSystemGroup],
637     ) -> None:
638         """Change the owner/group of this path
639
640         :param owner: The desired owner definition for this path. If None, then no change of owner is performed.
641         :param group: The desired group definition for this path. If None, then no change of group is performed.
642         """
643         self._rw_check()
644
645         if owner is not None:
646             self._owner = owner.ownership_definition
647         if group is not None:
648             self._group = group.ownership_definition
649
650     def stat(self) -> os.stat_result:
651         st = self._stat_cache
652         if st is None:
653             st = self._uncached_stat()
654             self._stat_cache = st
655         return st
656
657     def _uncached_stat(self) -> os.stat_result:
658         return os.lstat(self.fs_path)
659
660     @property
661     def mode(self) -> int:
662         current_mode = self._mode
663         if current_mode is None:  # coverage: 663 ↛ 664 (condition never true)
664             current_mode = stat.S_IMODE(self.stat().st_mode)
665             self._mode = current_mode
666         return current_mode
667
668     @mode.setter
669     def mode(self, new_mode: int) -> None:
670         self._rw_check()
671         min_bit = 0o500 if self.is_dir else 0o400
672         if (new_mode & min_bit) != min_bit:  # coverage: 672 ↛ 673 (condition never true)
673             omode = oct(new_mode)[2:]
674             omin = oct(min_bit)[2:]
675             raise ValueError(
676                 f'Attempt to set mode of path "{self._orphan_safe_path()}" to {omode} rejected;'
677                 f" Minimum requirements are {omin} (read-bit and, for dirs, exec bit for user)."
678                 " There are no paths that do not need these requirements met and they can cause"
679                 " problems during build or on the final system."
680             )
681         self._mode = new_mode
682
683     @property
684     def mtime(self) -> float:
685         mtime = self._mtime
686         if mtime is None:
687             mtime = self.stat().st_mtime
688             self._mtime = mtime
689         return mtime
690
691     @mtime.setter
692     def mtime(self, new_mtime: float) -> None:
693         self._rw_check()
694         self._mtime = new_mtime
695
696     @property
697     def tar_owner_info(self) -> Tuple[str, int, str, int]:
698         owner = self._owner
699         group = self._group
700         return (
701             owner.entity_name,
702             owner.entity_id,
703             group.entity_name,
704             group.entity_id,
705         )
706
707     @property
708     def _can_replace_inline(self) -> bool:
709         return False
710
711     @contextlib.contextmanager
712     def add_file(
713         self,
714         name: str,
715         *,
716         unlink_if_exists: bool = True,
717         use_fs_path_mode: bool = False,
718         mode: int = 0o0644,
719         mtime: Optional[float] = None,
720         # Special-case parameters that are not exposed in the API
721         fs_basename_matters: bool = False,

-

722 subdir_key: Optional[str] = None, 

-

723 ) -> Iterator["FSPath"]: 

-

724 if "/" in name or name in {".", ".."}: 724 ↛ 725line 724 didn't jump to line 725, because the condition on line 724 was never true

-

725 raise ValueError(f'Invalid file name: "{name}"') 

-

726 if not self.is_dir: 726 ↛ 727line 726 didn't jump to line 727, because the condition on line 726 was never true

-

727 raise TypeError( 

-

728 f"Cannot create {self._orphan_safe_path()}/{name}:" 

-

729 f" {self._orphan_safe_path()} is not a directory" 

-

730 ) 

-

731 self._rw_check() 

-

732 existing = self.get(name) 

-

733 if existing is not None: 733 ↛ 734line 733 didn't jump to line 734, because the condition on line 733 was never true

-

734 if not unlink_if_exists: 

-

735 raise ValueError( 

-

736 f'The path "{self._orphan_safe_path()}" already contains a file called "{name}"' 

-

737 f" and exist_ok was False" 

-

                )
            existing.unlink(recursive=False)

        if fs_basename_matters and subdir_key is None:
            raise ValueError(
                "When fs_basename_matters is True, a subdir_key must be provided"
            )

        directory = generated_content_dir(subdir_key=subdir_key)

        if fs_basename_matters:
            fs_path = os.path.join(directory, name)
            with open(fs_path, "xb") as _:
                # Ensure that the fs_path exists
                pass
            child = FSBackedFilePath(
                name,
                self,
                fs_path,
                replaceable_inline=True,
                mtime=mtime,
            )
            yield child
        else:
            with tempfile.NamedTemporaryFile(
                dir=directory, suffix=f"__{name}", delete=False
            ) as fd:
                fs_path = fd.name
                child = FSBackedFilePath(
                    name,
                    self,
                    fs_path,
                    replaceable_inline=True,
                    mtime=mtime,
                )
                fd.close()
                yield child

        if use_fs_path_mode:
            # Ensure the caller can see the current mode
            os.chmod(fs_path, mode)
        _check_fs_path_is_file(fs_path, unlink_on_error=child)
        child._reset_caches()
        if not use_fs_path_mode:
            child.mode = mode

    def insert_file_from_fs_path(
        self,
        name: str,
        fs_path: str,
        *,
        exist_ok: bool = True,
        use_fs_path_mode: bool = False,
        mode: int = 0o0644,
        require_copy_on_write: bool = True,
        follow_symlinks: bool = True,
        reference_path: Optional[VirtualPath] = None,
    ) -> "FSPath":
        if "/" in name or name in {".", ".."}:
            raise ValueError(f'Invalid file name: "{name}"')
        if not self.is_dir:
            raise TypeError(
                f"Cannot create {self._orphan_safe_path()}/{name}:"
                f" {self._orphan_safe_path()} is not a directory"
            )
        self._rw_check()
        if name in self and not exist_ok:
            raise ValueError(
                f'The path "{self._orphan_safe_path()}" already contains a file called "{name}"'
                f" and exist_ok was False"
            )
        new_fs_path = fs_path
        if follow_symlinks:
            if reference_path is not None:
                raise ValueError(
                    "The reference_path cannot be used with follow_symlinks"
                )
            new_fs_path = os.path.realpath(new_fs_path, strict=True)

        fmode: Optional[int] = mode
        if use_fs_path_mode:
            fmode = None

        st = None
        if reference_path is None:
            st = os.lstat(new_fs_path)
            if stat.S_ISDIR(st.st_mode):
                raise ValueError(
                    f'The provided path "{fs_path}" is a directory. However, this'
                    " method does not support directories"
                )

            if not stat.S_ISREG(st.st_mode):
                if follow_symlinks:
                    raise ValueError(
                        f"The resolved fs_path ({new_fs_path}) was not a file."
                    )
                raise ValueError(f"The provided fs_path ({fs_path}) was not a file.")
        return FSBackedFilePath(
            name,
            self,
            new_fs_path,
            initial_mode=fmode,
            stat_cache=st,
            replaceable_inline=not require_copy_on_write,
            reference_path=reference_path,
        )

    def add_symlink(
        self,
        link_name: str,
        link_target: str,
        *,
        reference_path: Optional[VirtualPath] = None,
    ) -> "FSPath":
        if "/" in link_name or link_name in {".", ".."}:
            raise ValueError(
                f'Invalid file name: "{link_name}" (it must be a valid basename)'
            )
        if not self.is_dir:
            raise TypeError(
                f"Cannot create {self._orphan_safe_path()}/{link_name}:"
                f" {self._orphan_safe_path()} is not a directory"
            )
        self._rw_check()

        existing = self.get(link_name)
        if existing:
            # Emulate "ln -sf", which attempts a non-recursive unlink first.
            existing.unlink(recursive=False)

        return SymlinkVirtualPath(
            link_name,
            self,
            link_target,
            reference_path=reference_path,
        )

    def mkdir(
        self,
        name: str,
        *,
        reference_path: Optional[VirtualPath] = None,
    ) -> "FSPath":
        if "/" in name or name in {".", ".."}:
            raise ValueError(
                f'Invalid file name: "{name}" (it must be a valid basename)'
            )
        if not self.is_dir:
            raise TypeError(
                f"Cannot create {self._orphan_safe_path()}/{name}:"
                f" {self._orphan_safe_path()} is not a directory"
            )
        if reference_path is not None and not reference_path.is_dir:
            raise ValueError(
                f'The provided fs_path "{reference_path.fs_path}" exists but is not a directory!'
            )
        self._rw_check()

        existing = self.get(name)
        if existing:
            raise ValueError(f"Path {existing.path} already exists")
        return VirtualDirectoryFSPath(name, self, reference_path=reference_path)

    def mkdirs(self, path: str) -> "FSPath":
        return cast("FSPath", super().mkdirs(path))

    @property
    def is_read_write(self) -> bool:
        """When true, the file system entry may be mutated

        :return: Whether file system mutations are permitted.
        """
        if self.is_detached:
            return True
        return assume_not_none(self.parent_dir).is_read_write

    def unlink(self, *, recursive: bool = False) -> None:
        """Unlink a file or a directory

        This operation will detach the path from the file system (causing "is_detached" to return True).

        Note that the root directory cannot be deleted.

        :param recursive: If True, then non-empty directories will be unlinked as well, removing
          everything inside them. When False, an error is raised if the path is a non-empty directory.
        """
        if self.is_detached:
            return
        if not recursive and any(self.iterdir):
            raise ValueError(
                f'Refusing to unlink "{self.path}": The directory was not empty and recursive was False'
            )
        # The .parent_dir setter does a _rw_check() for us.
        self.parent_dir = None

    def _reset_caches(self) -> None:
        self._mtime = None
        self._stat_cache = None

    def metadata(
        self,
        metadata_type: Type[PMT],
        *,
        owning_plugin: Optional[str] = None,
    ) -> PathMetadataReference[PMT]:
        current_plugin = self._current_plugin()
        if owning_plugin is None:
            owning_plugin = current_plugin
        metadata_key = (owning_plugin, metadata_type)
        metadata_value = self._metadata.get(metadata_key)
        if metadata_value is None:
            if self.is_detached:
                raise TypeError(
                    f"Cannot access the metadata {metadata_type.__name__}: The path is detached."
                )
            if not self.is_read_write:
                return AlwaysEmptyReadOnlyMetadataReference(
                    owning_plugin,
                    current_plugin,
                    metadata_type,
                )
            metadata_value = PathMetadataValue(owning_plugin, metadata_type)
            self._metadata[metadata_key] = metadata_value
        return PathMetadataReferenceImplementation(
            self,
            current_plugin,
            metadata_value,
        )

    @contextlib.contextmanager
    def replace_fs_path_content(
        self,
        *,
        use_fs_path_mode: bool = False,
    ) -> Iterator[str]:
        if not self.is_file:
            raise TypeError(
                f'Cannot replace contents of "{self._orphan_safe_path()}" as it is not a file'
            )
        self._rw_check()
        fs_path = self.fs_path
        if not self._can_replace_inline:
            fs_path = self.fs_path
            directory = generated_content_dir()
            with tempfile.NamedTemporaryFile(
                dir=directory, suffix=f"__{self.name}", delete=False
            ) as new_path_fd:
                new_path_fd.close()
                _cp_a(fs_path, new_path_fd.name)
                fs_path = new_path_fd.name
            self._replaced_path(fs_path)
            assert self.fs_path == fs_path

        current_mtime = self._mtime
        if current_mtime is not None:
            os.utime(fs_path, (current_mtime, current_mtime))

        current_mode = self.mode
        yield fs_path
        _check_fs_path_is_file(fs_path, unlink_on_error=self)
        if not use_fs_path_mode:
            os.chmod(fs_path, current_mode)
        self._reset_caches()

    def _replaced_path(self, new_fs_path: str) -> None:
        raise NotImplementedError
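
# A minimal sketch (illustration only, not part of the upstream file) of the
# read-write FSPath API above. It relies on build_virtual_fs() from the end of
# this module; a trailing "/" marks a directory and missing parents are
# created on demand.
def _sketch_fspath_mutation_api() -> None:
    root = build_virtual_fs(["./usr/bin/tool"], read_write_fs=True)
    tool = root["usr"]["bin"]["tool"]
    tool.mode = 0o755  # accepted: keeps the minimum 0o400 read bit for files
    tool.unlink()  # detaches the node; the .parent_dir setter does the rw-check
    assert tool.is_detached
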
class VirtualFSPathBase(FSPath, ABC):
    __slots__ = ()

    def __init__(
        self,
        basename: str,
        parent: Optional["FSPath"],
        children: Optional[Dict[str, "FSPath"]] = None,
        initial_mode: Optional[int] = None,
        mtime: Optional[float] = None,
        stat_cache: Optional[os.stat_result] = None,
    ) -> None:
        super().__init__(
            basename,
            parent,
            children,
            initial_mode=initial_mode,
            mtime=mtime,
            stat_cache=stat_cache,
        )

    @property
    def mtime(self) -> float:
        mtime = self._mtime
        if mtime is None:
            mtime = time.time()
            self._mtime = mtime
        return mtime

    @property
    def has_fs_path(self) -> bool:
        return False

    def stat(self) -> os.stat_result:
        if not self.has_fs_path:
            raise PureVirtualPathError(
                "stat() is only applicable to paths backed by the file system. The path"
                f" {self._orphan_safe_path()!r} is purely virtual"
            )
        return super().stat()

    @property
    def fs_path(self) -> str:
        if not self.has_fs_path:
            raise PureVirtualPathError(
                "fs_path is only applicable to paths backed by the file system. The path"
                f" {self._orphan_safe_path()!r} is purely virtual"
            )
        return self.fs_path


class FSRootDir(FSPath):
    __slots__ = ("_fs_path", "_fs_read_write", "_plugin_context")

    def __init__(self, fs_path: Optional[str] = None) -> None:
        self._fs_path = fs_path
        self._fs_read_write = True
        super().__init__(
            ".",
            None,
            children={},
            initial_mode=0o755,
        )
        self._plugin_context = CurrentPluginContextManager("debputy")

    @property
    def is_detached(self) -> bool:
        return False

    def _orphan_safe_path(self) -> str:
        return self.name

    @property
    def path(self) -> str:
        return self.name

    @property
    def parent_dir(self) -> Optional["FSPath"]:
        return None

    @parent_dir.setter
    def parent_dir(self, new_parent: Optional[FSPath]) -> None:
        if new_parent is not None:
            raise ValueError("The root directory cannot become a non-root directory")

    @property
    def parent_dir_path(self) -> Optional[str]:
        return None

    @property
    def is_dir(self) -> bool:
        return True

    @property
    def is_file(self) -> bool:
        return False

    @property
    def is_symlink(self) -> bool:
        return False

    def readlink(self) -> str:
        raise TypeError(f'"{self._orphan_safe_path()!r}" is a directory; not a symlink')

    @property
    def has_fs_path(self) -> bool:
        return self._fs_path is not None

    def stat(self) -> os.stat_result:
        if not self.has_fs_path:
            raise PureVirtualPathError(
                "stat() is only applicable to paths backed by the file system. The path"
                f" {self._orphan_safe_path()!r} is purely virtual"
            )
        return os.stat(self.fs_path)

    @property
    def fs_path(self) -> str:
        if not self.has_fs_path:
            raise PureVirtualPathError(
                "fs_path is only applicable to paths backed by the file system. The path"
                f" {self._orphan_safe_path()!r} is purely virtual"
            )
        return assume_not_none(self._fs_path)

    @property
    def is_read_write(self) -> bool:
        return self._fs_read_write

    @is_read_write.setter
    def is_read_write(self, new_value: bool) -> None:
        self._fs_read_write = new_value

    def prune_if_empty_dir(self) -> None:
        # No-op for the root directory. There is never a case where you want to delete this directory
        # (and even if you could, debputy will need it for technical reasons, so the root dir stays)
        return

    def unlink(self, *, recursive: bool = False) -> None:
        # There is never a case where you want to delete this directory (and even if you could,
        # debputy will need it for technical reasons, so the root dir stays)
        raise TypeError("Cannot delete the root directory")

    def _current_plugin(self) -> str:
        return self._plugin_context.current_plugin_name

    @contextlib.contextmanager
    def change_plugin_context(self, new_plugin: str) -> Iterator[str]:
        with self._plugin_context.change_plugin_context(new_plugin) as r:
            yield r


class VirtualPathWithReference(VirtualFSPathBase, ABC):
    __slots__ = ("_reference_path",)

    def __init__(
        self,
        basename: str,
        parent: FSPath,
        *,
        default_mode: int,
        reference_path: Optional[VirtualPath] = None,
    ) -> None:
        super().__init__(
            basename,
            parent=parent,
            initial_mode=reference_path.mode if reference_path else default_mode,
        )
        self._reference_path = reference_path

    @property
    def has_fs_path(self) -> bool:
        ref_path = self._reference_path
        return ref_path is not None and ref_path.has_fs_path

    @property
    def mtime(self) -> float:
        mtime = self._mtime
        if mtime is None:
            ref_path = self._reference_path
            if ref_path:
                mtime = ref_path.mtime
            else:
                mtime = super().mtime
            self._mtime = mtime
        return mtime

    @mtime.setter
    def mtime(self, new_mtime: float) -> None:
        self._rw_check()
        self._mtime = new_mtime

    @property
    def fs_path(self) -> str:
        ref_path = self._reference_path
        if ref_path is not None and (
            not super().has_fs_path or super().fs_path == ref_path.fs_path
        ):
            return ref_path.fs_path
        return super().fs_path

    def stat(self) -> os.stat_result:
        ref_path = self._reference_path
        if ref_path is not None and (
            not super().has_fs_path or super().fs_path == ref_path.fs_path
        ):
            return ref_path.stat()
        return super().stat()

    def open(
        self,
        *,
        byte_io: bool = False,
        buffering: int = -1,
    ) -> Union[TextIO, BinaryIO]:
        reference_path = self._reference_path
        if reference_path is not None and reference_path.fs_path == self.fs_path:
            return reference_path.open(byte_io=byte_io, buffering=buffering)
        return super().open(byte_io=byte_io, buffering=buffering)


class VirtualDirectoryFSPath(VirtualPathWithReference):
    __slots__ = ("_reference_path",)

    def __init__(
        self,
        basename: str,
        parent: FSPath,
        *,
        reference_path: Optional[VirtualPath] = None,
    ) -> None:
        super().__init__(
            basename,
            parent,
            reference_path=reference_path,
            default_mode=0o755,
        )
        self._reference_path = reference_path
        assert reference_path is None or reference_path.is_dir

    @property
    def is_dir(self) -> bool:
        return True

    @property
    def is_file(self) -> bool:
        return False

    @property
    def is_symlink(self) -> bool:
        return False

    def readlink(self) -> str:
        raise TypeError(f'"{self._orphan_safe_path()!r}" is a directory; not a symlink')


class SymlinkVirtualPath(VirtualPathWithReference):
    __slots__ = ("_link_target",)

    def __init__(
        self,
        basename: str,
        parent_dir: FSPath,
        link_target: str,
        *,
        reference_path: Optional[VirtualPath] = None,
    ) -> None:
        super().__init__(
            basename,
            parent=parent_dir,
            default_mode=_SYMLINK_MODE,
            reference_path=reference_path,
        )
        self._link_target = link_target

    @property
    def is_dir(self) -> bool:
        return False

    @property
    def is_file(self) -> bool:
        return False

    @property
    def is_symlink(self) -> bool:
        return True

    def readlink(self) -> str:
        return self._link_target


class FSBackedFilePath(VirtualPathWithReference):
    __slots__ = ("_fs_path", "_replaceable_inline")

    def __init__(
        self,
        basename: str,
        parent_dir: FSPath,
        fs_path: str,
        *,
        replaceable_inline: bool = False,
        initial_mode: Optional[int] = None,
        mtime: Optional[float] = None,
        stat_cache: Optional[os.stat_result] = None,
        reference_path: Optional[VirtualPath] = None,
    ) -> None:
        super().__init__(
            basename,
            parent_dir,
            default_mode=0o644,
            reference_path=reference_path,
        )
        self._fs_path = fs_path
        self._replaceable_inline = replaceable_inline
        if initial_mode is not None:
            self.mode = initial_mode
        if mtime is not None:
            self._mtime = mtime
        self._stat_cache = stat_cache
        assert (
            not replaceable_inline or "debputy/scratch-dir/" in fs_path
        ), f"{fs_path} should not be inline-replaceable -- {self.path}"

    @property
    def is_dir(self) -> bool:
        return False

    @property
    def is_file(self) -> bool:
        return True

    @property
    def is_symlink(self) -> bool:
        return False

    def readlink(self) -> str:
        raise TypeError(f'"{self._orphan_safe_path()!r}" is a file; not a symlink')

    @property
    def has_fs_path(self) -> bool:
        return True

    @property
    def fs_path(self) -> str:
        return self._fs_path

    @property
    def _can_replace_inline(self) -> bool:
        return self._replaceable_inline

    def _replaced_path(self, new_fs_path: str) -> None:
        self._fs_path = new_fs_path
        self._reference_path = None
        self._replaceable_inline = True


_SYMLINK_MODE = 0o777


class VirtualTestPath(FSPath):
    __slots__ = (
        "_path_type",
        "_has_fs_path",
        "_fs_path",
        "_link_target",
        "_content",
        "_materialized_content",
    )

    def __init__(
        self,
        basename: str,
        parent_dir: Optional[FSPath],
        mode: Optional[int] = None,
        mtime: Optional[float] = None,
        is_dir: bool = False,
        has_fs_path: Optional[bool] = False,
        fs_path: Optional[str] = None,
        link_target: Optional[str] = None,
        content: Optional[str] = None,
        materialized_content: Optional[str] = None,
    ) -> None:
        if is_dir:
            self._path_type = PathType.DIRECTORY
        elif link_target is not None:
            self._path_type = PathType.SYMLINK
            if mode is not None and mode != _SYMLINK_MODE:
                raise ValueError(
                    f'Please do not assign a mode to symlinks. Triggered for "{basename}".'
                )
            assert mode is None or mode == _SYMLINK_MODE
        else:
            self._path_type = PathType.FILE

        if mode is not None:
            initial_mode = mode
        else:
            initial_mode = 0o755 if is_dir else 0o644

        self._link_target = link_target
        if has_fs_path is None:
            has_fs_path = bool(fs_path)
        self._has_fs_path = has_fs_path
        self._fs_path = fs_path
        self._materialized_content = materialized_content
        super().__init__(
            basename,
            parent=parent_dir,
            initial_mode=initial_mode,
            mtime=mtime,
        )
        self._content = content

    @property
    def is_dir(self) -> bool:
        return self._path_type == PathType.DIRECTORY

    @property
    def is_file(self) -> bool:
        return self._path_type == PathType.FILE

    @property
    def is_symlink(self) -> bool:
        return self._path_type == PathType.SYMLINK

    def readlink(self) -> str:
        if not self.is_symlink:
            raise TypeError(f"readlink is only valid for symlinks ({self.path!r})")
        link_target = self._link_target
        assert link_target is not None
        return link_target

    @property
    def mtime(self) -> float:
        if self._mtime is None:
            self._mtime = time.time()
        return self._mtime

    @mtime.setter
    def mtime(self, new_mtime: float) -> None:
        self._rw_check()
        self._mtime = new_mtime

    @property
    def has_fs_path(self) -> bool:
        return self._has_fs_path

    def stat(self) -> os.stat_result:
        if self.has_fs_path:
            path = self.fs_path
            if path is None:
                raise PureVirtualPathError(
                    f"The test wants a real stat of {self._orphan_safe_path()!r}, which this mock path"
                    " cannot provide!"
                )
            try:
                return os.stat(path)
            except FileNotFoundError as e:
                raise PureVirtualPathError(
                    f"The test wants a real stat of {self._orphan_safe_path()!r}, which this mock path"
                    " cannot provide! (An fs_path was provided, but it did not exist)"
                ) from e

        raise PureVirtualPathError(
            "stat() is only applicable to paths backed by the file system. The path"
            f" {self._orphan_safe_path()!r} is purely virtual"
        )

    @property
    def size(self) -> int:
        if self._content is not None:
            return len(self._content.encode("utf-8"))
        if not self.has_fs_path or self.fs_path is None:
            return 0
        return self.stat().st_size

    @property
    def fs_path(self) -> str:
        if self.has_fs_path:
            if self._fs_path is None and self._materialized_content is not None:
                with tempfile.NamedTemporaryFile(
                    mode="w+t",
                    encoding="utf-8",
                    suffix=f"__{self.name}",
                    delete=False,
                ) as fd:
                    filepath = fd.name
                    fd.write(self._materialized_content)
                    self._fs_path = filepath
                    atexit.register(lambda: os.unlink(filepath))

            path = self._fs_path
            if path is None:
                raise PureVirtualPathError(
                    f"The test wants a real file system entry of {self._orphan_safe_path()!r}, which this"
                    " mock path cannot provide!"
                )
            return path
        raise PureVirtualPathError(
            "fs_path is only applicable to paths backed by the file system. The path"
            f" {self._orphan_safe_path()!r} is purely virtual"
        )

    def replace_fs_path_content(
        self,
        *,
        use_fs_path_mode: bool = False,
    ) -> ContextManager[str]:
        if self._content is not None:
            raise TypeError(
                f"The `replace_fs_path_content()` method was called on {self.path}. Said path was"
                " created with `content` but for this method to work, the path should have been"
                " created with `materialized_content`"
            )
        return super().replace_fs_path_content(use_fs_path_mode=use_fs_path_mode)

    def open(
        self,
        *,
        byte_io: bool = False,
        buffering: int = -1,
    ) -> Union[TextIO, BinaryIO]:
        if self._content is None:
            try:
                return super().open(byte_io=byte_io, buffering=buffering)
            except FileNotFoundError as e:
                raise TestPathWithNonExistentFSPathError(
1534 "The test path {self.path} had an fs_path {self._fs_path}, which does not" 

-

1535 " exist. This exception can only occur in the testsuite. Either have the" 

-

1536 " test provide content for the path (`virtual_path_def(..., content=...) or," 

-

1537 " if that is too painful in general, have the code accept this error as a " 

-

1538 " test only-case and provide a default." 

-

1539 ) from e 

-

1540 

-

1541 if byte_io: 

-

1542 return io.BytesIO(self._content.encode("utf-8")) 

-

1543 return io.StringIO(self._content) 

-

1544 

-

1545 def _replaced_path(self, new_fs_path: str) -> None: 

-

1546 self._fs_path = new_fs_path 

-

1547 

-

1548 

-

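
# A minimal sketch (illustration only, not part of the upstream file) of how
# VirtualTestPath serves content in tests: `content=...` is answered from
# memory via open(), while `materialized_content=...` is lazily written to a
# temporary file the first time fs_path is accessed (removed again at exit).
def _sketch_virtual_test_path_content() -> None:
    root = build_virtual_fs(["./etc/"], read_write_fs=True)
    conf = VirtualTestPath("example.conf", root["etc"], content="setting=1\n")
    assert conf.open().read() == "setting=1\n"  # io.StringIO; no real file used
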
class FSROOverlay(VirtualPathBase):
    __slots__ = (
        "_path",
        "_fs_path",
        "_parent",
        "_stat_cache",
        "_readlink_cache",
        "_children",
        "_stat_failed_cache",
        "__weakref__",
    )

    def __init__(
        self,
        path: str,
        fs_path: str,
        parent: Optional["FSROOverlay"],
    ) -> None:
        self._path: str = path
        self._fs_path: str = _normalize_path(fs_path, with_prefix=False)
        self._parent: Optional[ReferenceType[FSROOverlay]] = (
            ref(parent) if parent is not None else None
        )
        self._stat_cache: Optional[os.stat_result] = None
        self._readlink_cache: Optional[str] = None
        self._stat_failed_cache = False
        self._children: Optional[Mapping[str, FSROOverlay]] = None

    @classmethod
    def create_root_dir(cls, path: str, fs_path: str) -> "FSROOverlay":
        return FSROOverlay(path, fs_path, None)

    @property
    def name(self) -> str:
        return os.path.basename(self._path)

    @property
    def iterdir(self) -> Iterable["FSROOverlay"]:
        if not self.is_dir:
            return
        if self._children is None:
            self._ensure_children_are_resolved()
        yield from assume_not_none(self._children).values()

    def lookup(self, path: str) -> Optional["FSROOverlay"]:
        if not self.is_dir:
            return None
        if self._children is None:
            self._ensure_children_are_resolved()

        absolute, _, path_parts = _split_path(path)
        current = cast("FSROOverlay", _root(self)) if absolute else self
        for no, dir_part in enumerate(path_parts):
            if dir_part == ".":
                continue
            if dir_part == "..":
                p = current.parent_dir
                if p is None:
                    raise ValueError(f'The path "{path}" escapes the root dir')
                current = p
                continue
            try:
                current = current[dir_part]
            except KeyError:
                return None
        return current

    def all_paths(self) -> Iterable["FSROOverlay"]:
        yield self
        if not self.is_dir:
            return
        stack = list(self.iterdir)
        stack.reverse()
        while stack:
            current = stack.pop()
            yield current
            if current.is_dir:
                if current._children is None:
                    current._ensure_children_are_resolved()
                stack.extend(reversed(current._children.values()))

    def _ensure_children_are_resolved(self) -> None:
        if not self.is_dir or self._children:
            return
        dir_path = self.path
        dir_fs_path = self.fs_path
        children = {}
        for name in sorted(os.listdir(dir_fs_path), key=os.path.basename):
            child_path = os.path.join(dir_path, name) if dir_path != "." else name
            child_fs_path = (
                os.path.join(dir_fs_path, name) if dir_fs_path != "." else name
            )
            children[name] = FSROOverlay(
                child_path,
                child_fs_path,
                self,
            )
        self._children = children

    @property
    def is_detached(self) -> bool:
        return False

    def __getitem__(self, key) -> "VirtualPath":
        if not self.is_dir:
            raise KeyError(key)
        if self._children is None:
            self._ensure_children_are_resolved()
        if isinstance(key, FSPath):
            key = key.name
        return self._children[key]

    def __delitem__(self, key) -> None:
        self._error_ro_fs()

    @property
    def is_read_write(self) -> bool:
        return False

    def _rw_check(self) -> None:
        self._error_ro_fs()

    def _error_ro_fs(self) -> NoReturn:
        raise DebputyFSIsROError(
            f'Attempt to write to "{self.path}" failed:'
            " Debputy Virtual File system is R/O."
        )

    @property
    def path(self) -> str:
        return self._path

    @property
    def parent_dir(self) -> Optional["FSROOverlay"]:
        parent = self._parent
        if parent is None:
            return None
        resolved = parent()
        if resolved is None:
            raise RuntimeError("Parent was garbage collected!")
        return resolved

    def stat(self) -> os.stat_result:
        if self._stat_failed_cache:
            raise FileNotFoundError(
                errno.ENOENT, os.strerror(errno.ENOENT), self.fs_path
            )

        if self._stat_cache is None:
            try:
                self._stat_cache = os.lstat(self.fs_path)
            except FileNotFoundError:
                self._stat_failed_cache = True
                raise
        return self._stat_cache

    @property
    def mode(self) -> int:
        return stat.S_IMODE(self.stat().st_mode)

    @mode.setter
    def mode(self, _unused: int) -> None:
        self._error_ro_fs()

    @property
    def mtime(self) -> float:
        return self.stat().st_mtime

    @mtime.setter
    def mtime(self, new_mtime: float) -> None:
        self._error_ro_fs()

    def readlink(self) -> str:
        if not self.is_symlink:
            raise TypeError(f"readlink is only valid for symlinks ({self.path!r})")
        if self._readlink_cache is None:
            self._readlink_cache = os.readlink(self.fs_path)
        return self._readlink_cache

    @property
    def fs_path(self) -> str:
        return self._fs_path

    @property
    def is_dir(self) -> bool:
        # The root path can have a non-existent fs_path (such as d/tmp not always existing)
        try:
            return stat.S_ISDIR(self.stat().st_mode)
        except FileNotFoundError:
            return False

    @property
    def is_file(self) -> bool:
        # The root path can have a non-existent fs_path (such as d/tmp not always existing)
        try:
            return stat.S_ISREG(self.stat().st_mode)
        except FileNotFoundError:
            return False

    @property
    def is_symlink(self) -> bool:
        # The root path can have a non-existent fs_path (such as d/tmp not always existing)
        try:
            return stat.S_ISLNK(self.stat().st_mode)
        except FileNotFoundError:
            return False

    @property
    def has_fs_path(self) -> bool:
        return True

    def open(
        self,
        *,
        byte_io: bool = False,
        buffering: int = -1,
    ) -> Union[TextIO, BinaryIO]:
        # Allow symlinks for open here, because we can let the OS resolve the symlink reliably in this
        # case.
        if not self.is_file and not self.is_symlink:
            raise TypeError(
                f"Cannot open {self.path} for reading: It is not a file nor a symlink"
            )

        if byte_io:
            return open(self.fs_path, "rb", buffering=buffering)
        return open(self.fs_path, "rt", encoding="utf-8", buffering=buffering)

    def chown(
        self,
        owner: Optional[StaticFileSystemOwner],
        group: Optional[StaticFileSystemGroup],
    ) -> None:
        self._error_ro_fs()

    def mkdir(self, name: str) -> "VirtualPath":
        self._error_ro_fs()

    def add_file(
        self,
        name: str,
        *,
        unlink_if_exists: bool = True,
        use_fs_path_mode: bool = False,
        mode: int = 0o0644,
        mtime: Optional[float] = None,
    ) -> ContextManager["VirtualPath"]:
        self._error_ro_fs()

    def add_symlink(self, link_name: str, link_target: str) -> "VirtualPath":
        self._error_ro_fs()

    def unlink(self, *, recursive: bool = False) -> None:
        self._error_ro_fs()

    def metadata(
        self,
        metadata_type: Type[PMT],
        *,
        owning_plugin: Optional[str] = None,
    ) -> PathMetadataReference[PMT]:
        current_plugin = self._current_plugin()
        if owning_plugin is None:
            owning_plugin = current_plugin
        return AlwaysEmptyReadOnlyMetadataReference(
            owning_plugin,
            current_plugin,
            metadata_type,
        )
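
# A minimal sketch (illustration only, not part of the upstream file):
# FSROOverlay exposes an on-disk tree read-only. Children are resolved lazily
# via os.listdir() and every mutation raises DebputyFSIsROError.
def _sketch_fsro_overlay(debian_dir_fs_path: str) -> None:
    overlay = FSROOverlay.create_root_dir("debian", debian_dir_fs_path)
    for entry in overlay.all_paths():  # lazy, depth-first walk
        _ = entry.path
    overlay.unlink()  # raises DebputyFSIsROError: the overlay is R/O
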
class FSROOverlayRootDir(FSROOverlay):
    __slots__ = ("_plugin_context",)

    def __init__(self, path: str, fs_path: str) -> None:
        super().__init__(path, fs_path, None)
        self._plugin_context = CurrentPluginContextManager("debputy")

    def _current_plugin(self) -> str:
        return self._plugin_context.current_plugin_name

    @contextlib.contextmanager
    def change_plugin_context(self, new_plugin: str) -> Iterator[str]:
        with self._plugin_context.change_plugin_context(new_plugin) as r:
            yield r


def as_path_def(pd: Union[str, PathDef]) -> PathDef:
    return PathDef(pd) if isinstance(pd, str) else pd


def as_path_defs(paths: Iterable[Union[str, PathDef]]) -> Iterable[PathDef]:
    yield from (as_path_def(p) for p in paths)


def build_virtual_fs(
    paths: Iterable[Union[str, PathDef]],
    read_write_fs: bool = False,
) -> "FSPath":
    root_dir: Optional[FSRootDir] = None
    directories: Dict[str, FSPath] = {}
    non_directories = set()

    def _ensure_parent_dirs(p: str) -> None:
        current = p.rstrip("/")
        missing_dirs = []
        while True:
            current = os.path.dirname(current)
            if current in directories:
                break
            if current in non_directories:
                raise ValueError(
                    f'Conflicting definition for "{current}". The path "{p}" wants it as a directory,'
                    ' but it is defined as a non-directory. (Ensure dirs end with "/")'
                )
            missing_dirs.append(current)
        for dir_path in reversed(missing_dirs):
            parent_dir = directories[os.path.dirname(dir_path)]
            d = VirtualTestPath(os.path.basename(dir_path), parent_dir, is_dir=True)
            directories[dir_path] = d

    for path_def in as_path_defs(paths):
        path = path_def.path_name
        if path in directories or path in non_directories:
            raise ValueError(
                f'Duplicate definition of "{path}". Can be a false positive if the input is not in'
                ' "correct order" (ensure directories occur before their children)'
            )
        if root_dir is None:
            root_fs_path = None
            if path in (".", "./", "/"):
                root_fs_path = path_def.fs_path
            root_dir = FSRootDir(fs_path=root_fs_path)
            directories["."] = root_dir

        if path not in (".", "./", "/") and not path.startswith("./"):
            path = "./" + path
        if path not in (".", "./", "/"):
            _ensure_parent_dirs(path)
        if path in (".", "./"):
            assert "." in directories
            continue
        is_dir = False
        if path.endswith("/"):
            path = path[:-1]
            is_dir = True
        directory = directories[os.path.dirname(path)]
        assert not is_dir or not bool(
            path_def.link_target
        ), f"is_dir={is_dir} vs. link_target={path_def.link_target}"
        fs_path = VirtualTestPath(
            os.path.basename(path),
            directory,
            is_dir=is_dir,
            mode=path_def.mode,
            mtime=path_def.mtime,
            has_fs_path=path_def.has_fs_path,
            fs_path=path_def.fs_path,
            link_target=path_def.link_target,
            content=path_def.content,
            materialized_content=path_def.materialized_content,
        )
        assert not fs_path.is_detached
        if fs_path.is_dir:
            directories[fs_path.path] = fs_path
        else:
            non_directories.add(fs_path.path)

    if root_dir is None:
        root_dir = FSRootDir()

    root_dir.is_read_write = read_write_fs
    return root_dir
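
# A minimal sketch (illustration only, not part of the upstream file) of
# build_virtual_fs() defaults: the tree is read-only unless read_write_fs=True
# is passed, and PathDef entries (instead of plain strings) can carry mode,
# mtime, link_target, content or materialized_content.
def _sketch_build_virtual_fs() -> None:
    root = build_virtual_fs(["./etc/", "./etc/fstab", "./usr/bin/tool"])
    assert root["etc"]["fstab"].is_file
    assert not root.is_read_write  # mutations raise until explicitly enabled
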
diff --git a/coverage-report/d_267b6307937f1878_highlevel_manifest_parser_py.html b/coverage-report/d_267b6307937f1878_highlevel_manifest_parser_py.html
deleted file mode 100644
index 54d2244..0000000
--- a/coverage-report/d_267b6307937f1878_highlevel_manifest_parser_py.html
+++ /dev/null
@@ -1,637 +0,0 @@
[Deleted coverage page for src/debputy/highlevel_manifest_parser.py: 68% of 253 statements covered; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200. The embedded source listing follows.]
import collections
import contextlib
from typing import (
    Optional,
    Dict,
    Callable,
    List,
    Any,
    Union,
    Mapping,
    IO,
    Iterator,
    cast,
    Tuple,
)

from debian.debian_support import DpkgArchTable

from debputy.highlevel_manifest import (
    HighLevelManifest,
    PackageTransformationDefinition,
    MutableYAMLManifest,
)
from debputy.maintscript_snippet import (
    MaintscriptSnippet,
    STD_CONTROL_SCRIPTS,
    MaintscriptSnippetContainer,
)
from debputy.packages import BinaryPackage, SourcePackage
from debputy.path_matcher import (
    MatchRuleType,
    ExactFileSystemPath,
    MatchRule,
)
from debputy.substitution import Substitution
from debputy.util import (
    _normalize_path,
    escape_shell,
    assume_not_none,
)
from debputy.util import _warn, _info
from ._deb_options_profiles import DebBuildOptionsAndProfiles
from .architecture_support import DpkgArchitectureBuildProcessValuesTable
from .filesystem_scan import FSROOverlay
from .installations import InstallRule, PPFInstallRule
from .manifest_parser.exceptions import ManifestParseException
from .manifest_parser.parser_data import ParserContextData
from .manifest_parser.util import AttributePath
from .packager_provided_files import detect_all_packager_provided_files
from .plugin.api import VirtualPath
from .plugin.api.impl_types import (
    TP,
    TTP,
    DispatchingTableParser,
    OPARSER_MANIFEST_ROOT,
    PackageContextData,
)
from .plugin.api.feature_set import PluginProvidedFeatureSet
from .yaml import YAMLError, MANIFEST_YAML

try:
    from Levenshtein import distance
except ImportError:

    def _detect_possible_typo(
        _d,
        _key,
        _attribute_parent_path: AttributePath,
        required: bool,
    ) -> None:
        if required:
            _info(
                "Install python3-levenshtein to have debputy try to detect typos in the manifest."
            )

else:

    def _detect_possible_typo(
        d,
        key,
        _attribute_parent_path: AttributePath,
        _required: bool,
    ) -> None:
        k_len = len(key)
        for actual_key in d:
            if abs(k_len - len(actual_key)) > 2:
                continue
            dist = distance(key, actual_key)
            if dist > 2:
                continue
            path = _attribute_parent_path.path
            ref = f'at "{path}"' if path else "at the manifest root level"
            _warn(
                f'Possible typo: The key "{actual_key}" should probably have been "{key}" {ref}'
            )


def _per_package_subst_variables(
    p: BinaryPackage,
    *,
    name: Optional[str] = None,
) -> Dict[str, str]:
    return {
        "PACKAGE": name if name is not None else p.name,
    }
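
# A minimal sketch (illustration only; assumes debputy's "{{...}}" manifest
# substitution syntax): the mapping above feeds
# Substitution.with_extra_substitutions(), so {{PACKAGE}} resolves to the
# binary package name, or to the overriding -dbgsym name as seen in
# _ensure_package_states_is_initialized() below.
#
#   _per_package_subst_variables(pkg)                    -> {"PACKAGE": "foo"}
#   _per_package_subst_variables(pkg, name="foo-dbgsym") -> {"PACKAGE": "foo-dbgsym"}
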
108class HighLevelManifestParser(ParserContextData): 

-

109 def __init__( 

-

110 self, 

-

111 manifest_path: str, 

-

112 source_package: SourcePackage, 

-

113 binary_packages: Mapping[str, BinaryPackage], 

-

114 substitution: Substitution, 

-

115 dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable, 

-

116 dpkg_arch_query_table: DpkgArchTable, 

-

117 build_env: DebBuildOptionsAndProfiles, 

-

118 plugin_provided_feature_set: PluginProvidedFeatureSet, 

-

119 *, 

-

120 # Available for testing purposes only 

-

121 debian_dir: Union[str, VirtualPath] = "./debian", 

-

122 ): 

-

123 self.manifest_path = manifest_path 

-

124 self._source_package = source_package 

-

125 self._binary_packages = binary_packages 

-

126 self._mutable_yaml_manifest: Optional[MutableYAMLManifest] = None 

-

127 # In source context, some variables are known to be unresolvable. Record this, so 

-

128 # we can give better error messages. 

-

129 self._substitution = substitution 

-

130 self._dpkg_architecture_variables = dpkg_architecture_variables 

-

131 self._dpkg_arch_query_table = dpkg_arch_query_table 

-

132 self._build_env = build_env 

-

133 self._package_state_stack: List[PackageTransformationDefinition] = [] 

-

134 self._plugin_provided_feature_set = plugin_provided_feature_set 

-

135 self._declared_variables = {} 

-

136 

-

137 if isinstance(debian_dir, str): 137 ↛ 138line 137 didn't jump to line 138, because the condition on line 137 was never true

-

138 debian_dir = FSROOverlay.create_root_dir("debian", debian_dir) 

-

139 

-

140 self._debian_dir = debian_dir 

-

141 

-

142 # Delayed initialized; we rely on this delay to parse the variables. 

-

143 self._all_package_states = None 

-

144 

-

145 self._install_rules: Optional[List[InstallRule]] = None 

-

146 self._ownership_caches_loaded = False 

-

147 self._used = False 

-

148 

-

149 def _ensure_package_states_is_initialized(self) -> None: 

-

150 if self._all_package_states is not None: 

-

151 return 

-

152 substitution = self._substitution 

-

153 binary_packages = self._binary_packages 

-

154 assert self._all_package_states is None 

-

155 

-

156 self._all_package_states = { 

-

157 n: PackageTransformationDefinition( 

-

158 binary_package=p, 

-

159 substitution=substitution.with_extra_substitutions( 

-

160 **_per_package_subst_variables(p) 

-

161 ), 

-

162 is_auto_generated_package=False, 

-

163 maintscript_snippets=collections.defaultdict( 

-

164 MaintscriptSnippetContainer 

-

165 ), 

-

166 ) 

-

167 for n, p in binary_packages.items() 

-

168 } 

-

169 for n, p in binary_packages.items(): 

-

170 dbgsym_name = f"{n}-dbgsym" 

-

171 if dbgsym_name in self._all_package_states: 171 ↛ 172line 171 didn't jump to line 172, because the condition on line 171 was never true

-

172 continue 

-

173 self._all_package_states[dbgsym_name] = PackageTransformationDefinition( 

-

174 binary_package=p, 

-

175 substitution=substitution.with_extra_substitutions( 

-

176 **_per_package_subst_variables(p, name=dbgsym_name) 

-

177 ), 

-

178 is_auto_generated_package=True, 

-

179 maintscript_snippets=collections.defaultdict( 

-

180 MaintscriptSnippetContainer 

-

181 ), 

-

182 ) 

-

183 

-

184 @property 

-

185 def binary_packages(self) -> Mapping[str, BinaryPackage]: 

-

186 return self._binary_packages 

-

187 

-

188 @property 

-

189 def _package_states(self) -> Mapping[str, PackageTransformationDefinition]: 

-

190 assert self._all_package_states is not None 

-

191 return self._all_package_states 

-

192 

-

193 @property 

-

194 def dpkg_architecture_variables(self) -> DpkgArchitectureBuildProcessValuesTable: 

-

195 return self._dpkg_architecture_variables 

-

196 

-

197 @property 

-

198 def dpkg_arch_query_table(self) -> DpkgArchTable: 

-

199 return self._dpkg_arch_query_table 

-

200 

-

201 @property 

-

202 def build_env(self) -> DebBuildOptionsAndProfiles: 

-

203 return self._build_env 

-

204 

-

205 def build_manifest(self) -> HighLevelManifest: 

-

206 if self._used: 206 ↛ 207line 206 didn't jump to line 207, because the condition on line 206 was never true

-

207 raise TypeError("build_manifest can only be called once!") 

-

208 self._used = True 

-

209 self._ensure_package_states_is_initialized() 

-

210 for var, attribute_path in self._declared_variables.items(): 

-

211 if not self.substitution.is_used(var): 

-

212 raise ManifestParseException( 

-

213 f'The variable "{var}" is unused. Either use it or remove it.' 

-

214 f" The variable was declared at {attribute_path.path}." 

-

215 ) 

-

216 if isinstance(self, YAMLManifestParser) and self._mutable_yaml_manifest is None: 

-

217 self._mutable_yaml_manifest = MutableYAMLManifest.empty_manifest() 

-

218 all_packager_provided_files = detect_all_packager_provided_files( 

-

219 self._plugin_provided_feature_set.packager_provided_files, 

-

220 self._debian_dir, 

-

221 self.binary_packages, 

-

222 ) 

-

223 

-

224 for package in self._package_states: 

-

225 with self.binary_package_context(package) as context: 

-

226 if not context.is_auto_generated_package: 

-

227 ppf_result = all_packager_provided_files[package] 

-

228 if ppf_result.auto_installable: 228 ↛ 229line 228 didn't jump to line 229, because the condition on line 228 was never true

-

229 context.install_rules.append( 

-

230 PPFInstallRule( 

-

231 context.binary_package, 

-

232 context.substitution, 

-

233 ppf_result.auto_installable, 

-

234 ) 

-

235 ) 

-

236 context.reserved_packager_provided_files.update( 

-

237 ppf_result.reserved_only 

-

238 ) 

-

239 self._transform_dpkg_maintscript_helpers_to_snippets() 

-

240 

-

241 return HighLevelManifest( 

-

242 self.manifest_path, 

-

243 self._mutable_yaml_manifest, 

-

244 self._install_rules, 

-

245 self._source_package, 

-

246 self.binary_packages, 

-

247 self.substitution, 

-

248 self._package_states, 

-

249 self._dpkg_architecture_variables, 

-

250 self._dpkg_arch_query_table, 

-

251 self._build_env, 

-

252 self._plugin_provided_feature_set, 

-

253 self._debian_dir, 

-

254 ) 

-

255 

-

256 @contextlib.contextmanager 

-

257 def binary_package_context( 

-

258 self, package_name: str 

-

259 ) -> Iterator[PackageTransformationDefinition]: 

-

260 if package_name not in self._package_states: 

-

261 self._error( 

-

262 f'The package "{package_name}" is not present in the debian/control file (could not find' 

-

263 f' "Package: {package_name}" in a binary stanza) nor is it a -dbgsym package for one' 

-

264 " for a package in debian/control." 

-

265 ) 

-

266 package_state = self._package_states[package_name] 

-

267 self._package_state_stack.append(package_state) 

-

268 ps_len = len(self._package_state_stack) 

-

269 yield package_state 

-

270 if ps_len != len(self._package_state_stack): 270 ↛ 271 (line 270 didn't jump to line 271, because the condition on line 270 was never true)

-

271 raise RuntimeError("Internal error: Unbalanced stack manipulation detected") 

-

272 self._package_state_stack.pop() 

-

273 
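The context manager above enforces a strict push/verify/pop discipline on the per-package state stack. A minimal, self-contained sketch of the same pattern, using simplified stand-in names rather than debputy's real classes:

import contextlib
from typing import Iterator, List

class StackDemo:
    """Self-contained model of the push/verify/pop discipline above."""

    def __init__(self) -> None:
        self._stack: List[str] = []

    @contextlib.contextmanager
    def context(self, name: str) -> Iterator[str]:
        self._stack.append(name)
        depth = len(self._stack)
        yield name
        # Mirrors the original: the depth must be unchanged before popping.
        if depth != len(self._stack):
            raise RuntimeError("Internal error: Unbalanced stack manipulation detected")
        self._stack.pop()

demo = StackDemo()
with demo.context("pkg-a"):
    with demo.context("pkg-a-dbgsym"):
        pass  # nested contexts pop in LIFO order, keeping the stack balanced
assert demo._stack == []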

-

274 def dispatch_parser_table_for(self, rule_type: TTP) -> DispatchingTableParser[TP]: 

-

275 t = self._plugin_provided_feature_set.manifest_parser_generator.dispatch_parser_table_for( 

-

276 rule_type 

-

277 ) 

-

278 if t is None: 

-

279 raise AssertionError( 

-

280 f"Internal error: No dispatching parser for {rule_type.__name__}" 

-

281 ) 

-

282 return t 

-

283 

-

284 @property 

-

285 def substitution(self) -> Substitution: 

-

286 if self._package_state_stack: 

-

287 return self._package_state_stack[-1].substitution 

-

288 return self._substitution 

-

289 

-

290 def add_extra_substitution_variables( 

-

291 self, 

-

292 **extra_substitutions: Tuple[str, AttributePath], 

-

293 ) -> Substitution: 

-

294 if self._package_state_stack or self._all_package_states is not None: 294 ↛ 299 (line 294 didn't jump to line 299, because the condition on line 294 was never true)

-

295 # For one, it would not "bubble up" correctly when added to the lowest stack. 

-

296 # And if it is not added to the lowest stack, then you get errors about it being 

-

297 # unknown as soon as you leave the stack (which is weird for the user when 

-

298 # the variable is sometimes known and sometimes not) 

-

299 raise RuntimeError("Cannot use add_extra_substitution from this state") 

-

300 for key, (_, path) in extra_substitutions.items(): 

-

301 self._declared_variables[key] = path 

-

302 self._substitution = self._substitution.with_extra_substitutions( 

-

303 **{k: v[0] for k, v in extra_substitutions.items()} 

-

304 ) 

-

305 return self._substitution 

-

306 

-

307 @property 

-

308 def current_binary_package_state(self) -> PackageTransformationDefinition: 

-

309 if not self._package_state_stack: 309 ↛ 310 (line 309 didn't jump to line 310, because the condition on line 309 was never true)

-

310 raise RuntimeError("Invalid state: Not in a binary package context") 

-

311 return self._package_state_stack[-1] 

-

312 

-

313 @property 

-

314 def is_in_binary_package_state(self) -> bool: 

-

315 return bool(self._package_state_stack) 

-

316 

-

317 def _transform_dpkg_maintscript_helpers_to_snippets(self) -> None: 

-

318 package_state = self.current_binary_package_state 

-

319 for dmh in package_state.dpkg_maintscript_helper_snippets: 319 ↛ 320 (line 319 didn't jump to line 320, because the loop on line 319 never started)

-

320 snippet = MaintscriptSnippet( 

-

321 definition_source=dmh.definition_source, 

-

322 snippet=f'dpkg-maintscript-helper {escape_shell(*dmh.cmdline)} -- "$@"\n', 

-

323 ) 

-

324 for script in STD_CONTROL_SCRIPTS: 

-

325 package_state.maintscript_snippets[script].append(snippet) 

-

326 

-

327 def normalize_path( 

-

328 self, 

-

329 path: str, 

-

330 definition_source: AttributePath, 

-

331 *, 

-

332 allow_root_dir_match: bool = False, 

-

333 ) -> ExactFileSystemPath: 

-

334 try: 

-

335 normalized = _normalize_path(path) 

-

336 except ValueError: 

-

337 self._error( 

-

338 f'The path "{path}" provided in {definition_source.path} should be relative to the root of the' 

-

339 ' package and not use any ".." or "." segments.' 

-

340 ) 

-

341 if normalized == "." and not allow_root_dir_match: 

-

342 self._error( 

-

343 "Manifests must not change the root directory of the deb file. Please correct" 

-

344 f' "{definition_source.path}" (path: "{path}) in {self.manifest_path}' 

-

345 ) 

-

346 return ExactFileSystemPath( 

-

347 self.substitution.substitute(normalized, definition_source.path) 

-

348 ) 

-

349 

-

350 def parse_path_or_glob( 

-

351 self, 

-

352 path_or_glob: str, 

-

353 definition_source: AttributePath, 

-

354 ) -> MatchRule: 

-

355 match_rule = MatchRule.from_path_or_glob( 

-

356 path_or_glob, definition_source.path, substitution=self.substitution 

-

357 ) 

-

358 # NB: "." and "/" will be translated to MATCH_ANYTHING by MatchRule.from_path_or_glob, 

-

359 # so there is no need to check for an exact match on "." like in normalize_path. 

-

360 if match_rule.rule_type == MatchRuleType.MATCH_ANYTHING: 

-

361 self._error( 

-

362 f'The chosen match rule "{path_or_glob}" matches everything (including the deb root directory).' 

-

363 f' Please correct "{definition_source.path}" (path: "{path_or_glob}") in {self.manifest_path} to' 

-

364 f' something that matches "less" than everything.' 

-

365 ) 

-

366 return match_rule 

-

367 
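parse_path_or_glob rejects any pattern that would match the whole deb root. A self-contained sketch of that guard, with fnmatch standing in for debputy's internal MatchRule (illustrative names and patterns only, not the real API):

import fnmatch
from typing import Callable

def parse_path_or_glob_demo(path_or_glob: str) -> Callable[[str], bool]:
    # Patterns equivalent to "match anything" would sweep in the deb root
    # directory itself, so reject them up front (mirrors the check above).
    if path_or_glob in (".", "/", "*", "**"):
        raise ValueError(
            f'The match rule "{path_or_glob}" matches everything;'
            " please pick something that matches less than everything."
        )
    pattern = path_or_glob.lstrip("/")
    return lambda path: fnmatch.fnmatch(path, pattern)

matcher = parse_path_or_glob_demo("usr/share/doc/*")
assert matcher("usr/share/doc/README")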

-

368 def parse_manifest(self) -> HighLevelManifest: 

-

369 raise NotImplementedError 

-

370 

-

371 

-

372class YAMLManifestParser(HighLevelManifestParser): 

-

373 def _optional_key( 

-

374 self, 

-

375 d: Mapping[str, Any], 

-

376 key: str, 

-

377 attribute_parent_path: AttributePath, 

-

378 expected_type=None, 

-

379 default_value=None, 

-

380 ): 

-

381 v = d.get(key) 

-

382 if v is None: 

-

383 _detect_possible_typo(d, key, attribute_parent_path, False) 

-

384 return default_value 

-

385 if expected_type is not None: 

-

386 return self._ensure_value_is_type( 

-

387 v, expected_type, key, attribute_parent_path 

-

388 ) 

-

389 return v 

-

390 

-

391 def _required_key( 

-

392 self, 

-

393 d: Mapping[str, Any], 

-

394 key: str, 

-

395 attribute_parent_path: AttributePath, 

-

396 expected_type=None, 

-

397 extra: Optional[Union[str, Callable[[], str]]] = None, 

-

398 ): 

-

399 v = d.get(key) 

-

400 if v is None: 

-

401 _detect_possible_typo(d, key, attribute_parent_path, True) 

-

402 if extra is not None: 

-

403 msg = extra if isinstance(extra, str) else extra() 

-

404 extra_info = " " + msg 

-

405 else: 

-

406 extra_info = "" 

-

407 self._error( 

-

408 f'Missing required key {key} at {attribute_parent_path.path} in manifest "{self.manifest_path}".' 

-

409 f"{extra_info}" 

-

410 ) 

-

411 

-

412 if expected_type is not None: 

-

413 return self._ensure_value_is_type( 

-

414 v, expected_type, key, attribute_parent_path 

-

415 ) 

-

416 return v 

-

417 
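_detect_possible_typo itself is not part of this listing; the following is a plausible, self-contained stand-in based on difflib, offered purely as an assumption about what such a helper might do, not as the actual implementation:

import difflib
from typing import Mapping, Optional

def detect_possible_typo_demo(d: Mapping[str, object], key: str) -> Optional[str]:
    # Suggest the closest existing key when the requested one is absent.
    candidates = difflib.get_close_matches(key, list(d), n=1)
    if candidates:
        return f'Possible typo: found "{candidates[0]}" where "{key}" was expected'
    return None

assert detect_possible_typo_demo({"binary-version": "1:2.0"}, "binary_version")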

-

418 def _ensure_value_is_type( 

-

419 self, 

-

420 v, 

-

421 t, 

-

422 key: Union[str, int, AttributePath], 

-

423 attribute_parent_path: Optional[AttributePath], 

-

424 ): 

-

425 if v is None: 

-

426 return None 

-

427 if not isinstance(v, t): 

-

428 if isinstance(t, tuple): 

-

429 t_msg = "one of: " + ", ".join(x.__name__ for x in t) 

-

430 else: 

-

431 t_msg = f"a {t.__name__}" 

-

432 key_path = ( 

-

433 key.path 

-

434 if isinstance(key, AttributePath) 

-

435 else assume_not_none(attribute_parent_path)[key].path 

-

436 ) 

-

437 self._error( 

-

438 f'The key {key_path} must be {t_msg} in manifest "{self.manifest_path}"' 

-

439 ) 

-

440 return v 

-

441 

-

442 def from_yaml_dict(self, yaml_data: object) -> "HighLevelManifest": 

-

443 attribute_path = AttributePath.root_path() 

-

444 parser_generator = self._plugin_provided_feature_set.manifest_parser_generator 

-

445 dispatchable_object_parsers = parser_generator.dispatchable_object_parsers 

-

446 manifest_root_parser = dispatchable_object_parsers[OPARSER_MANIFEST_ROOT] 

-

447 parsed_data = cast( 

-

448 "ManifestRootRule", 

-

449 manifest_root_parser.parse_input( 

-

450 yaml_data, 

-

451 attribute_path, 

-

452 parser_context=self, 

-

453 ), 

-

454 ) 

-

455 

-

456 packages_dict: Mapping[str, PackageContextData[Mapping[str, Any]]] = cast( 

-

457 "Mapping[str, PackageContextData[Mapping[str, Any]]]", 

-

458 parsed_data.get("packages", {}), 

-

459 ) 

-

460 install_rules = parsed_data.get("installations") 

-

461 if install_rules: 

-

462 self._install_rules = install_rules 

-

463 packages_parent_path = attribute_path["packages"] 

-

464 for package_name_raw, pcd in packages_dict.items(): 

-

465 definition_source = packages_parent_path[package_name_raw] 

-

466 package_name = pcd.resolved_package_name 

-

467 parsed = pcd.value 

-

468 

-

469 package_state: PackageTransformationDefinition 

-

470 with self.binary_package_context(package_name) as package_state: 

-

471 if package_state.is_auto_generated_package: 471 ↛ 473 (line 471 didn't jump to line 473, because the condition on line 471 was never true)

-

472 # Maybe lift (part) of this restriction. 

-

473 self._error( 

-

474 f'Cannot define rules for package "{package_name}" (at {definition_source.path}). It is an' 

-

475 " auto-generated package." 

-

476 ) 

-

477 binary_version = parsed.get("binary-version") 

-

478 if binary_version is not None: 

-

479 package_state.binary_version = ( 

-

480 package_state.substitution.substitute( 

-

481 binary_version, 

-

482 definition_source["binary-version"].path, 

-

483 ) 

-

484 ) 

-

485 search_dirs = parsed.get("installation_search_dirs") 

-

486 if search_dirs is not None: 486 ↛ 487 (line 486 didn't jump to line 487, because the condition on line 486 was never true)

-

487 package_state.search_dirs = search_dirs 

-

488 transformations = parsed.get("transformations") 

-

489 conffile_management = parsed.get("conffile_management") 

-

490 service_rules = parsed.get("services") 

-

491 if transformations: 

-

492 package_state.transformations.extend(transformations) 

-

493 if conffile_management: 493 ↛ 494 (line 493 didn't jump to line 494, because the condition on line 493 was never true)

-

494 package_state.dpkg_maintscript_helper_snippets.extend( 

-

495 conffile_management 

-

496 ) 

-

497 if service_rules: 497 ↛ 498 (line 497 didn't jump to line 498, because the condition on line 497 was never true)

-

498 package_state.requested_service_rules.extend(service_rules) 

-

499 

-

500 return self.build_manifest() 

-

501 

-

502 def _parse_manifest(self, fd: Union[IO[bytes], str]) -> HighLevelManifest: 

-

503 try: 

-

504 data = MANIFEST_YAML.load(fd) 

-

505 except YAMLError as e: 

-

506 msg = str(e) 

-

507 lines = msg.splitlines(keepends=True) 

-

508 i = -1 

-

509 for i, line in enumerate(lines): 

-

510 # Avoid an irrelevant "how to configure the YAML parser" message, which the 

-

511 # user cannot act on. 

-

512 if line.startswith("To suppress this check"): 

-

513 break 

-

514 if i > -1 and len(lines) > i + 1: 

-

515 lines = lines[:i] 

-

516 msg = "".join(lines) 

-

517 msg = msg.rstrip() 

-

518 msg += ( 

-

519 f"\n\nYou can use `yamllint -d relaxed {escape_shell(self.manifest_path)}` to validate" 

-

520 " the YAML syntax. The yamllint tool also supports style rules for YAML documents" 

-

521 " (such as indentation rules) in case that is of interest." 

-

522 ) 

-

523 raise ManifestParseException( 

-

524 f"Could not parse {self.manifest_path} as a YAML document: {msg}" 

-

525 ) from e 

-

526 self._mutable_yaml_manifest = MutableYAMLManifest(data) 

-

527 return self.from_yaml_dict(data) 

-

528 
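The except-branch above cuts the ruamel.yaml error text at the first "To suppress this check" line (that hint targets API users, not manifest authors) before appending the yamllint suggestion. A self-contained sketch of just the trimming step, on plain strings:

def trim_yaml_error(msg: str) -> str:
    lines = msg.splitlines(keepends=True)
    for i, line in enumerate(lines):
        if line.startswith("To suppress this check"):
            if len(lines) > i + 1:
                lines = lines[:i]  # drop the hint and everything after it
            break
    return "".join(lines).rstrip()

demo = "mapping values are not allowed here\nTo suppress this check see docs\n...\n"
assert trim_yaml_error(demo) == "mapping values are not allowed here"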

-

529 def parse_manifest( 

-

530 self, 

-

531 *, 

-

532 fd: Optional[Union[IO[bytes], str]] = None, 

-

533 ) -> HighLevelManifest: 

-

534 if fd is None: 534 ↛ 535 (line 534 didn't jump to line 535, because the condition on line 534 was never true)

-

535 with open(self.manifest_path, "rb") as fd: 

-

536 return self._parse_manifest(fd) 

-

537 else: 

-

538 return self._parse_manifest(fd) 

-
diff --git a/coverage-report/d_267b6307937f1878_highlevel_manifest_py.html b/coverage-report/d_267b6307937f1878_highlevel_manifest_py.html
deleted file mode 100644
index ec64758..0000000
--- a/coverage-report/d_267b6307937f1878_highlevel_manifest_py.html
+++ /dev/null
@@ -1,1706 +0,0 @@
-Coverage for src/debputy/highlevel_manifest.py: 67%
-801 statements
-coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

1import dataclasses 

-

2import functools 

-

3import os 

-

4import textwrap 

-

5from contextlib import suppress 

-

6from dataclasses import dataclass, field 

-

7from typing import ( 

-

8 List, 

-

9 Dict, 

-

10 Iterable, 

-

11 Mapping, 

-

12 Any, 

-

13 Union, 

-

14 Optional, 

-

15 TypeVar, 

-

16 Generic, 

-

17 cast, 

-

18 Set, 

-

19 Tuple, 

-

20 Sequence, 

-

21 FrozenSet, 

-

22) 

-

23 

-

24from debian.debian_support import DpkgArchTable 

-

25from ._deb_options_profiles import DebBuildOptionsAndProfiles 

-

26from ._manifest_constants import * 

-

27from .architecture_support import DpkgArchitectureBuildProcessValuesTable 

-

28from .builtin_manifest_rules import builtin_mode_normalization_rules 

-

29from .debhelper_emulation import ( 

-

30 dhe_dbgsym_root_dir, 

-

31 assert_no_dbgsym_migration, 

-

32 read_dbgsym_file, 

-

33) 

-

34from .exceptions import DebputySubstitutionError 

-

35from .filesystem_scan import FSPath, FSRootDir, FSROOverlay 

-

36from .installations import ( 

-

37 InstallRule, 

-

38 SourcePathMatcher, 

-

39 PathAlreadyInstalledOrDiscardedError, 

-

40 NoMatchForInstallPatternError, 

-

41 InstallRuleContext, 

-

42 BinaryPackageInstallRuleContext, 

-

43 InstallSearchDirContext, 

-

44 SearchDir, 

-

45) 

-

46from .intermediate_manifest import TarMember, PathType, IntermediateManifest 

-

47from .maintscript_snippet import ( 

-

48 DpkgMaintscriptHelperCommand, 

-

49 MaintscriptSnippetContainer, 

-

50) 

-

51from .manifest_conditions import ConditionContext 

-

52from .manifest_parser.base_types import FileSystemMatchRule, FileSystemExactMatchRule 

-

53from .manifest_parser.util import AttributePath 

-

54from .packager_provided_files import PackagerProvidedFile 

-

55from .packages import BinaryPackage, SourcePackage 

-

56from .plugin.api.feature_set import PluginProvidedFeatureSet 

-

57from .plugin.api.impl import BinaryCtrlAccessorProviderCreator 

-

58from .plugin.api.impl_types import ( 

-

59 PackageProcessingContextProvider, 

-

60 PackageDataTable, 

-

61) 

-

62from .plugin.api.spec import FlushableSubstvars, VirtualPath 

-

63from .plugin.debputy.binary_package_rules import ServiceRule 

-

64from .substitution import Substitution 

-

65from .transformation_rules import ( 

-

66 TransformationRule, 

-

67 ModeNormalizationTransformationRule, 

-

68 NormalizeShebangLineTransformation, 

-

69) 

-

70from .util import ( 

-

71 _error, 

-

72 _warn, 

-

73 debian_policy_normalize_symlink_target, 

-

74 generated_content_dir, 

-

75 _info, 

-

76) 

-

77from .yaml import MANIFEST_YAML 

-

78from .yaml.compat import CommentedMap, CommentedSeq 

-

79 

-

80 

-

81@dataclass(slots=True) 

-

82class DbgsymInfo: 

-

83 dbgsym_fs_root: FSPath 

-

84 dbgsym_ids: List[str] 

-

85 

-

86 

-

87@dataclass(slots=True, frozen=True) 

-

88class BinaryPackageData: 

-

89 source_package: SourcePackage 

-

90 binary_package: BinaryPackage 

-

91 binary_staging_root_dir: str 

-

92 control_output_dir: Optional[str] 

-

93 fs_root: FSPath 

-

94 substvars: FlushableSubstvars 

-

95 package_metadata_context: PackageProcessingContextProvider 

-

96 ctrl_creator: BinaryCtrlAccessorProviderCreator 

-

97 dbgsym_info: DbgsymInfo 

-

98 

-

99 

-

100@dataclass(slots=True) 

-

101class PackageTransformationDefinition: 

-

102 binary_package: BinaryPackage 

-

103 substitution: Substitution 

-

104 is_auto_generated_package: bool 

-

105 binary_version: Optional[str] = None 

-

106 search_dirs: Optional[List[FileSystemExactMatchRule]] = None 

-

107 dpkg_maintscript_helper_snippets: List[DpkgMaintscriptHelperCommand] = field( 

-

108 default_factory=list 

-

109 ) 

-

110 maintscript_snippets: Dict[str, MaintscriptSnippetContainer] = field( 

-

111 default_factory=dict 

-

112 ) 

-

113 transformations: List[TransformationRule] = field(default_factory=list) 

-

114 reserved_packager_provided_files: Dict[str, List[PackagerProvidedFile]] = field( 

-

115 default_factory=dict 

-

116 ) 

-

117 install_rules: List[InstallRule] = field(default_factory=list) 

-

118 requested_service_rules: List[ServiceRule] = field(default_factory=list) 

-

119 
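PackageTransformationDefinition relies on dataclasses.field(default_factory=...) so each package gets its own fresh mutable containers. A minimal, self-contained sketch of that pattern with simplified stand-in types:

import collections
from dataclasses import dataclass, field
from typing import Dict, List

@dataclass(slots=True)
class DemoPackageState:
    name: str
    transformations: List[str] = field(default_factory=list)
    # defaultdict lets callers append to a script's snippet list without
    # checking for its existence first.
    maintscript_snippets: Dict[str, List[str]] = field(
        default_factory=lambda: collections.defaultdict(list)
    )

state = DemoPackageState("mypkg")
state.maintscript_snippets["postinst"].append("echo configured")
assert DemoPackageState("otherpkg").transformations == []  # no shared state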

-

120 

-

121def _path_to_tar_member( 

-

122 path: FSPath, 

-

123 clamp_mtime_to: int, 

-

124) -> TarMember: 

-

125 mtime = float(clamp_mtime_to) 

-

126 owner, uid, group, gid = path.tar_owner_info 

-

127 mode = path.mode 

-

128 

-

129 if path.has_fs_path: 

-

130 mtime = min(mtime, path.mtime) 

-

131 

-

132 if path.is_dir: 

-

133 path_type = PathType.DIRECTORY 

-

134 elif path.is_file: 

-

135 # TODO: someday we will need to deal with hardlinks and it might appear here. 

-

136 path_type = PathType.FILE 

-

137 elif path.is_symlink: 137 ↛ 157 (line 137 didn't jump to line 157, because the condition on line 137 was never false)

-

138 # Special-case that we resolve immediately (since we need to normalize the target anyway) 

-

139 link_target = debian_policy_normalize_symlink_target( 

-

140 path.path, 

-

141 path.readlink(), 

-

142 ) 

-

143 return TarMember.virtual_path( 

-

144 path.tar_path, 

-

145 PathType.SYMLINK, 

-

146 mtime, 

-

147 link_target=link_target, 

-

148 # Force mode to be 0777 as that is the mode we see in the data.tar. In theory, tar lets you set 

-

149 # it to whatever. However, for reproducibility, we have to be well-behaved - and that is 0777. 

-

150 mode=0o0777, 

-

151 owner=owner, 

-

152 uid=uid, 

-

153 group=group, 

-

154 gid=gid, 

-

155 ) 

-

156 else: 

-

157 assert not path.is_symlink 

-

158 raise AssertionError( 

-

159 f"Unsupported file type: {path.path} - not a file, dir nor a symlink!" 

-

160 ) 

-

161 

-

162 if not path.has_fs_path: 

-

163 assert not path.is_file 

-

164 return TarMember.virtual_path( 

-

165 path.tar_path, 

-

166 path_type, 

-

167 mtime, 

-

168 mode=mode, 

-

169 owner=owner, 

-

170 uid=uid, 

-

171 group=group, 

-

172 gid=gid, 

-

173 ) 

-

174 may_steal_fs_path = path._can_replace_inline 

-

175 return TarMember.from_file( 

-

176 path.tar_path, 

-

177 path.fs_path, 

-

178 mode=mode, 

-

179 uid=uid, 

-

180 owner=owner, 

-

181 gid=gid, 

-

182 group=group, 

-

183 path_type=path_type, 

-

184 path_mtime=mtime, 

-

185 clamp_mtime_to=clamp_mtime_to, 

-

186 may_steal_fs_path=may_steal_fs_path, 

-

187 ) 

-

188 

-

189 

-

190def _generate_intermediate_manifest( 

-

191 fs_root: FSPath, 

-

192 clamp_mtime_to: int, 

-

193) -> Iterable[TarMember]: 

-

194 symlinks = [] 

-

195 for path in fs_root.all_paths(): 

-

196 tar_member = _path_to_tar_member(path, clamp_mtime_to) 

-

197 if tar_member.path_type == PathType.SYMLINK: 

-

198 symlinks.append(tar_member) 

-

199 continue 

-

200 yield tar_member 

-

201 yield from symlinks 

-

202 
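Note how _generate_intermediate_manifest buffers symlink members and emits them only after every other path. A self-contained sketch of that stable two-group ordering, with (path, is_symlink) tuples standing in for TarMember objects:

from typing import Iterable, Tuple

Entry = Tuple[str, bool]  # (tar path, is_symlink)

def symlinks_last(entries: Iterable[Entry]) -> Iterable[Entry]:
    deferred = []
    for entry in entries:
        if entry[1]:
            deferred.append(entry)  # buffer symlinks until the end
            continue
        yield entry
    yield from deferred  # relative order within each group is preserved

ordered = list(symlinks_last([("./usr", False), ("./usr/bin/x", True), ("./usr/lib", False)]))
assert ordered[-1] == ("./usr/bin/x", True)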

-

203 

-

204ST = TypeVar("ST") 

-

205T = TypeVar("T") 

-

206 

-

207 

-

208class AbstractYAMLSubStore(Generic[ST]): 

-

209 def __init__( 

-

210 self, 

-

211 parent_store: Any, 

-

212 parent_key: Optional[Union[int, str]], 

-

213 store: Optional[ST] = None, 

-

214 ) -> None: 

-

215 if parent_store is not None and parent_key is not None: 

-

216 try: 

-

217 from_parent_store = parent_store[parent_key] 

-

218 except (KeyError, IndexError): 

-

219 from_parent_store = None 

-

220 if ( 220 ↛ 225 (line 220 didn't jump to line 225)

-

221 store is not None 

-

222 and from_parent_store is not None 

-

223 and store is not parent_store 

-

224 ): 

-

225 raise ValueError( 

-

226 "Store is provided but is not the one already in the parent store" 

-

227 ) 

-

228 if store is None: 228 ↛ 230 (line 228 didn't jump to line 230, because the condition on line 228 was never false)

-

229 store = from_parent_store 

-

230 self._parent_store = parent_store 

-

231 self._parent_key = parent_key 

-

232 self._is_detached = ( 

-

233 parent_key is None or parent_store is None or parent_key not in parent_store 

-

234 ) 

-

235 assert self._is_detached or store is not None 

-

236 if store is None: 

-

237 store = self._create_new_instance() 

-

238 self._store: ST = store 

-

239 

-

240 def _create_new_instance(self) -> ST: 

-

241 raise NotImplementedError 

-

242 

-

243 def create_definition_if_missing(self) -> None: 

-

244 if self._is_detached: 

-

245 self.create_definition() 

-

246 

-

247 def create_definition(self) -> None: 

-

248 if not self._is_detached: 248 ↛ 249 (line 248 didn't jump to line 249, because the condition on line 248 was never true)

-

249 raise RuntimeError("Definition is already present") 

-

250 parent_store = self._parent_store 

-

251 if parent_store is None: 251 ↛ 252 (line 251 didn't jump to line 252, because the condition on line 251 was never true)

-

252 raise RuntimeError( 

-

253 f"Definition is not attached to any parent!? ({self.__class__.__name__})" 

-

254 ) 

-

255 if isinstance(parent_store, list): 

-

256 assert self._parent_key is None 

-

257 self._parent_key = len(parent_store) 

-

258 self._parent_store.append(self._store) 

-

259 else: 

-

260 parent_store[self._parent_key] = self._store 

-

261 self._is_detached = False 

-

262 

-

263 def remove_definition(self) -> None: 

-

264 self._ensure_attached() 

-

265 del self._parent_store[self._parent_key] 

-

266 if isinstance(self._parent_store, list): 

-

267 self._parent_key = None 

-

268 self._is_detached = True 

-

269 

-

270 def _ensure_attached(self) -> None: 

-

271 if self._is_detached: 

-

272 raise RuntimeError("The definition has been removed!") 

-

273 

-

274 

-

275class AbstractYAMLListSubStore(Generic[T], AbstractYAMLSubStore[List[T]]): 

-

276 def _create_new_instance(self) -> List[T]: 

-

277 return CommentedSeq() 

-

278 

-

279 

-

280class AbstractYAMLDictSubStore(Generic[T], AbstractYAMLSubStore[Dict[str, T]]): 

-

281 def _create_new_instance(self) -> Dict[str, T]: 

-

282 return CommentedMap() 

-

283 
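These substore classes let a YAML fragment be built while still detached and spliced into the parent document on demand. A minimal sketch of that life cycle, with plain dict/list standing in for ruamel.yaml's CommentedMap/CommentedSeq:

parent_store = {}  # the parent YAML mapping (would be a CommentedMap)
substore = []      # a detached list substore (would be a CommentedSeq)
substore.append({"create-symlink": {"path": "usr/bin/tool", "target": "tool-1.0"}})

parent_store["transformations"] = substore  # create_definition(): attach
assert "transformations" in parent_store

del parent_store["transformations"]  # remove_definition(): detach again
assert "transformations" not in parent_store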

-

284 

-

285class MutableCondition: 

-

286 @classmethod 

-

287 def arch_matches(cls, arch_filter: str) -> CommentedMap: 

-

288 return CommentedMap({MK_CONDITION_ARCH_MATCHES: arch_filter}) 

-

289 

-

290 @classmethod 

-

291 def build_profiles_matches(cls, build_profiles_matches: str) -> CommentedMap: 

-

292 return CommentedMap( 

-

293 {MK_CONDITION_BUILD_PROFILES_MATCHES: build_profiles_matches} 

-

294 ) 

-

295 

-

296 

-

297class MutableYAMLSymlink(AbstractYAMLDictSubStore[Any]): 

-

298 @classmethod 

-

299 def new_symlink( 

-

300 cls, link_path: str, link_target: str, condition: Optional[Any] 

-

301 ) -> "MutableYAMLSymlink": 

-

302 inner = { 

-

303 MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_PATH: link_path, 

-

304 MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_TARGET: link_target, 

-

305 } 

-

306 content = {MK_TRANSFORMATIONS_CREATE_SYMLINK: inner} 

-

307 if condition is not None: 307 ↛ 308 (line 307 didn't jump to line 308, because the condition on line 307 was never true)

-

308 inner["when"] = condition 

-

309 return cls(None, None, store=CommentedMap(content)) 

-

310 

-

311 @property 

-

312 def symlink_path(self) -> str: 

-

313 return self._store[MK_TRANSFORMATIONS_CREATE_SYMLINK][ 

-

314 MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_PATH 

-

315 ] 

-

316 

-

317 @symlink_path.setter 

-

318 def symlink_path(self, path: str) -> None: 

-

319 self._store[MK_TRANSFORMATIONS_CREATE_SYMLINK][ 

-

320 MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_PATH 

-

321 ] = path 

-

322 

-

323 @property 

-

324 def symlink_target(self) -> Optional[str]: 

-

325 return self._store[MK_TRANSFORMATIONS_CREATE_SYMLINK][ 

-

326 MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_TARGET 

-

327 ] 

-

328 

-

329 @symlink_target.setter 

-

330 def symlink_target(self, target: str) -> None: 

-

331 self._store[MK_TRANSFORMATIONS_CREATE_SYMLINK][ 

-

332 MK_TRANSFORMATIONS_CREATE_SYMLINK_LINK_TARGET 

-

333 ] = target 

-

334 

-

335 

-

336class MutableYAMLConffileManagementItem(AbstractYAMLDictSubStore[Any]): 

-

337 @classmethod 

-

338 def rm_conffile( 

-

339 cls, 

-

340 conffile: str, 

-

341 prior_to_version: Optional[str], 

-

342 owning_package: Optional[str], 

-

343 ) -> "MutableYAMLConffileManagementItem": 

-

344 r = cls( 

-

345 None, 

-

346 None, 

-

347 store=CommentedMap( 

-

348 { 

-

349 MK_CONFFILE_MANAGEMENT_REMOVE: CommentedMap( 

-

350 {MK_CONFFILE_MANAGEMENT_REMOVE_PATH: conffile} 

-

351 ) 

-

352 } 

-

353 ), 

-

354 ) 

-

355 r.prior_to_version = prior_to_version 

-

356 r.owning_package = owning_package 

-

357 return r 

-

358 

-

359 @classmethod 

-

360 def mv_conffile( 

-

361 cls, 

-

362 old_conffile: str, 

-

363 new_conffile: str, 

-

364 prior_to_version: Optional[str], 

-

365 owning_package: Optional[str], 

-

366 ) -> "MutableYAMLConffileManagementItem": 

-

367 r = cls( 

-

368 None, 

-

369 None, 

-

370 store=CommentedMap( 

-

371 { 

-

372 MK_CONFFILE_MANAGEMENT_RENAME: CommentedMap( 

-

373 { 

-

374 MK_CONFFILE_MANAGEMENT_RENAME_SOURCE: old_conffile, 

-

375 MK_CONFFILE_MANAGEMENT_RENAME_TARGET: new_conffile, 

-

376 } 

-

377 ) 

-

378 } 

-

379 ), 

-

380 ) 

-

381 r.prior_to_version = prior_to_version 

-

382 r.owning_package = owning_package 

-

383 return r 

-

384 

-

385 @property 

-

386 def _container(self) -> Dict[str, Any]: 

-

387 assert len(self._store) == 1 

-

388 return next(iter(self._store.values())) 

-

389 

-

390 @property 

-

391 def command(self) -> str: 

-

392 assert len(self._store) == 1 

-

393 return next(iter(self._store)) 

-

394 

-

395 @property 

-

396 def obsolete_conffile(self) -> str: 

-

397 if self.command == MK_CONFFILE_MANAGEMENT_REMOVE: 

-

398 return self._container[MK_CONFFILE_MANAGEMENT_REMOVE_PATH] 

-

399 assert self.command == MK_CONFFILE_MANAGEMENT_RENAME 

-

400 return self._container[MK_CONFFILE_MANAGEMENT_RENAME_SOURCE] 

-

401 

-

402 @obsolete_conffile.setter 

-

403 def obsolete_conffile(self, value: str) -> None: 

-

404 if self.command == MK_CONFFILE_MANAGEMENT_REMOVE: 

-

405 self._container[MK_CONFFILE_MANAGEMENT_REMOVE_PATH] = value 

-

406 else: 

-

407 assert self.command == MK_CONFFILE_MANAGEMENT_RENAME 

-

408 self._container[MK_CONFFILE_MANAGEMENT_RENAME_SOURCE] = value 

-

409 

-

410 @property 

-

411 def new_conffile(self) -> str: 

-

412 if self.command != MK_CONFFILE_MANAGEMENT_RENAME: 

-

413 raise TypeError( 

-

414 f"The new_conffile attribute is only applicable to command {MK_CONFFILE_MANAGEMENT_RENAME}." 

-

415 f" This is a {self.command}" 

-

416 ) 

-

417 return self._container[MK_CONFFILE_MANAGEMENT_RENAME_TARGET] 

-

418 

-

419 @new_conffile.setter 

-

420 def new_conffile(self, value: str) -> None: 

-

421 if self.command != MK_CONFFILE_MANAGEMENT_RENAME: 

-

422 raise TypeError( 

-

423 f"The new_conffile attribute is only applicable to command {MK_CONFFILE_MANAGEMENT_RENAME}." 

-

424 f" This is a {self.command}" 

-

425 ) 

-

426 self._container[MK_CONFFILE_MANAGEMENT_RENAME_TARGET] = value 

-

427 

-

428 @property 

-

429 def prior_to_version(self) -> Optional[str]: 

-

430 return self._container.get(MK_CONFFILE_MANAGEMENT_X_PRIOR_TO_VERSION) 

-

431 

-

432 @prior_to_version.setter 

-

433 def prior_to_version(self, value: Optional[str]) -> None: 

-

434 if value is None: 

-

435 try: 

-

436 del self._container[MK_CONFFILE_MANAGEMENT_X_PRIOR_TO_VERSION] 

-

437 except KeyError: 

-

438 pass 

-

439 else: 

-

440 self._container[MK_CONFFILE_MANAGEMENT_X_PRIOR_TO_VERSION] = value 

-

441 

-

442 @property 

-

443 def owning_package(self) -> Optional[str]: 

-

444 return self._container.get(MK_CONFFILE_MANAGEMENT_X_OWNING_PACKAGE) 

-

445 

-

446 @owning_package.setter 

-

447 def owning_package(self, value: Optional[str]) -> None: 

-

448 if value is None: 

-

449 try: 

-

450 del self._container[MK_CONFFILE_MANAGEMENT_X_OWNING_PACKAGE] 

-

451 except KeyError: 

-

452 pass 

-

453 else: 

-

454 self._container[MK_CONFFILE_MANAGEMENT_X_OWNING_PACKAGE] = value 

-

455 

-

456 

-

457class MutableYAMLPackageDefinition(AbstractYAMLDictSubStore): 

-

458 def _list_store( 

-

459 self, key, *, create_if_absent: bool = False 

-

460 ) -> Optional[List[Dict[str, Any]]]: 

-

461 if self._is_detached or key not in self._store: 

-

462 if not create_if_absent: 462 ↛ 463 (line 462 didn't jump to line 463, because the condition on line 462 was never true)

-

463 return None 

-

464 self.create_definition_if_missing() 

-

465 self._store[key] = [] 

-

466 return self._store[key] 

-

467 

-

468 def _insert_item(self, key: str, item: AbstractYAMLDictSubStore) -> None: 

-

469 parent_store = self._list_store(key, create_if_absent=True) 

-

470 assert parent_store is not None 

-

471 if not item._is_detached or ( 471 ↛ 474 (line 471 didn't jump to line 474, because the condition on line 471 was never true)

-

472 item._parent_store is not None and item._parent_store is not parent_store 

-

473 ): 

-

474 raise RuntimeError( 

-

475 "Item is already attached or associated with a different container" 

-

476 ) 

-

477 item._parent_store = parent_store 

-

478 item.create_definition() 

-

479 

-

480 def add_symlink(self, symlink: MutableYAMLSymlink) -> None: 

-

481 self._insert_item(MK_TRANSFORMATIONS, symlink) 

-

482 

-

483 def symlinks(self) -> Iterable[MutableYAMLSymlink]: 

-

484 store = self._list_store(MK_TRANSFORMATIONS) 

-

485 if store is None: 485 ↛ 486 (line 485 didn't jump to line 486, because the condition on line 485 was never true)

-

486 return 

-

487 for i in range(len(store)): 487 ↛ 488 (line 487 didn't jump to line 488, because the loop on line 487 never started)

-

488 d = store[i] 

-

489 if d and isinstance(d, dict) and len(d) == 1 and "symlink" in d: 

-

490 yield MutableYAMLSymlink(store, i) 

-

491 

-

492 def conffile_management_items(self) -> Iterable[MutableYAMLConffileManagementItem]: 

-

493 store = self._list_store(MK_CONFFILE_MANAGEMENT) 

-

494 if store is None: 494 ↛ 495 (line 494 didn't jump to line 495, because the condition on line 494 was never true)

-

495 return 

-

496 yield from ( 

-

497 MutableYAMLConffileManagementItem(store, i) for i in range(len(store)) 

-

498 ) 

-

499 

-

500 def add_conffile_management( 

-

501 self, conffile_management_item: MutableYAMLConffileManagementItem 

-

502 ) -> None: 

-

503 self._insert_item(MK_CONFFILE_MANAGEMENT, conffile_management_item) 

-

504 

-

505 

-

506class AbstractMutableYAMLInstallRule(AbstractYAMLDictSubStore): 

-

507 @property 

-

508 def _container(self) -> Dict[str, Any]: 

-

509 assert len(self._store) == 1 

-

510 return next(iter(self._store.values())) 

-

511 

-

512 @property 

-

513 def into(self) -> Optional[List[str]]: 

-

514 v = self._container[MK_INSTALLATIONS_INSTALL_INTO] 

-

515 if v is None: 

-

516 return None 

-

517 if isinstance(v, str): 

-

518 return [v] 

-

519 return v 

-

520 

-

521 @into.setter 

-

522 def into(self, new_value: Optional[Union[str, List[str]]]) -> None: 

-

523 if new_value is None: 523 ↛ 527 (line 523 didn't jump to line 527, because the condition on line 523 was never false)

-

524 with suppress(KeyError): 

-

525 del self._container[MK_INSTALLATIONS_INSTALL_INTO] 

-

526 return 

-

527 if isinstance(new_value, str): 

-

528 self._container[MK_INSTALLATIONS_INSTALL_INTO] = new_value 

-

529 return 

-

530 new_list = CommentedSeq(new_value) 

-

531 self._container[MK_INSTALLATIONS_INSTALL_INTO] = new_list 

-

532 

-

533 @property 

-

534 def when(self) -> Optional[Union[str, Mapping[str, Any]]]: 

-

535 return self._container[MK_CONDITION_WHEN] 

-

536 

-

537 @when.setter 

-

538 def when(self, new_value: Optional[Union[str, Mapping[str, Any]]]) -> None: 

-

539 if new_value is None: 539 ↛ 540 (line 539 didn't jump to line 540, because the condition on line 539 was never true)

-

540 with suppress(KeyError): 

-

541 del self._container[MK_CONDITION_WHEN] 

-

542 return 

-

543 if isinstance(new_value, str): 543 ↛ 544 (line 543 didn't jump to line 544, because the condition on line 543 was never true)

-

544 self._container[MK_CONDITION_WHEN] = new_value 

-

545 return 

-

546 new_map = CommentedMap(new_value) 

-

547 self._container[MK_CONDITION_WHEN] = new_map 

-

548 

-

549 @classmethod 

-

550 def install_dest( 

-

551 cls, 

-

552 sources: Union[str, List[str]], 

-

553 into: Optional[Union[str, List[str]]], 

-

554 *, 

-

555 dest_dir: Optional[str] = None, 

-

556 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

557 ) -> "MutableYAMLInstallRuleInstall": 

-

558 k = MK_INSTALLATIONS_INSTALL_SOURCES 

-

559 if isinstance(sources, str): 

-

560 k = MK_INSTALLATIONS_INSTALL_SOURCE 

-

561 r = MutableYAMLInstallRuleInstall( 

-

562 None, 

-

563 None, 

-

564 store=CommentedMap( 

-

565 { 

-

566 MK_INSTALLATIONS_INSTALL: CommentedMap( 

-

567 { 

-

568 k: sources, 

-

569 } 

-

570 ) 

-

571 } 

-

572 ), 

-

573 ) 

-

574 r.dest_dir = dest_dir 

-

575 r.into = into 

-

576 if when is not None: 

-

577 r.when = when 

-

578 return r 

-

579 

-

580 @classmethod 

-

581 def multi_dest_install( 

-

582 cls, 

-

583 sources: Union[str, List[str]], 

-

584 dest_dirs: Sequence[str], 

-

585 into: Optional[Union[str, List[str]]], 

-

586 *, 

-

587 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

588 ) -> "MutableYAMLInstallRuleInstall": 

-

589 k = MK_INSTALLATIONS_INSTALL_SOURCES 

-

590 if isinstance(sources, str): 590 ↛ 592 (line 590 didn't jump to line 592, because the condition on line 590 was never false)

-

591 k = MK_INSTALLATIONS_INSTALL_SOURCE 

-

592 r = MutableYAMLInstallRuleInstall( 

-

593 None, 

-

594 None, 

-

595 store=CommentedMap( 

-

596 { 

-

597 MK_INSTALLATIONS_MULTI_DEST_INSTALL: CommentedMap( 

-

598 { 

-

599 k: sources, 

-

600 "dest-dirs": dest_dirs, 

-

601 } 

-

602 ) 

-

603 } 

-

604 ), 

-

605 ) 

-

606 r.into = into 

-

607 if when is not None: 607 ↛ 608 (line 607 didn't jump to line 608, because the condition on line 607 was never true)

-

608 r.when = when 

-

609 return r 

-

610 

-

611 @classmethod 

-

612 def install_as( 

-

613 cls, 

-

614 source: str, 

-

615 install_as: str, 

-

616 into: Optional[Union[str, List[str]]], 

-

617 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

618 ) -> "MutableYAMLInstallRuleInstall": 

-

619 r = MutableYAMLInstallRuleInstall( 

-

620 None, 

-

621 None, 

-

622 store=CommentedMap( 

-

623 { 

-

624 MK_INSTALLATIONS_INSTALL: CommentedMap( 

-

625 { 

-

626 MK_INSTALLATIONS_INSTALL_SOURCE: source, 

-

627 MK_INSTALLATIONS_INSTALL_AS: install_as, 

-

628 } 

-

629 ) 

-

630 } 

-

631 ), 

-

632 ) 

-

633 r.into = into 

-

634 if when is not None: 634 ↛ 635 (line 634 didn't jump to line 635, because the condition on line 634 was never true)

-

635 r.when = when 

-

636 return r 

-

637 

-

638 @classmethod 

-

639 def install_doc_as( 

-

640 cls, 

-

641 source: str, 

-

642 install_as: str, 

-

643 into: Optional[Union[str, List[str]]], 

-

644 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

645 ) -> "MutableYAMLInstallRuleInstall": 

-

646 r = MutableYAMLInstallRuleInstall( 

-

647 None, 

-

648 None, 

-

649 store=CommentedMap( 

-

650 { 

-

651 MK_INSTALLATIONS_INSTALL_DOCS: CommentedMap( 

-

652 { 

-

653 MK_INSTALLATIONS_INSTALL_SOURCE: source, 

-

654 MK_INSTALLATIONS_INSTALL_AS: install_as, 

-

655 } 

-

656 ) 

-

657 } 

-

658 ), 

-

659 ) 

-

660 r.into = into 

-

661 if when is not None: 

-

662 r.when = when 

-

663 return r 

-

664 

-

665 @classmethod 

-

666 def install_docs( 

-

667 cls, 

-

668 sources: Union[str, List[str]], 

-

669 into: Optional[Union[str, List[str]]], 

-

670 *, 

-

671 dest_dir: Optional[str] = None, 

-

672 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

673 ) -> "MutableYAMLInstallRuleInstall": 

-

674 k = MK_INSTALLATIONS_INSTALL_SOURCES 

-

675 if isinstance(sources, str): 

-

676 k = MK_INSTALLATIONS_INSTALL_SOURCE 

-

677 r = MutableYAMLInstallRuleInstall( 

-

678 None, 

-

679 None, 

-

680 store=CommentedMap( 

-

681 { 

-

682 MK_INSTALLATIONS_INSTALL_DOCS: CommentedMap( 

-

683 { 

-

684 k: sources, 

-

685 } 

-

686 ) 

-

687 } 

-

688 ), 

-

689 ) 

-

690 r.into = into 

-

691 r.dest_dir = dest_dir 

-

692 if when is not None: 

-

693 r.when = when 

-

694 return r 

-

695 

-

696 @classmethod 

-

697 def install_examples( 

-

698 cls, 

-

699 sources: Union[str, List[str]], 

-

700 into: Optional[Union[str, List[str]]], 

-

701 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

702 ) -> "MutableYAMLInstallRuleInstallExamples": 

-

703 k = MK_INSTALLATIONS_INSTALL_SOURCES 

-

704 if isinstance(sources, str): 

-

705 k = MK_INSTALLATIONS_INSTALL_SOURCE 

-

706 r = MutableYAMLInstallRuleInstallExamples( 

-

707 None, 

-

708 None, 

-

709 store=CommentedMap( 

-

710 { 

-

711 MK_INSTALLATIONS_INSTALL_EXAMPLES: CommentedMap( 

-

712 { 

-

713 k: sources, 

-

714 } 

-

715 ) 

-

716 } 

-

717 ), 

-

718 ) 

-

719 r.into = into 

-

720 if when is not None: 720 ↛ 721 (line 720 didn't jump to line 721, because the condition on line 720 was never true)

-

721 r.when = when 

-

722 return r 

-

723 

-

724 @classmethod 

-

725 def install_man( 

-

726 cls, 

-

727 sources: Union[str, List[str]], 

-

728 into: Optional[Union[str, List[str]]], 

-

729 language: Optional[str], 

-

730 when: Optional[Union[str, Mapping[str, Any]]] = None, 

-

731 ) -> "MutableYAMLInstallRuleMan": 

-

732 k = MK_INSTALLATIONS_INSTALL_SOURCES 

-

733 if isinstance(sources, str): 733 ↛ 734 (line 733 didn't jump to line 734, because the condition on line 733 was never true)

-

734 k = MK_INSTALLATIONS_INSTALL_SOURCE 

-

735 r = MutableYAMLInstallRuleMan( 

-

736 None, 

-

737 None, 

-

738 store=CommentedMap( 

-

739 { 

-

740 MK_INSTALLATIONS_INSTALL_MAN: CommentedMap( 

-

741 { 

-

742 k: sources, 

-

743 } 

-

744 ) 

-

745 } 

-

746 ), 

-

747 ) 

-

748 r.language = language 

-

749 r.into = into 

-

750 if when is not None: 750 ↛ 751 (line 750 didn't jump to line 751, because the condition on line 750 was never true)

-

751 r.when = when 

-

752 return r 

-

753 

-

754 @classmethod 

-

755 def discard( 

-

756 cls, 

-

757 sources: Union[str, List[str]], 

-

758 ) -> "MutableYAMLInstallRuleDiscard": 

-

759 return MutableYAMLInstallRuleDiscard( 

-

760 None, 

-

761 None, 

-

762 store=CommentedMap({MK_INSTALLATIONS_DISCARD: sources}), 

-

763 ) 

-

764 

-

765 

-

766class MutableYAMLInstallRuleInstallExamples(AbstractMutableYAMLInstallRule): 

-

767 pass 

-

768 

-

769 

-

770class MutableYAMLInstallRuleMan(AbstractMutableYAMLInstallRule): 

-

771 @property 

-

772 def language(self) -> Optional[str]: 

-

773 return self._container[MK_INSTALLATIONS_INSTALL_MAN_LANGUAGE] 

-

774 

-

775 @language.setter 

-

776 def language(self, new_value: Optional[str]) -> None: 

-

777 if new_value is not None: 

-

778 self._container[MK_INSTALLATIONS_INSTALL_MAN_LANGUAGE] = new_value 

-

779 return 

-

780 with suppress(KeyError): 

-

781 del self._container[MK_INSTALLATIONS_INSTALL_MAN_LANGUAGE] 

-

782 

-

783 

-

784class MutableYAMLInstallRuleDiscard(AbstractMutableYAMLInstallRule): 

-

785 pass 

-

786 

-

787 

-

788class MutableYAMLInstallRuleInstall(AbstractMutableYAMLInstallRule): 

-

789 @property 

-

790 def sources(self) -> List[str]: 

-

791 v = self._container[MK_INSTALLATIONS_INSTALL_SOURCES] 

-

792 if isinstance(v, str): 

-

793 return [v] 

-

794 return v 

-

795 

-

796 @sources.setter 

-

797 def sources(self, new_value: Union[str, List[str]]) -> None: 

-

798 if isinstance(new_value, str): 

-

799 self._container[MK_INSTALLATIONS_INSTALL_SOURCES] = new_value 

-

800 return 

-

801 new_list = CommentedSeq(new_value) 

-

802 self._container[MK_INSTALLATIONS_INSTALL_SOURCES] = new_list 

-

803 

-

804 @property 

-

805 def dest_dir(self) -> Optional[str]: 

-

806 return self._container.get(MK_INSTALLATIONS_INSTALL_DEST_DIR) 

-

807 

-

808 @dest_dir.setter 

-

809 def dest_dir(self, new_value: Optional[str]) -> None: 

-

810 if new_value is not None and self.dest_as is not None: 810 ↛ 811 (line 810 didn't jump to line 811, because the condition on line 810 was never true)

-

811 raise ValueError( 

-

812 f'Cannot both have a "{MK_INSTALLATIONS_INSTALL_DEST_DIR}" and' 

-

813 f' "{MK_INSTALLATIONS_INSTALL_AS}"' 

-

814 ) 

-

815 if new_value is not None: 

-

816 self._container[MK_INSTALLATIONS_INSTALL_DEST_DIR] = new_value 

-

817 else: 

-

818 with suppress(KeyError): 

-

819 del self._container[MK_INSTALLATIONS_INSTALL_DEST_DIR] 

-

820 

-

821 @property 

-

822 def dest_as(self) -> Optional[str]: 

-

823 return self._container.get(MK_INSTALLATIONS_INSTALL_AS) 

-

824 

-

825 @dest_as.setter 

-

826 def dest_as(self, new_value: Optional[str]) -> None: 

-

827 if new_value is not None: 

-

828 if self.dest_dir is not None: 

-

829 raise ValueError( 

-

830 f'Cannot both have a "{MK_INSTALLATIONS_INSTALL_DEST_DIR}" and' 

-

831 f' "{MK_INSTALLATIONS_INSTALL_AS}"' 

-

832 ) 

-

833 

-

834 sources = self._container[MK_INSTALLATIONS_INSTALL_SOURCES] 

-

835 if isinstance(sources, list): 

-

836 if len(sources) != 1: 

-

837 raise ValueError( 

-

838 f'Cannot have "{MK_INSTALLATIONS_INSTALL_AS}" when' 

-

839 f' "{MK_INSTALLATIONS_INSTALL_SOURCES}" is not exactly one item' 

-

840 ) 

-

841 self.sources = sources[0] 

-

842 self._container[MK_INSTALLATIONS_INSTALL_AS] = new_value 

-

843 else: 

-

844 with suppress(KeyError): 

-

845 del self._container[MK_INSTALLATIONS_INSTALL_AS] 

-

846 

-

847 

-

848class MutableYAMLInstallationsDefinition(AbstractYAMLListSubStore[Any]): 

-

849 def append(self, install_rule: AbstractMutableYAMLInstallRule) -> None: 

-

850 parent_store = self._store 

-

851 if not install_rule._is_detached or ( 851 ↛ 855 (line 851 didn't jump to line 855, because the condition on line 851 was never true)

-

852 install_rule._parent_store is not None 

-

853 and install_rule._parent_store is not parent_store 

-

854 ): 

-

855 raise RuntimeError( 

-

856 "Item is already attached or associated with a different container" 

-

857 ) 

-

858 self.create_definition_if_missing() 

-

859 install_rule._parent_store = parent_store 

-

860 install_rule.create_definition() 

-

861 

-

862 def extend(self, install_rules: Iterable[AbstractMutableYAMLInstallRule]) -> None: 

-

863 parent_store = self._store 

-

864 for install_rule in install_rules: 

-

865 if not install_rule._is_detached or ( 865 ↛ 869 (line 865 didn't jump to line 869, because the condition on line 865 was never true)

-

866 install_rule._parent_store is not None 

-

867 and install_rule._parent_store is not parent_store 

-

868 ): 

-

869 raise RuntimeError( 

-

870 "Item is already attached or associated with a different container" 

-

871 ) 

-

872 self.create_definition_if_missing() 

-

873 install_rule._parent_store = parent_store 

-

874 install_rule.create_definition() 

-

875 

-

876 

-

877class MutableYAMLManifestVariables(AbstractYAMLDictSubStore): 

-

878 @property 

-

879 def variables(self) -> Dict[str, Any]: 

-

880 return self._store 

-

881 

-

882 def __setitem__(self, key: str, value: Any) -> None: 

-

883 self._store[key] = value 

-

884 self.create_definition_if_missing() 

-

885 

-

886 

-

887class MutableYAMLManifestDefinitions(AbstractYAMLDictSubStore): 

-

888 def manifest_variables( 

-

889 self, *, create_if_absent: bool = True 

-

890 ) -> MutableYAMLManifestVariables: 

-

891 d = MutableYAMLManifestVariables(self._store, MK_MANIFEST_VARIABLES) 

-

892 if create_if_absent: 892 ↛ 893 (line 892 didn't jump to line 893, because the condition on line 892 was never true)

-

893 d.create_definition_if_missing() 

-

894 return d 

-

895 

-

896 

-

897class MutableYAMLManifest: 

-

898 def __init__(self, store: Any) -> None: 

-

899 self._store = store 

-

900 

-

901 @classmethod 

-

902 def empty_manifest(cls) -> "MutableYAMLManifest": 

-

903 return cls(CommentedMap({MK_MANIFEST_VERSION: DEFAULT_MANIFEST_VERSION})) 

-

904 

-

905 @property 

-

906 def manifest_version(self) -> str: 

-

907 return self._store[MK_MANIFEST_VERSION] 

-

908 

-

909 @manifest_version.setter 

-

910 def manifest_version(self, version: str) -> None: 

-

911 if version not in SUPPORTED_MANIFEST_VERSIONS: 

-

912 raise ValueError("Unsupported version") 

-

913 self._store[MK_MANIFEST_VERSION] = version 

-

914 

-

915 def installations( 

-

916 self, 

-

917 *, 

-

918 create_if_absent: bool = True, 

-

919 ) -> MutableYAMLInstallationsDefinition: 

-

920 d = MutableYAMLInstallationsDefinition(self._store, MK_INSTALLATIONS) 

-

921 if create_if_absent: 921 ↛ 922 (line 921 didn't jump to line 922, because the condition on line 921 was never true)

-

922 d.create_definition_if_missing() 

-

923 return d 

-

924 

-

925 def manifest_definitions( 

-

926 self, 

-

927 *, 

-

928 create_if_absent: bool = True, 

-

929 ) -> MutableYAMLManifestDefinitions: 

-

930 d = MutableYAMLManifestDefinitions(self._store, MK_MANIFEST_DEFINITIONS) 

-

931 if create_if_absent: 931 ↛ 932 (line 931 didn't jump to line 932, because the condition on line 931 was never true)

-

932 d.create_definition_if_missing() 

-

933 return d 

-

934 

-

935 def package( 

-

936 self, name: str, *, create_if_absent: bool = True 

-

937 ) -> MutableYAMLPackageDefinition: 

-

938 if MK_PACKAGES not in self._store: 938 ↛ 940 (line 938 didn't jump to line 940, because the condition on line 938 was never false)

-

939 self._store[MK_PACKAGES] = CommentedMap() 

-

940 packages_store = self._store[MK_PACKAGES] 

-

941 package = packages_store.get(name) 

-

942 if package is None: 942 ↛ 949 (line 942 didn't jump to line 949, because the condition on line 942 was never false)

-

943 if not create_if_absent: 943 ↛ 944 (line 943 didn't jump to line 944, because the condition on line 943 was never true)

-

944 raise KeyError(name) 

-

945 assert packages_store is not None 

-

946 d = MutableYAMLPackageDefinition(packages_store, name) 

-

947 d.create_definition() 

-

948 else: 

-

949 d = MutableYAMLPackageDefinition(packages_store, name) 

-

950 return d 

-

951 

-

952 def write_to(self, fd) -> None: 

-

953 MANIFEST_YAML.dump(self._store, fd) 

-

954 
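A usage sketch of the classes above, assuming they are imported from debputy.highlevel_manifest (the module this coverage page covers); the package name "mypkg" is hypothetical:

import sys

from debputy.highlevel_manifest import (
    AbstractMutableYAMLInstallRule,
    MutableYAMLManifest,
)

manifest = MutableYAMLManifest.empty_manifest()
rule = AbstractMutableYAMLInstallRule.install_dest(
    "usr/bin/tool",  # a single source is stored under the "source" key
    "mypkg",         # into: the binary package receiving the path
    dest_dir="usr/bin",
)
manifest.installations().append(rule)  # attaches the rule to the document
manifest.write_to(sys.stdout)          # serializes via MANIFEST_YAML.dump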

-

955 

-

956def _describe_missing_path(entry: VirtualPath) -> str: 

-

957 if entry.is_dir: 

-

958 return f"{entry.fs_path}/ (empty directory; possible integration point)" 

-

959 if entry.is_symlink: 

-

960 target = os.readlink(entry.fs_path) 

-

961 return f"{entry.fs_path} (symlink; links to {target})" 

-

962 if entry.is_file: 

-

963 return f"{entry.fs_path} (file)" 

-

964 return f"{entry.fs_path} (other!? Probably not supported by debputy and may need a `remove`)" 

-

965 

-

966 

-

967def _detect_missing_installations( 

-

968 path_matcher: SourcePathMatcher, 

-

969 search_dir: VirtualPath, 

-

970) -> None: 

-

971 if not os.path.isdir(search_dir.fs_path): 971 ↛ 973 (line 971 didn't jump to line 973, because the condition on line 971 was never false)

-

972 return 

-

973 missing = list(path_matcher.detect_missing(search_dir)) 

-

974 if not missing: 

-

975 return 

-

976 

-

977 _warn( 

-

978 f"The following paths were present in {search_dir.fs_path}, but not installed (nor explicitly discarded)." 

-

979 ) 

-

980 _warn("") 

-

981 for entry in missing: 

-

982 desc = _describe_missing_path(entry) 

-

983 _warn(f" * {desc}") 

-

984 _warn("") 

-

985 

-

986 excl = textwrap.dedent( 

-

987 """\ 

-

988 - discard: "*" 

-

989 """ 

-

990 ) 

-

991 

-

992 _error( 

-

993 "Please review the list and add either install rules or exclusions to `installations` in" 

-

994 " debian/debputy.manifest. If you do not need any of these paths, add the following to the" 

-

995 f" end of your 'installations`:\n\n{excl}\n" 

-

996 ) 

-

997 

-

998 

-

999def _list_automatic_discard_rules(path_matcher: SourcePathMatcher) -> None: 

-

1000 used_discard_rules = path_matcher.used_auto_discard_rules 

-

1001 # Discard rules can match and then be overridden. In that case, they appear 

-

1002 # but have 0 matches. 

-

1003 if not sum((len(v) for v in used_discard_rules.values()), 0): 

-

1004 return 

-

1005 _info("The following automatic discard rules were triggered:") 

-

1006 example_path: Optional[str] = None 

-

1007 for rule in sorted(used_discard_rules): 

-

1008 for fs_path in sorted(used_discard_rules[rule]): 

-

1009 if example_path is None: 1009 ↛ 1011 (line 1009 didn't jump to line 1011, because the condition on line 1009 was never false)

-

1010 example_path = fs_path 

-

1011 _info(f" * {rule} -> {fs_path}") 

-

1012 assert example_path is not None 

-

1013 _info("") 

-

1014 _info( 

-

1015 "Note that some of these may have been overruled. The overrule detection logic is not" 

-

1016 ) 

-

1017 _info("100% reliable.") 

-

1018 _info("") 

-

1019 _info( 

-

1020 "You can overrule an automatic discard rule by explicitly listing the path. As an example:" 

-

1021 ) 

-

1022 _info(" installations:") 

-

1023 _info(" - install:") 

-

1024 _info(f" source: {example_path}") 

-

1025 

-

1026 

-

1027def _install_everything_from_source_dir_if_present( 

-

1028 dctrl_bin: BinaryPackage, 

-

1029 substitution: Substitution, 

-

1030 path_matcher: SourcePathMatcher, 

-

1031 install_rule_context: InstallRuleContext, 

-

1032 source_condition_context: ConditionContext, 

-

1033 source_dir: VirtualPath, 

-

1034 *, 

-

1035 into_dir: Optional[VirtualPath] = None, 

-

1036) -> None: 

-

1037 attribute_path = AttributePath.builtin_path()[f"installing {source_dir.fs_path}"] 

-

1038 pkg_set = frozenset([dctrl_bin]) 

-

1039 install_rule = InstallRule.install_dest( 

-

1040 [FileSystemMatchRule.from_path_match("*", attribute_path, substitution)], 

-

1041 None, 

-

1042 pkg_set, 

-

1043 f"Built-in; install everything from {source_dir.fs_path} into {dctrl_bin.name}", 

-

1044 None, 

-

1045 ) 

-

1046 pkg_search_dir: Tuple[SearchDir] = ( 

-

1047 SearchDir( 

-

1048 source_dir, 

-

1049 pkg_set, 

-

1050 ), 

-

1051 ) 

-

1052 replacements = { 

-

1053 "search_dirs": pkg_search_dir, 

-

1054 } 

-

1055 if into_dir is not None: 1055 ↛ 1056 (line 1055 didn't jump to line 1056, because the condition on line 1055 was never true)

-

1056 binary_package_contexts = dict(install_rule_context.binary_package_contexts) 

-

1057 updated = binary_package_contexts[dctrl_bin.name].replace(fs_root=into_dir) 

-

1058 binary_package_contexts[dctrl_bin.name] = updated 

-

1059 replacements["binary_package_contexts"] = binary_package_contexts 

-

1060 

-

1061 fake_install_rule_context = install_rule_context.replace(**replacements) 

-

1062 try: 

-

1063 install_rule.perform_install( 

-

1064 path_matcher, 

-

1065 fake_install_rule_context, 

-

1066 source_condition_context, 

-

1067 ) 

-

1068 except ( 

-

1069 NoMatchForInstallPatternError, 

-

1070 PathAlreadyInstalledOrDiscardedError, 

-

1071 ): 

-

1072 # Empty directory or everything excluded by default; ignore the error 

-

1073 pass 

-

1074 

-

1075 

-

1076class HighLevelManifest: 

-

1077 def __init__( 

-

1078 self, 

-

1079 manifest_path: str, 

-

1080 mutable_manifest: Optional[MutableYAMLManifest], 

-

1081 install_rules: Optional[List[InstallRule]], 

-

1082 source_package: SourcePackage, 

-

1083 binary_packages: Mapping[str, BinaryPackage], 

-

1084 substitution: Substitution, 

-

1085 package_transformations: Mapping[str, PackageTransformationDefinition], 

-

1086 dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable, 

-

1087 dpkg_arch_query_table: DpkgArchTable, 

-

1088 build_env: DebBuildOptionsAndProfiles, 

-

1089 plugin_provided_feature_set: PluginProvidedFeatureSet, 

-

1090 debian_dir: VirtualPath, 

-

1091 ) -> None: 

-

1092 self.manifest_path = manifest_path 

-

1093 self.mutable_manifest = mutable_manifest 

-

1094 self._install_rules = install_rules 

-

1095 self._source_package = source_package 

-

1096 self._binary_packages = binary_packages 

-

1097 self.substitution = substitution 

-

1098 self.package_transformations = package_transformations 

-

1099 self._dpkg_architecture_variables = dpkg_architecture_variables 

-

1100 self._dpkg_arch_query_table = dpkg_arch_query_table 

-

1101 self._build_env = build_env 

-

1102 self._used_for: Set[str] = set() 

-

1103 self._plugin_provided_feature_set = plugin_provided_feature_set 

-

1104 self._debian_dir = debian_dir 

-

1105 

-

1106 def source_version(self, include_binnmu_version: bool = True) -> str: 

-

1107 # TODO: There should be an easier way to determine the source version; really. 

-

1108 version_var = "{{DEB_VERSION}}" 

-

1109 if not include_binnmu_version: 

-

1110 version_var = "{{_DEBPUTY_INTERNAL_NON_BINNMU_SOURCE}}" 

-

1111 try: 

-

1112 return self.substitution.substitute( 

-

1113 version_var, "internal (resolve version)" 

-

1114 ) 

-

1115 except DebputySubstitutionError as e: 

-

1116 raise AssertionError(f"Could not resolve {version_var}") from e 

-

1117 

-

1118 @property 

-

1119 def debian_dir(self) -> VirtualPath: 

-

1120 return self._debian_dir 

-

1121 

-

1122 @property 

-

1123 def dpkg_architecture_variables(self) -> DpkgArchitectureBuildProcessValuesTable: 

-

1124 return self._dpkg_architecture_variables 

-

1125 

-

1126 @property 

-

1127 def build_env(self) -> DebBuildOptionsAndProfiles: 

-

1128 return self._build_env 

-

1129 

-

1130 @property 

-

1131 def plugin_provided_feature_set(self) -> PluginProvidedFeatureSet: 

-

1132 return self._plugin_provided_feature_set 

-

1133 

-

1134 @property 

-

1135 def active_packages(self) -> Iterable[BinaryPackage]: 

-

1136 yield from (p for p in self._binary_packages.values() if p.should_be_acted_on) 

-

1137 

-

1138 @property 

-

1139 def all_packages(self) -> Iterable[BinaryPackage]: 

-

1140 yield from self._binary_packages.values() 

-

1141 

-

1142 def package_state_for(self, package: str) -> PackageTransformationDefinition: 

-

1143 return self.package_transformations[package] 

-

1144 

-

1145 def _detect_doc_main_package_for(self, package: BinaryPackage) -> BinaryPackage: 

-

1146 name = package.name 

-

1147 # If it is not a -doc package, then docs should be installed 

-

1148 # under its own package name. 

-

1149 if not name.endswith("-doc"): 1149 ↛ 1151 (line 1149 didn't jump to line 1151, because the condition on line 1149 was never false)

-

1150 return package 

-

1151 name = name[:-4] 

-

1152 main_package = self._binary_packages.get(name) 

-

1153 if main_package: 

-

1154 return main_package 

-

1155 if name.startswith("lib"): 

-

1156 dev_pkg = self._binary_packages.get(f"{name}-dev") 

-

1157 if dev_pkg: 

-

1158 return dev_pkg 

-

1159 

-

1160 # If we found no better match; default to the doc package itself. 

-

1161 return package 

-

1162 

-
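The heuristic above maps `foo-doc` to `foo`, falls back to `libfoo-dev` for library stacks, and
finally defaults to the -doc package itself. A standalone sketch of the same lookup order,
assuming only a set of package names (the real method works on BinaryPackage objects):

    def detect_doc_main_package(name: str, known: set) -> str:
        # Mirrors the lookup order in _detect_doc_main_package_for above.
        if not name.endswith("-doc"):
            return name
        base = name[:-4]
        if base in known:
            return base
        if base.startswith("lib") and f"{base}-dev" in known:
            return f"{base}-dev"
        return name

    known = {"libfoo1", "libfoo-dev", "libfoo-doc"}
    assert detect_doc_main_package("libfoo-doc", known) == "libfoo-dev"
    assert detect_doc_main_package("libfoo1", known) == "libfoo1"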

1163    def perform_installations(
1164        self,
1165        *,
1166        install_request_context: Optional[InstallSearchDirContext] = None,
1167        enable_manifest_installation_feature: bool = True,
1168    ) -> PackageDataTable:
1169        package_data_dict = {}
1170        package_data_table = PackageDataTable(package_data_dict)
1171        if install_request_context is None:    [1171 ↛ 1173: condition was never true]
1172
1173            @functools.lru_cache(None)
1174            def _as_path(fs_path: str) -> VirtualPath:
1175                return FSROOverlay.create_root_dir(".", fs_path)
1176
1177            dtmp_dir = _as_path("debian/tmp")
1178            source_root_dir = _as_path(".")
1179            into = frozenset(self._binary_packages.values())
1180            default_search_dirs = [dtmp_dir]
1181            per_package_search_dirs = {
1182                t.binary_package: [_as_path(f.match_rule.path) for f in t.search_dirs]
1183                for t in self.package_transformations.values()
1184                if t.search_dirs is not None
1185            }
1186            search_dirs = _determine_search_dir_order(
1187                per_package_search_dirs,
1188                into,
1189                default_search_dirs,
1190                source_root_dir,
1191            )
1192            check_for_uninstalled_dirs = tuple(
1193                s.search_dir
1194                for s in search_dirs
1195                if s.search_dir.fs_path != source_root_dir.fs_path
1196            )
1197            _present_installation_dirs(search_dirs, check_for_uninstalled_dirs, into)
1198        else:
1199            dtmp_dir = None
1200            search_dirs = install_request_context.search_dirs
1201            into = frozenset(self._binary_packages.values())
1202            seen = set()
1203            for search_dir in search_dirs:
1204                seen.update(search_dir.applies_to)
1205
1206            missing = into - seen
1207            if missing:    [1207 ↛ 1208: condition was never true]
1208                names = ", ".join(p.name for p in missing)
1209                raise ValueError(
1210                    f"The following package(s) had no search dirs: {names}."
1211                    " (Generally, the source root would be applicable to all packages)"
1212                )
1213            extra_names = seen - into
1214            if extra_names:    [1214 ↛ 1215: condition was never true]
1215                names = ", ".join(p.name for p in extra_names)
1216                raise ValueError(
1217                    f"The install_request_context referenced the following unknown package(s): {names}"
1218                )
1219
1220            check_for_uninstalled_dirs = (
1221                install_request_context.check_for_uninstalled_dirs
1222            )
1223
1224        install_rule_context = InstallRuleContext(search_dirs)
1225
1226        if (    [1226 ↛ 1232: branch was never taken]
1227            enable_manifest_installation_feature
1228            and self._install_rules is None
1229            and dtmp_dir is not None
1230            and os.path.isdir(dtmp_dir.fs_path)
1231        ):
1232            msg = (
1233                "The build system appears to have provided the output of upstream build system's"
1234                " install in debian/tmp. However, there are no provisions for debputy to install"
1235                " any of that into any of the debian packages listed in debian/control."
1236                " To avoid accidentally creating empty packages, debputy will insist that you"
1237                " explicitly define an empty installation definition if you did not want to"
1238                " install any of those files even though they have been provided."
1239                ' Example: "installations: []"'
1240            )
1241            _error(msg)
1242        elif (    [1242 ↛ 1245: branch was never taken]
1243            not enable_manifest_installation_feature and self._install_rules is not None
1244        ):
1245            _error(
1246                f"The `installations` feature cannot be used in {self.manifest_path} with this integration mode."
1247                f" Please remove or comment out the `installations` keyword."
1248            )
1249
1250        for dctrl_bin in self.all_packages:
1251            package = dctrl_bin.name
1252            doc_main_package = self._detect_doc_main_package_for(dctrl_bin)
1253
1254            install_rule_context[package] = BinaryPackageInstallRuleContext(
1255                dctrl_bin,
1256                FSRootDir(),
1257                doc_main_package,
1258            )
1259
1260        if enable_manifest_installation_feature:    [1260 ↛ 1265: branch was never taken]
1261            discard_rules = list(
1262                self.plugin_provided_feature_set.auto_discard_rules.values()
1263            )
1264        else:
1265            discard_rules = [
1266                self.plugin_provided_feature_set.auto_discard_rules["debian-dir"]
1267            ]
1268        path_matcher = SourcePathMatcher(discard_rules)
1269
1270        source_condition_context = ConditionContext(
1271            binary_package=None,
1272            substitution=self.substitution,
1273            build_env=self._build_env,
1274            dpkg_architecture_variables=self._dpkg_architecture_variables,
1275            dpkg_arch_query_table=self._dpkg_arch_query_table,
1276        )
1277
1278        for dctrl_bin in self.active_packages:
1279            package = dctrl_bin.name
1280            if install_request_context:    [1280 ↛ 1285: condition was never false]
1281                build_system_staging_dir = install_request_context.debian_pkg_dirs.get(
1282                    package
1283                )
1284            else:
1285                build_system_staging_dir_fs_path = os.path.join("debian", package)
1286                if os.path.isdir(build_system_staging_dir_fs_path):
1287                    build_system_staging_dir = FSROOverlay.create_root_dir(
1288                        ".",
1289                        build_system_staging_dir_fs_path,
1290                    )
1291                else:
1292                    build_system_staging_dir = None
1293
1294            if build_system_staging_dir is not None:
1295                _install_everything_from_source_dir_if_present(
1296                    dctrl_bin,
1297                    self.substitution,
1298                    path_matcher,
1299                    install_rule_context,
1300                    source_condition_context,
1301                    build_system_staging_dir,
1302                )
1303
1304        if self._install_rules:
1305            # FIXME: Check that every install rule remains used after transformations have run.
1306            #  What we want to check is transformations do not exclude everything from an install
1307            #  rule. The hard part here is that renaming (etc.) is fine, so we cannot 1:1 string
1308            #  match.
1309            for install_rule in self._install_rules:
1310                install_rule.perform_install(
1311                    path_matcher,
1312                    install_rule_context,
1313                    source_condition_context,
1314                )
1315
1316        if enable_manifest_installation_feature:    [1316 ↛ 1320: condition was never false]
1317            for search_dir in check_for_uninstalled_dirs:
1318                _detect_missing_installations(path_matcher, search_dir)
1319
1320        for dctrl_bin in self.all_packages:
1321            package = dctrl_bin.name
1322            binary_install_rule_context = install_rule_context[package]
1323            build_system_pkg_staging_dir = os.path.join("debian", package)
1324            fs_root = binary_install_rule_context.fs_root
1325
1326            context = self.package_transformations[package]
1327            if dctrl_bin.should_be_acted_on and enable_manifest_installation_feature:    [1327 ↛ 1335: condition was never false]
1328                for special_install_rule in context.install_rules:    [1328 ↛ 1329: loop never started]
1329                    special_install_rule.perform_install(
1330                        path_matcher,
1331                        install_rule_context,
1332                        source_condition_context,
1333                    )
1334
1335            if dctrl_bin.should_be_acted_on:    [1335 ↛ 1347: condition was never false]
1336                self.apply_fs_transformations(package, fs_root)
1337                substvars_file = f"debian/{package}.substvars"
1338                substvars = FlushableSubstvars.load_from_path(
1339                    substvars_file, missing_ok=True
1340                )
1341                # We do not want to touch the substvars file (non-clean rebuild contamination)
1342                substvars.substvars_path = None
1343                control_output_dir = generated_content_dir(
1344                    package=dctrl_bin, subdir_key="DEBIAN"
1345                )
1346            else:
1347                substvars = FlushableSubstvars()
1348                control_output_dir = None
1349
1350            udeb_package = self._binary_packages.get(f"{package}-udeb")
1351            if udeb_package and not udeb_package.is_udeb:    [1351 ↛ 1352: condition was never true]
1352                udeb_package = None
1353
1354            package_metadata_context = PackageProcessingContextProvider(
1355                self,
1356                dctrl_bin,
1357                udeb_package,
1358                package_data_table,
1359                # FIXME: source_package
1360            )
1361
1362            ctrl_creator = BinaryCtrlAccessorProviderCreator(
1363                package_metadata_context,
1364                substvars,
1365                context.maintscript_snippets,
1366                context.substitution,
1367            )
1368
1369            if not enable_manifest_installation_feature:    [1369 ↛ 1370: condition was never true]
1370                assert_no_dbgsym_migration(dctrl_bin)
1371                dh_dbgsym_root_fs = FSROOverlay.create_root_dir(
1372                    "", dhe_dbgsym_root_dir(dctrl_bin)
1373                )
1374                dbgsym_root_fs = FSRootDir()
1375                _install_everything_from_source_dir_if_present(
1376                    dctrl_bin,
1377                    self.substitution,
1378                    path_matcher,
1379                    install_rule_context,
1380                    source_condition_context,
1381                    dh_dbgsym_root_fs,
1382                    into_dir=dbgsym_root_fs,
1383                )
1384                dbgsym_build_ids = read_dbgsym_file(dctrl_bin)
1385                dbgsym_info = DbgsymInfo(
1386                    dbgsym_root_fs,
1387                    dbgsym_build_ids,
1388                )
1389            else:
1390                dbgsym_info = DbgsymInfo(
1391                    FSRootDir(),
1392                    [],
1393                )
1394
1395            package_data_dict[package] = BinaryPackageData(
1396                self._source_package,
1397                dctrl_bin,
1398                build_system_pkg_staging_dir,
1399                control_output_dir,
1400                fs_root,
1401                substvars,
1402                package_metadata_context,
1403                ctrl_creator,
1404                dbgsym_info,
1405            )
1406
1407        _list_automatic_discard_rules(path_matcher)
1408
1409        return package_data_table
1410
1411    def condition_context(
1412        self, binary_package: Optional[Union[BinaryPackage, str]]
1413    ) -> ConditionContext:
1414        if binary_package is None:    [1414 ↛ 1415: condition was never true]
1415            return ConditionContext(
1416                binary_package=None,
1417                substitution=self.substitution,
1418                build_env=self._build_env,
1419                dpkg_architecture_variables=self._dpkg_architecture_variables,
1420                dpkg_arch_query_table=self._dpkg_arch_query_table,
1421            )
1422        if not isinstance(binary_package, str):    [1422 ↛ 1423: condition was never true]
1423            binary_package = binary_package.name
1424
1425        package_transformation = self.package_transformations[binary_package]
1426        return ConditionContext(
1427            binary_package=package_transformation.binary_package,
1428            substitution=package_transformation.substitution,
1429            build_env=self._build_env,
1430            dpkg_architecture_variables=self._dpkg_architecture_variables,
1431            dpkg_arch_query_table=self._dpkg_arch_query_table,
1432        )
1433
1434    def apply_fs_transformations(
1435        self,
1436        package: str,
1437        fs_root: FSPath,
1438    ) -> None:
1439        if package in self._used_for:    [1439 ↛ 1440: condition was never true]
1440            raise ValueError(
1441                f"data.tar contents for {package} has already been finalized!?"
1442            )
1443        if package not in self.package_transformations:    [1443 ↛ 1444: condition was never true]
1444            raise ValueError(
1445                f'The package "{package}" was not relevant for the manifest!?'
1446            )
1447        package_transformation = self.package_transformations[package]
1448        condition_context = ConditionContext(
1449            binary_package=package_transformation.binary_package,
1450            substitution=package_transformation.substitution,
1451            build_env=self._build_env,
1452            dpkg_architecture_variables=self._dpkg_architecture_variables,
1453            dpkg_arch_query_table=self._dpkg_arch_query_table,
1454        )
1455        norm_rules = list(
1456            builtin_mode_normalization_rules(
1457                self._dpkg_architecture_variables,
1458                package_transformation.binary_package,
1459                package_transformation.substitution,
1460            )
1461        )
1462        norm_mode_transformation_rule = ModeNormalizationTransformationRule(norm_rules)
1463        norm_mode_transformation_rule.transform_file_system(fs_root, condition_context)
1464        for transformation in package_transformation.transformations:
1465            transformation.transform_file_system(fs_root, condition_context)
1466        interpreter_normalization = NormalizeShebangLineTransformation()
1467        interpreter_normalization.transform_file_system(fs_root, condition_context)
1468
1469    def finalize_data_tar_contents(
1470        self,
1471        package: str,
1472        fs_root: FSPath,
1473        clamp_mtime_to: int,
1474    ) -> IntermediateManifest:
1475        if package in self._used_for:    [1475 ↛ 1476: condition was never true]
1476            raise ValueError(
1477                f"data.tar contents for {package} has already been finalized!?"
1478            )
1479        if package not in self.package_transformations:    [1479 ↛ 1480: condition was never true]
1480            raise ValueError(
1481                f'The package "{package}" was not relevant for the manifest!?'
1482            )
1483        self._used_for.add(package)
1484
1485        # At this point, there should be no further mutations to the file system (because they will
1486        # not be present in the intermediate manifest)
1487        cast("FSRootDir", fs_root).is_read_write = False
1488
1489        intermediate_manifest = list(
1490            _generate_intermediate_manifest(
1491                fs_root,
1492                clamp_mtime_to,
1493            )
1494        )
1495        return intermediate_manifest
1496
1497    def apply_to_binary_staging_directory(
1498        self,
1499        package: str,
1500        fs_root: FSPath,
1501        clamp_mtime_to: int,
1502    ) -> IntermediateManifest:
1503        self.apply_fs_transformations(package, fs_root)
1504        return self.finalize_data_tar_contents(package, fs_root, clamp_mtime_to)
1505
1506
1507@dataclasses.dataclass(slots=True)
1508class SearchDirOrderState:
1509    search_dir: VirtualPath
1510    applies_to: Union[Set[BinaryPackage], FrozenSet[BinaryPackage]] = dataclasses.field(
1511        default_factory=set
1512    )
1513    after: Set[str] = dataclasses.field(default_factory=set)
1514
1515
1516def _present_installation_dirs(
1517    search_dirs: Sequence[SearchDir],
1518    checked_missing_dirs: Sequence[VirtualPath],
1519    all_pkgs: FrozenSet[BinaryPackage],
1520) -> None:
1521    _info("The following directories are considered search dirs (in order):")
1522    max_len = max((len(s.search_dir.fs_path) for s in search_dirs), default=1)
1523    for search_dir in search_dirs:
1524        applies_to = ""
1525        if search_dir.applies_to < all_pkgs:
1526            names = ", ".join(p.name for p in search_dir.applies_to)
1527            applies_to = f" [only applicable to: {names}]"
1528        remark = ""
1529        if not os.path.isdir(search_dir.search_dir.fs_path):
1530            remark = " (skipped; absent)"
1531        _info(f" * {search_dir.search_dir.fs_path:<{max_len}}{applies_to}{remark}")
1532
1533    if checked_missing_dirs:
1534        _info('The following directories are considered for "not-installed" paths;')
1535        for d in checked_missing_dirs:
1536            remark = ""
1537            if not os.path.isdir(d.fs_path):
1538                remark = " (skipped; absent)"
1539            _info(f" * {d.fs_path:<{max_len}}{remark}")
1540
1541
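The `max_len` computed at line 1522 drives column alignment in the two `_info()` calls; a
dynamic-width left-alignment spec of the form `:<{max_len}` is assumed here, as the exact format
spec did not survive in this listing. A minimal sketch of that alignment idiom:

    paths = ["debian/tmp", "debian/foo", "."]
    max_len = max((len(p) for p in paths), default=1)
    for p in paths:
        # Left-align each path to the widest entry so the remarks line up.
        print(f" * {p:<{max_len}} (ok)")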

1542def _determine_search_dir_order(
1543    requested: Mapping[BinaryPackage, List[VirtualPath]],
1544    all_pkgs: FrozenSet[BinaryPackage],
1545    default_search_dirs: List[VirtualPath],
1546    source_root: VirtualPath,
1547) -> Sequence[SearchDir]:
1548    search_dir_table = {}
1549    assert requested.keys() <= all_pkgs
1550    for pkg in all_pkgs:
1551        paths = requested.get(pkg, default_search_dirs)
1552        previous_search_dir: Optional[SearchDirOrderState] = None
1553        for path in paths:
1554            try:
1555                search_dir_state = search_dir_table[path.fs_path]
1556            except KeyError:
1557                search_dir_state = SearchDirOrderState(path)
1558                search_dir_table[path.fs_path] = search_dir_state
1559            search_dir_state.applies_to.add(pkg)
1560            if previous_search_dir is not None:
1561                search_dir_state.after.add(previous_search_dir.search_dir.fs_path)
1562            previous_search_dir = search_dir_state
1563
1564    search_dirs_in_order = []
1565    released = set()
1566    remaining = set()
1567    for search_dir_state in search_dir_table.values():
1568        if not (search_dir_state.after <= released):
1569            remaining.add(search_dir_state.search_dir.fs_path)
1570            continue
1571        search_dirs_in_order.append(search_dir_state)
1572        released.add(search_dir_state.search_dir.fs_path)
1573
1574    while remaining:
1575        current_released = len(released)
1576        for fs_path in remaining:
1577            search_dir_state = search_dir_table[fs_path]
1578            if not search_dir_state.after.issubset(released):
1579                remaining.add(search_dir_state.search_dir.fs_path)
1580                continue
1581            search_dirs_in_order.append(search_dir_state)
1582            released.add(search_dir_state.search_dir.fs_path)
1583
1584        if current_released == len(released):
1585            names = ", ".join(remaining)
1586            _error(
1587                f"There is a circular dependency (somewhere) between the search dirs: {names}."
1588                " Note that the search directories across all packages have to be ordered (and the"
1589                " source root should generally be last)"
1590            )
1591        remaining -= released
1592
1593    search_dirs_in_order.append(
1594        SearchDirOrderState(
1595            source_root,
1596            all_pkgs,
1597        )
1598    )
1599
1600    return tuple(
1601        # Avoid duplicating all_pkgs
1602        SearchDir(
1603            s.search_dir,
1604            frozenset(s.applies_to) if s.applies_to != all_pkgs else all_pkgs,
1605        )
1606        for s in search_dirs_in_order
1607    )
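The release loop above is a Kahn-style topological sort keyed on each directory's `after` set.
A minimal standalone sketch of the same idea, assuming plain strings in place of VirtualPath
objects:

    def order_search_dirs(after: dict) -> list:
        # after maps dir -> set of dirs that must come first (cf. SearchDirOrderState.after).
        ordered, released = [], set()
        remaining = set(after)
        while remaining:
            progress = {d for d in remaining if after[d] <= released}
            if not progress:
                raise ValueError(f"circular dependency between: {sorted(remaining)}")
            for d in sorted(progress):
                ordered.append(d)
                released.add(d)
            remaining -= progress
        return ordered

    assert order_search_dirs(
        {"debian/tmp": set(), "debian/extra": {"debian/tmp"}}
    ) == ["debian/tmp", "debian/extra"]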
diff --git a/coverage-report/d_267b6307937f1878_installations_py.html b/coverage-report/d_267b6307937f1878_installations_py.html
deleted file mode 100644
index 6318f48..0000000
--- a/coverage-report/d_267b6307937f1878_installations_py.html
+++ /dev/null
@@ -1,1261 +0,0 @@
Coverage for src/debputy/installations.py: 65% (499 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200; navigation chrome omitted, source listing follows)
1 import collections
2 import dataclasses
3 import os.path
4 import re
5 from enum import IntEnum
6 from typing import (
7     List,
8     Dict,
9     FrozenSet,
10     Callable,
11     Union,
12     Iterator,
13     Tuple,
14     Set,
15     Sequence,
16     Optional,
17     Iterable,
18     TYPE_CHECKING,
19     cast,
20     Any,
21     Mapping,
22 )
23
24 from debputy.exceptions import DebputyRuntimeError
25 from debputy.filesystem_scan import FSPath
26 from debputy.manifest_conditions import (
27     ConditionContext,
28     ManifestCondition,
29     _BUILD_DOCS_BDO,
30 )
31 from debputy.manifest_parser.base_types import (
32     FileSystemMatchRule,
33     FileSystemExactMatchRule,
34     DebputyDispatchableType,
35 )
36 from debputy.packages import BinaryPackage
37 from debputy.path_matcher import MatchRule, ExactFileSystemPath, MATCH_ANYTHING
38 from debputy.substitution import Substitution
39 from debputy.util import _error, _warn
40
41 if TYPE_CHECKING:
42     from debputy.packager_provided_files import PackagerProvidedFile
43     from debputy.plugin.api import VirtualPath
44     from debputy.plugin.api.impl_types import PluginProvidedDiscardRule
45
46
47 _MAN_TH_LINE = re.compile(r'^[.]TH\s+\S+\s+"?(\d+[^"\s]*)"?')
48 _MAN_DT_LINE = re.compile(r"^[.]Dt\s+\S+\s+(\d+\S*)")
49 _MAN_SECTION_BASENAME = re.compile(r"[.]([1-9]\w*)(?:[.]gz)?$")
50 _MAN_REAL_SECTION = re.compile(r"^(\d+)")
51 _MAN_INST_BASENAME = re.compile(r"[.][^.]+$")
52 MAN_GUESS_LANG_FROM_PATH = re.compile(
53     r"(?:^|/)man/(?:([a-z][a-z](?:_[A-Z][A-Z])?)(?:\.[^/]+)?)?/man[1-9]/"
54 )
55 MAN_GUESS_FROM_BASENAME = re.compile(r"[.]([a-z][a-z](?:_[A-Z][A-Z])?)[.](?:[1-9]|man)")
56
57
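As a quick sanity check of the man-page patterns above, a standalone sketch (the sample strings
are invented):

    import re

    _MAN_TH_LINE = re.compile(r'^[.]TH\s+\S+\s+"?(\d+[^"\s]*)"?')
    _MAN_DT_LINE = re.compile(r"^[.]Dt\s+\S+\s+(\d+\S*)")
    _MAN_SECTION_BASENAME = re.compile(r"[.]([1-9]\w*)(?:[.]gz)?$")

    # Section from a roff ".TH"/".Dt" header line:
    assert _MAN_TH_LINE.match('.TH FOO "1" "2024" "foo 1.0"').group(1) == "1"
    assert _MAN_DT_LINE.match(".Dt FOO 3pm").group(1) == "3pm"
    # Section from the file's basename (optionally .gz compressed):
    assert _MAN_SECTION_BASENAME.search("foo.3pm.gz").group(1) == "3pm"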

58 class InstallRuleError(DebputyRuntimeError):
59     pass
60
61
62 class PathAlreadyInstalledOrDiscardedError(InstallRuleError):
63     @property
64     def path(self) -> str:
65         return cast("str", self.args[0])
66
67     @property
68     def into(self) -> FrozenSet[BinaryPackage]:
69         return cast("FrozenSet[BinaryPackage]", self.args[1])
70
71     @property
72     def definition_source(self) -> str:
73         return cast("str", self.args[2])
74
75
76 class ExactPathMatchTwiceError(InstallRuleError):
77     @property
78     def path(self) -> str:
79         return cast("str", self.args[1])
80
81     @property
82     def into(self) -> BinaryPackage:
83         return cast("BinaryPackage", self.args[2])
84
85     @property
86     def definition_source(self) -> str:
87         return cast("str", self.args[3])
88
89
90 class NoMatchForInstallPatternError(InstallRuleError):
91     @property
92     def pattern(self) -> str:
93         return cast("str", self.args[1])
94
95     @property
96     def search_dirs(self) -> Sequence["SearchDir"]:
97         return cast("Sequence[SearchDir]", self.args[2])
98
99     @property
100     def definition_source(self) -> str:
101         return cast("str", self.args[3])
102
103
104 @dataclasses.dataclass(slots=True, frozen=True)
105 class SearchDir:
106     search_dir: "VirtualPath"
107     applies_to: FrozenSet[BinaryPackage]
108
109
110 @dataclasses.dataclass(slots=True, frozen=True)
111 class BinaryPackageInstallRuleContext:
112     binary_package: BinaryPackage
113     fs_root: FSPath
114     doc_main_package: BinaryPackage
115
116     def replace(self, **changes: Any) -> "BinaryPackageInstallRuleContext":
117         return dataclasses.replace(self, **changes)
118
119
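The `replace()` helper above (and the one on InstallRuleContext further down) is a thin wrapper
over `dataclasses.replace`, which copies a frozen instance with selected fields overridden. A
minimal sketch with an invented dataclass:

    import dataclasses

    @dataclasses.dataclass(slots=True, frozen=True)
    class Ctx:
        fs_root: str
        doc_main_package: str

    ctx = Ctx("fs-a", "foo")
    # Frozen dataclasses cannot be mutated; replace() returns an updated copy.
    assert dataclasses.replace(ctx, doc_main_package="bar") == Ctx("fs-a", "bar")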

120 @dataclasses.dataclass(slots=True, frozen=True)
121 class InstallSearchDirContext:
122     search_dirs: Sequence[SearchDir]
123     check_for_uninstalled_dirs: Sequence["VirtualPath"]
124     # TODO: Support search dirs per-package
125     debian_pkg_dirs: Mapping[str, "VirtualPath"] = dataclasses.field(
126         default_factory=dict
127     )
128
129
130 @dataclasses.dataclass(slots=True)
131 class InstallRuleContext:
132     # TODO: Search dirs should be per-package
133     search_dirs: Sequence[SearchDir]
134     binary_package_contexts: Dict[str, BinaryPackageInstallRuleContext] = (
135         dataclasses.field(default_factory=dict)
136     )
137
138     def __getitem__(self, item: str) -> BinaryPackageInstallRuleContext:
139         return self.binary_package_contexts[item]
140
141     def __setitem__(self, key: str, value: BinaryPackageInstallRuleContext) -> None:
142         self.binary_package_contexts[key] = value
143
144     def replace(self, **changes: Any) -> "InstallRuleContext":
145         return dataclasses.replace(self, **changes)
146
147
148 @dataclasses.dataclass(slots=True, frozen=True)
149 class PathMatch:
150     path: "VirtualPath"
151     search_dir: "VirtualPath"
152     is_exact_match: bool
153     into: FrozenSet[BinaryPackage]
154
155
156 class DiscardState(IntEnum):
157     UNCHECKED = 0
158     NOT_DISCARDED = 1
159     DISCARDED_BY_PLUGIN_PROVIDED_RULE = 2
160     DISCARDED_BY_MANIFEST_RULE = 3
161
162
163 def _determine_manpage_section(
164     match_rule: PathMatch,
165     provided_section: Optional[int],
166     definition_source: str,
167 ) -> Optional[str]:
168     section = str(provided_section) if provided_section is not None else None
169     if section is None:
170         detected_section = None
171         with open(match_rule.path.fs_path, "r") as fd:
172             for line in fd:
173                 if not line.startswith((".TH", ".Dt")):
174                     continue
175
176                 m = _MAN_DT_LINE.match(line)
177                 if not m:
178                     m = _MAN_TH_LINE.match(line)
179                     if not m:
180                         continue
181                 detected_section = m.group(1)
182                 if "." in detected_section:
183                     _warn(
184                         f"Ignoring detected section {detected_section} in {match_rule.path.fs_path}"
185                         f" (detected via {definition_source}): It looks too much like a version"
186                     )
187                     detected_section = None
188                 break
189         if detected_section is None:
190             m = _MAN_SECTION_BASENAME.search(os.path.basename(match_rule.path.path))
191             if m:
192                 detected_section = m.group(1)
193         section = detected_section
194
195     return section
196
197
198 def _determine_manpage_real_section(
199     match_rule: PathMatch,
200     section: Optional[str],
201     definition_source: str,
202 ) -> int:
203     real_section = None
204     if section is not None:
205         m = _MAN_REAL_SECTION.match(section)
206         if m:
207             real_section = int(m.group(1))
208     if real_section is None or real_section < 0 or real_section > 9:
209         if real_section is not None:
210             _warn(
211                 f"Computed section for {match_rule.path.fs_path} was {real_section} (section: {section}),"
212                 f" which is not a valid section (must be between 1 and 9 incl.)"
213             )
214         _error(
215             f"Could not determine the section for {match_rule.path.fs_path} automatically. The man page"
216             f" was detected via {definition_source}. Consider using `section: <number>` to"
217             " explicitly declare the section. Keep in mind that it applies to all man pages for that"
218             " rule and you may have to split the rule into two for this reason."
219         )
220     return real_section
221
222
223 def _determine_manpage_language(
224     match_rule: PathMatch,
225     provided_language: Optional[str],
226 ) -> Optional[str]:
227     if provided_language is not None:
228         if provided_language not in ("derive-from-basename", "derive-from-path"):
229             return provided_language if provided_language != "C" else None
230         if provided_language == "derive-from-basename":
231             m = MAN_GUESS_FROM_BASENAME.search(match_rule.path.name)
232             if m is None:
233                 return None
234             return m.group(1)
235         # Fall-through for derive-from-path case
236     m = MAN_GUESS_LANG_FROM_PATH.search(match_rule.path.path)
237     if m is None:
238         return None
239     return m.group(1)
240
241
242 def _dest_path_for_manpage(
243     provided_section: Optional[int],
244     provided_language: Optional[str],
245     definition_source: str,
246 ) -> Callable[["PathMatch"], str]:
247     def _manpage_dest_path(match_rule: PathMatch) -> str:
248         inst_basename = _MAN_INST_BASENAME.sub("", match_rule.path.name)
249         section = _determine_manpage_section(
250             match_rule, provided_section, definition_source
251         )
252         real_section = _determine_manpage_real_section(
253             match_rule, section, definition_source
254         )
255         assert section is not None
256         language = _determine_manpage_language(match_rule, provided_language)
257         if language is None:
258             maybe_language = ""
259         else:
260             maybe_language = f"{language}/"
261             lang_suffix = f".{language}"
262             if inst_basename.endswith(lang_suffix):
263                 inst_basename = inst_basename[: -len(lang_suffix)]
264
265         return (
266             f"usr/share/man/{maybe_language}man{real_section}/{inst_basename}.{section}"
267         )
268
269     return _manpage_dest_path
270
271
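Putting the helpers above together, a page like `foo.de.1` in a German man tree lands under
`usr/share/man/de/man1/`. A standalone sketch exercising just the two language-guessing regexes
on invented sample paths:

    import re

    MAN_GUESS_LANG_FROM_PATH = re.compile(
        r"(?:^|/)man/(?:([a-z][a-z](?:_[A-Z][A-Z])?)(?:\.[^/]+)?)?/man[1-9]/"
    )
    MAN_GUESS_FROM_BASENAME = re.compile(r"[.]([a-z][a-z](?:_[A-Z][A-Z])?)[.](?:[1-9]|man)")

    m = MAN_GUESS_LANG_FROM_PATH.search("build/man/de/man1/foo.1")
    assert m is not None and m.group(1) == "de"
    m = MAN_GUESS_FROM_BASENAME.search("foo.de.1")
    assert m is not None and m.group(1) == "de"
    # An untranslated page yields no language match:
    assert MAN_GUESS_FROM_BASENAME.search("foo.1") is None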

272 class SourcePathMatcher:
273     def __init__(self, auto_discard_rules: List["PluginProvidedDiscardRule"]) -> None:
274         self._already_matched: Dict[
275             str,
276             Tuple[FrozenSet[BinaryPackage], str],
277         ] = {}
278         self._exact_match_request: Set[Tuple[str, str]] = set()
279         self._discarded: Dict[str, DiscardState] = {}
280         self._auto_discard_rules = auto_discard_rules
281         self.used_auto_discard_rules: Dict[str, Set[str]] = collections.defaultdict(set)
282
283     def is_reserved(self, path: "VirtualPath") -> bool:
284         fs_path = path.fs_path
285         if fs_path in self._already_matched:
286             return True
287         result = self._discarded.get(fs_path, DiscardState.UNCHECKED)
288         if result == DiscardState.UNCHECKED:    [288 ↛ 290: condition was never false]
289             result = self._check_plugin_provided_exclude_state_for(path)
290         if result == DiscardState.NOT_DISCARDED:
291             return False
292
293         return True
294
295     def exclude(self, path: str) -> None:
296         self._discarded[path] = DiscardState.DISCARDED_BY_MANIFEST_RULE
297
298     def _run_plugin_provided_discard_rules_on(self, path: "VirtualPath") -> bool:
299         for dr in self._auto_discard_rules:
300             verdict = dr.should_discard(path)
301             if verdict:
302                 self.used_auto_discard_rules[dr.name].add(path.fs_path)
303                 return True
304         return False
305
306     def _check_plugin_provided_exclude_state_for(
307         self,
308         path: "VirtualPath",
309     ) -> DiscardState:
310         cache_misses = []
311         current_path = path
312         while True:
313             fs_path = current_path.fs_path
314             exclude_state = self._discarded.get(fs_path, DiscardState.UNCHECKED)
315             if exclude_state != DiscardState.UNCHECKED:
316                 verdict = exclude_state
317                 break
318             cache_misses.append(fs_path)
319             if self._run_plugin_provided_discard_rules_on(current_path):
320                 verdict = DiscardState.DISCARDED_BY_PLUGIN_PROVIDED_RULE
321                 break
322             # We cannot trust a "NOT_DISCARDED" until we check its parent (the directory could
323             # be excluded without the files in it triggering the rule).
324             parent_dir = current_path.parent_dir
325             if not parent_dir:
326                 verdict = DiscardState.NOT_DISCARDED
327                 break
328             current_path = parent_dir
329         if cache_misses:    [329 ↛ 332: condition was never false]
330             for p in cache_misses:
331                 self._discarded[p] = verdict
332         return verdict
333
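The walk above climbs from a path toward the root, stops at the first cached or rule-provided
verdict, and then back-fills the cache for every miss. A minimal sketch of that memoization
pattern, assuming plain string paths and predicate-style discard rules:

    from typing import Callable, Dict, List

    def is_discarded(path: str, rules: List[Callable[[str], bool]],
                     cache: Dict[str, bool]) -> bool:
        misses = []
        current = path
        while True:
            if current in cache:
                verdict = cache[current]
                break
            misses.append(current)
            if any(rule(current) for rule in rules):
                verdict = True
                break
            if "/" not in current:
                verdict = False
                break
            current = current.rsplit("/", 1)[0]
        for p in misses:  # back-fill the cache for every path we visited
            cache[p] = verdict
        return verdict

    cache: Dict[str, bool] = {}
    rules = [lambda p: p.endswith("__pycache__")]
    assert is_discarded("pkg/__pycache__/mod.pyc", rules, cache) is True
    assert cache["pkg/__pycache__"] is True  # the parent's verdict was cached too
    assert is_discarded("pkg/data.txt", rules, cache) is False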

334     def may_match(
335         self,
336         match: PathMatch,
337         *,
338         is_exact_match: bool = False,
339     ) -> Tuple[FrozenSet[BinaryPackage], bool]:
340         m = self._already_matched.get(match.path.fs_path)
341         if m:    [341 ↛ 342: condition was never true]
342             return m[0], False
343         current_path = match.path.fs_path
344         discard_state = self._discarded.get(current_path, DiscardState.UNCHECKED)
345
346         if discard_state == DiscardState.UNCHECKED:
347             discard_state = self._check_plugin_provided_exclude_state_for(match.path)
348
349         assert discard_state is not None and discard_state != DiscardState.UNCHECKED
350
351         is_discarded = discard_state != DiscardState.NOT_DISCARDED
352         if (
353             is_exact_match
354             and discard_state == DiscardState.DISCARDED_BY_PLUGIN_PROVIDED_RULE
355         ):
356             is_discarded = False
357         return frozenset(), is_discarded
358
359     def reserve(
360         self,
361         path: "VirtualPath",
362         reserved_by: FrozenSet[BinaryPackage],
363         definition_source: str,
364         *,
365         is_exact_match: bool = False,
366     ) -> None:
367         fs_path = path.fs_path
368         self._already_matched[fs_path] = reserved_by, definition_source
369         if not is_exact_match:    [369 ↛ 371: condition was never false]
370             return
371         for pkg in reserved_by:
372             m_key = (pkg.name, fs_path)
373             self._exact_match_request.add(m_key)
374         try:
375             del self._discarded[fs_path]
376         except KeyError:
377             pass
378         for discarded_paths in self.used_auto_discard_rules.values():
379             discarded_paths.discard(fs_path)
380
381     def detect_missing(self, search_dir: "VirtualPath") -> Iterator["VirtualPath"]:
382         stack = list(search_dir.iterdir)
383         while stack:
384             m = stack.pop()
385             if m.is_dir:
386                 s_len = len(stack)
387                 stack.extend(m.iterdir)
388
389                 if s_len == len(stack) and not self.is_reserved(m):
390                     # "Explicitly" empty dir
391                     yield m
392             elif not self.is_reserved(m):
393                 yield m
394
395     def find_and_reserve_all_matches(
396         self,
397         match_rule: MatchRule,
398         search_dirs: Sequence[SearchDir],
399         dir_only_match: bool,
400         match_filter: Optional[Callable[["VirtualPath"], bool]],
401         reserved_by: FrozenSet[BinaryPackage],
402         definition_source: str,
403     ) -> Tuple[List[PathMatch], Tuple[int, ...]]:
404         matched = []
405         already_installed_paths = 0
406         already_excluded_paths = 0
407         glob_expand = False if isinstance(match_rule, ExactFileSystemPath) else True
408
409         for match in _resolve_path(
410             match_rule,
411             search_dirs,
412             dir_only_match,
413             match_filter,
414             reserved_by,
415         ):
416             installed_into, excluded = self.may_match(
417                 match, is_exact_match=not glob_expand
418             )
419             if installed_into:    [419 ↛ 420: condition was never true]
420                 if glob_expand:
421                     already_installed_paths += 1
422                     continue
423                 packages = ", ".join(p.name for p in installed_into)
424                 raise PathAlreadyInstalledOrDiscardedError(
425                     f'The "{match.path.fs_path}" has been reserved by and installed into {packages}.'
426                     f" The definition that triggered this issue is {definition_source}.",
427                     match,
428                     installed_into,
429                     definition_source,
430                 )
431             if excluded:
432                 if glob_expand:    [432 ↛ 435: condition was never false]
433                     already_excluded_paths += 1
434                     continue
435                 raise PathAlreadyInstalledOrDiscardedError(
436                     f'The "{match.path.fs_path}" has been excluded. If you want this path installed, move it'
437                     f" above the exclusion rule that excluded it. The definition that triggered this"
438                     f" issue is {definition_source}.",
439                     match,
440                     installed_into,
441                     definition_source,
442                 )
443             if not glob_expand:
444                 for pkg in match.into:
445                     m_key = (pkg.name, match.path.fs_path)
446                     if m_key in self._exact_match_request:    [446 ↛ 447: condition was never true]
447                         raise ExactPathMatchTwiceError(
448                             f'The path "{match.path.fs_path}" (via exact match) has already been installed'
449                             f" into {pkg.name}. The second installation triggered by {definition_source}",
450                             match.path,
451                             pkg,
452                             definition_source,
453                         )
454                     self._exact_match_request.add(m_key)
455
456             if reserved_by:    [456 ↛ 462: condition was never false]
457                 self._already_matched[match.path.fs_path] = (
458                     match.into,
459                     definition_source,
460                 )
461             else:
462                 self.exclude(match.path.fs_path)
463             matched.append(match)
464         exclude_counts = already_installed_paths, already_excluded_paths
465         return matched, exclude_counts
466
467
468 def _resolve_path(
469     match_rule: MatchRule,
470     search_dirs: Iterable["SearchDir"],
471     dir_only_match: bool,
472     match_filter: Optional[Callable[["VirtualPath"], bool]],
473     into: FrozenSet[BinaryPackage],
474 ) -> Iterator[PathMatch]:
475     missing_matches = set(into)
476     for sdir in search_dirs:
477         matched = False
478         if into and missing_matches.isdisjoint(sdir.applies_to):    [478 ↛ 480: condition was never true]
479             # All the packages, where this search dir applies, already got a match
480             continue
481         applicable = sdir.applies_to & missing_matches
482         for matched_path in match_rule.finditer(
483             sdir.search_dir,
484             ignore_paths=match_filter,
485         ):
486             if dir_only_match and not matched_path.is_dir:    [486 ↛ 487: condition was never true]
487                 continue
488             if matched_path.parent_dir is None:
489                 if match_rule is MATCH_ANYTHING:    [489 ↛ 491: condition was never false]
490                     continue
491                 _error(
492                     f"The pattern {match_rule.describe_match_short()} matched the root dir."
493                 )
494             yield PathMatch(matched_path, sdir.search_dir, False, applicable)
495             matched = True
496             # continue; we want to match everything we can from this search directory.
497
498         if matched:
499             missing_matches -= applicable
500             if into and not missing_matches:
501                 # For install rules, we can stop as soon as all packages had a match
502                 # For discard rules, all search directories must be visited. Otherwise,
503                 # you would have to repeat the discard rule once per search dir to be
504                 # sure something is fully discarded
505                 break
506
507
508 def _resolve_dest_paths(
509     match: PathMatch,
510     dest_paths: Sequence[Tuple[str, bool]],
511     install_context: "InstallRuleContext",
512 ) -> Sequence[Tuple[str, "FSPath"]]:
513     dest_and_roots = []
514     for dest_path, dest_path_is_format in dest_paths:
515         if dest_path_is_format:
516             for pkg in match.into:
517                 parent_dir = match.path.parent_dir
518                 pkg_install_context = install_context[pkg.name]
519                 fs_root = pkg_install_context.fs_root
520                 dpath = dest_path.format(
521                     basename=match.path.name,
522                     dirname=parent_dir.path if parent_dir is not None else "",
523                     package_name=pkg.name,
524                     doc_main_package_name=pkg_install_context.doc_main_package.name,
525                 )
526                 if dpath.endswith("/"):    [526 ↛ 527: condition was never true]
527                     raise ValueError(
528                         f'Provided destination (when resolved for {pkg.name}) for "{match.path.path}" ended'
529                         f' with "/" ("{dest_path}"), which it must not!'
530                     )
531                 dest_and_roots.append((dpath, fs_root))
532         else:
533             if dest_path.endswith("/"):    [533 ↛ 534: condition was never true]
534                 raise ValueError(
535                     f'Provided destination for "{match.path.path}" ended with "/" ("{dest_path}"),'
536                     " which it must not!"
537                 )
538             dest_and_roots.extend(
539                 (dest_path, install_context[pkg.name].fs_root) for pkg in match.into
540             )
541     return dest_and_roots
542
543
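Format-style destinations support the four placeholders visible in the `format()` call above. A
quick standalone illustration with invented values:

    dest = "usr/share/doc/{doc_main_package_name}/examples/{basename}"
    resolved = dest.format(
        basename="demo.py",
        dirname="examples",          # directory of the matched path
        package_name="foo-doc",      # package being installed into
        doc_main_package_name="foo", # cf. _detect_doc_main_package_for
    )
    assert resolved == "usr/share/doc/foo/examples/demo.py"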

544 def _resolve_matches(
545     matches: List[PathMatch],
546     dest_paths: Union[Sequence[Tuple[str, bool]], Callable[[PathMatch], str]],
547     install_context: "InstallRuleContext",
548 ) -> Iterator[Tuple[PathMatch, Sequence[Tuple[str, "FSPath"]]]]:
549     if callable(dest_paths):    [549 ↛ 550: condition was never true]
550         compute_dest_path = dest_paths
551         for match in matches:
552             dpath = compute_dest_path(match)
553             if dpath.endswith("/"):
554                 raise ValueError(
555                     f'Provided destination for "{match.path.path}" ended with "/" ("{dpath}"), which it must not!'
556                 )
557             dest_and_roots = [
558                 (dpath, install_context[pkg.name].fs_root) for pkg in match.into
559             ]
560             yield match, dest_and_roots
561     else:
562         for match in matches:
563             dest_and_roots = _resolve_dest_paths(
564                 match,
565                 dest_paths,
566                 install_context,
567             )
568             yield match, dest_and_roots
569
570
571 class InstallRule(DebputyDispatchableType):
572     __slots__ = (
573         "_already_matched",
574         "_exact_match_request",
575         "_condition",
576         "_match_filter",
577         "_definition_source",
578     )
579
580     def __init__(
581         self,
582         condition: Optional[ManifestCondition],
583         definition_source: str,
584         *,
585         match_filter: Optional[Callable[["VirtualPath"], bool]] = None,
586     ) -> None:
587         self._condition = condition
588         self._definition_source = definition_source
589         self._match_filter = match_filter
590
591     def _check_single_match(
592         self, source: FileSystemMatchRule, matches: List[PathMatch]
593     ) -> None:
594         seen_pkgs = set()
595         problem_pkgs = frozenset()
596         for m in matches:
597             problem_pkgs = seen_pkgs & m.into
598             if problem_pkgs:    [598 ↛ 599: condition was never true]
599                 break
600             seen_pkgs.update(problem_pkgs)
601         if problem_pkgs:    [601 ↛ 602: condition was never true]
602             pkg_names = ", ".join(sorted(p.name for p in problem_pkgs))
603             _error(
604                 f'The pattern "{source.raw_match_rule}" matched multiple entries for the packages: {pkg_names}.'
605                 " However, it should match exactly one item. Please tighten the pattern defined"
606                 f" in {self._definition_source}"
607             )
608
609     def _match_pattern(
610         self,
611         path_matcher: SourcePathMatcher,
612         fs_match_rule: FileSystemMatchRule,
613         condition_context: ConditionContext,
614         search_dirs: Sequence[SearchDir],
615         into: FrozenSet[BinaryPackage],
616     ) -> List[PathMatch]:
617         (matched, exclude_counts) = path_matcher.find_and_reserve_all_matches(
618             fs_match_rule.match_rule,
619             search_dirs,
620             fs_match_rule.raw_match_rule.endswith("/"),
621             self._match_filter,
622             into,
623             self._definition_source,
624         )
625
626         already_installed_paths, already_excluded_paths = exclude_counts
627
628         if into:    [628 ↛ 633: condition was never false]
629             allow_empty_match = all(not p.should_be_acted_on for p in into)    [629 ↛ exit: generator expression never finished]
630         else:
631             # discard rules must match provided at least one search dir exists. If none of them
632             # exist, then we assume the discard rule is for a package that will not be built
633             allow_empty_match = any(s.search_dir.is_dir for s in search_dirs)
634         if self._condition is not None and not self._condition.evaluate(    [634 ↛ 637: condition was never true]
635             condition_context
636         ):
637             allow_empty_match = True
638
639         if not matched and not allow_empty_match:
640             search_dir_text = ", ".join(x.search_dir.fs_path for x in search_dirs)
641             if already_excluded_paths and already_installed_paths:    [641 ↛ 642: condition was never true]
642                 total_paths = already_excluded_paths + already_installed_paths
643                 msg = (
644                     f"There were no matches for {fs_match_rule.raw_match_rule} in {search_dir_text} after ignoring"
645                     f" {total_paths} path(s) already matched previously either by install or"
646                     f" exclude rules. If you wanted to install some of these paths into multiple"
647                     f" packages, please tweak the definition that installed them to install them"
648                     f' into multiple packages (usually change "into: foo" to "into: [foo, bar]").'
649                     f" If you wanted to install these paths and exclude rules are getting in your"
650                     f" way, then please move this install rule before the exclusion rule that causes"
651                     f" issue or, in case of built-in excludes, list the paths explicitly (without"
652                     f" using patterns). Source for this issue is {self._definition_source}. Match rule:"
653                     f" {fs_match_rule.match_rule.describe_match_exact()}"
654                 )
655             elif already_excluded_paths:    [655 ↛ 656: branch was never taken]
656                 msg = (
657                     f"There were no matches for {fs_match_rule.raw_match_rule} in {search_dir_text} after ignoring"
658                     f" {already_excluded_paths} path(s) that have been excluded."
659                     " If you wanted to install some of these paths, please move the install rule"
660                     " before the exclusion rule or, in case of built-in excludes, list the paths explicitly"
661                     f" (without using patterns). Source for this issue is {self._definition_source}. Match rule:"
662                     f" {fs_match_rule.match_rule.describe_match_exact()}"
663                 )
664             elif already_installed_paths:    [664 ↛ 665: branch was never taken]
665                 msg = (
666                     f"There were no matches for {fs_match_rule.raw_match_rule} in {search_dir_text} after ignoring"
667                     f" {already_installed_paths} path(s) already matched previously."
668                     " If you wanted to install some of these paths into multiple packages,"
669                     f" please tweak the definition that installed them to install them into"
670                     f' multiple packages (usually change "into: foo" to "into: [foo, bar]").'
671                     f" Source for this issue is {self._definition_source}. Match rule:"
672                     f" {fs_match_rule.match_rule.describe_match_exact()}"
673                 )
674             else:
675                 # TODO: Try harder to find the match and point out possible typos
676                 msg = (
677                     f"There were no matches for {fs_match_rule.raw_match_rule} in {search_dir_text} (definition:"
678                     f" {self._definition_source}). Match rule: {fs_match_rule.match_rule.describe_match_exact()}"
679                 )
680             raise NoMatchForInstallPatternError(
681                 msg,
682                 fs_match_rule,
683                 search_dirs,
684                 self._definition_source,
685             )
686         return matched
687
688     def _install_matches(
689         self,
690         path_matcher: SourcePathMatcher,
691         matches: List[PathMatch],
692         dest_paths: Union[Sequence[Tuple[str, bool]], Callable[[PathMatch], str]],
693         install_context: "InstallRuleContext",
694         into: FrozenSet[BinaryPackage],
695         condition_context: ConditionContext,
696     ) -> None:
697         if (    [697 ↛ exit, 697 ↛ 703: 2 missed branches]
698             self._condition is not None
699             and not self._condition.evaluate(condition_context)
700         ) or not any(p.should_be_acted_on for p in into):
701             # Rule is disabled; skip all its actions - also allow empty matches
702             # for this particular case.
703             return
704
705         if not matches:    [705 ↛ 706: condition was never true]
706             raise ValueError("matches must not be empty")
707
708         for match, dest_paths_and_roots in _resolve_matches(
709             matches,
710             dest_paths,
711             install_context,
712         ):
713             install_recursively_into_dirs = []
714             for dest, fs_root in dest_paths_and_roots:
715                 dir_part, basename = os.path.split(dest)
716                 # We do not associate these with the FS path. First off,
717                 # it is complicated to do in most cases (indeed, debhelper
718                 # does not preserve these directories either) and secondly,
719                 # it is "only" mtime and mode - mostly irrelevant as the
720                 # directory is 99.9% likely to be 0755 (we are talking
721                 # directories like "/usr", "/usr/share").
722                 dir_path = fs_root.mkdirs(dir_part)
723                 existing_path = dir_path.get(basename)
724
725                 if match.path.is_dir:
726                     if existing_path is not None and not existing_path.is_dir:    [726 ↛ 727: condition was never true]
727                         existing_path.unlink()
728                         existing_path = None
729                     current_dir = existing_path
730
731                     if current_dir is None:    [731 ↛ 735: condition was never false]
732                         current_dir = dir_path.mkdir(
733                             basename, reference_path=match.path
734                         )
735                     install_recursively_into_dirs.append(current_dir)
736                 else:
737                     if existing_path is not None and existing_path.is_dir:    [737 ↛ 738: condition was never true]
738                         _error(
739                             f"Cannot install {match.path} ({match.path.fs_path}) as {dest}. That path already exists"
740                             f" and is a directory. This error was triggered via {self._definition_source}."
741                         )
742
743                     if match.path.is_symlink:
744                         dir_path.add_symlink(
745                             basename, match.path.readlink(), reference_path=match.path
746                         )
747                     else:
748                         dir_path.insert_file_from_fs_path(
749                             basename,
750                             match.path.fs_path,
751                             follow_symlinks=False,
752                             use_fs_path_mode=True,
753                             reference_path=match.path,
754                         )
755             if install_recursively_into_dirs:
756                 self._install_dir_recursively(
757                     path_matcher, install_recursively_into_dirs, match, into
758                 )
759
760     def _install_dir_recursively(
761         self,
762         path_matcher: SourcePathMatcher,
763         parent_dirs: Sequence[FSPath],
764         match: PathMatch,
765         into: FrozenSet[BinaryPackage],
766     ) -> None:
767         stack = [
768             (parent_dirs, e)
769             for e in match.path.iterdir
770             if not path_matcher.is_reserved(e)
771         ]
772
773         while stack:
774             current_dirs, dir_entry = stack.pop()
775             path_matcher.reserve(
776                 dir_entry,
777                 into,
778                 self._definition_source,
779                 is_exact_match=False,
780             )
781             if dir_entry.is_dir:    [781 ↛ 782: condition was never true]
782                 new_dirs = [
783                     d.mkdir(dir_entry.name, reference_path=dir_entry)
784                     for d in current_dirs
785                 ]
786                 stack.extend(
787                     (new_dirs, de)
788                     for de in dir_entry.iterdir
789                     if not path_matcher.is_reserved(de)
790                 )
791             elif dir_entry.is_symlink:
792                 for current_dir in current_dirs:
793                     current_dir.add_symlink(
794                         dir_entry.name,
795                         dir_entry.readlink(),
796                         reference_path=dir_entry,
797                     )
798             elif dir_entry.is_file:    [798 ↛ 808: condition was never false]
799                 for current_dir in current_dirs:
800                     current_dir.insert_file_from_fs_path(
801                         dir_entry.name,
802                         dir_entry.fs_path,
803                         use_fs_path_mode=True,
804                         follow_symlinks=False,
805                         reference_path=dir_entry,
806                     )
807             else:
808                 _error(
809                     f"Unsupported file type: {dir_entry.fs_path} - not a file, directory or symlink"
810                 )
811
812     def perform_install(
813         self,
814         path_matcher: SourcePathMatcher,
815         install_context: InstallRuleContext,
816         condition_context: ConditionContext,
817     ) -> None:
818         raise NotImplementedError
819
820     @classmethod
821     def install_as(
822         cls,
823         source: FileSystemMatchRule,
824         dest_path: str,
825         into: FrozenSet[BinaryPackage],
826         definition_source: str,
827         condition: Optional[ManifestCondition],
828     ) -> "InstallRule":
829         return GenericInstallationRule(
830             [source],
831             [(dest_path, False)],
832             into,
833             condition,
834             definition_source,
835             require_single_match=True,
836         )
837
838     @classmethod
839     def install_dest(
840         cls,
841         sources: Sequence[FileSystemMatchRule],
842         dest_dir: Optional[str],
843         into: FrozenSet[BinaryPackage],
844         definition_source: str,
845         condition: Optional[ManifestCondition],
846     ) -> "InstallRule":
847         if dest_dir is None:
848             dest_dir = "{dirname}/{basename}"
849         else:
850             dest_dir = os.path.join(dest_dir, "{basename}")
851         return GenericInstallationRule(
852             sources,
853             [(dest_dir, True)],
854             into,
855             condition,
856             definition_source,
857         )
858
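The defaulting above means an omitted destination re-creates the source's own location, while an
explicit directory keeps only the basename. A tiny standalone sketch of the two resulting
templates:

    import os.path

    def dest_template(dest_dir=None):
        # Mirrors the defaulting in install_dest above.
        if dest_dir is None:
            return "{dirname}/{basename}"
        return os.path.join(dest_dir, "{basename}")

    assert dest_template() == "{dirname}/{basename}"
    assert dest_template("usr/bin") == "usr/bin/{basename}"
    assert dest_template().format(dirname="usr/lib/foo", basename="tool") == "usr/lib/foo/tool"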

859     @classmethod
860     def install_multi_as(
861         cls,
862         source: FileSystemMatchRule,
863         dest_paths: Sequence[str],
864         into: FrozenSet[BinaryPackage],
865         definition_source: str,
866         condition: Optional[ManifestCondition],
867     ) -> "InstallRule":
868         if len(dest_paths) < 2:    [868 ↛ 869: condition was never true]
869             raise ValueError(
870                 "Please use `install_as` when there are fewer than 2 dest paths"
871             )
872         dps = tuple((dp, False) for dp in dest_paths)
873         return GenericInstallationRule(
874             [source],
875             dps,
876             into,
877             condition,
878             definition_source,
879             require_single_match=True,
880         )
881
882     @classmethod
883     def install_multi_dest(
884         cls,
885         sources: Sequence[FileSystemMatchRule],
886         dest_dirs: Sequence[str],
887         into: FrozenSet[BinaryPackage],
888         definition_source: str,
889         condition: Optional[ManifestCondition],
890     ) -> "InstallRule":
891         if len(dest_dirs) < 2:    [891 ↛ 892: condition was never true]
892             raise ValueError(
893                 "Please use `install_dest` when there are fewer than 2 dest dirs"
894             )
895         dest_paths = tuple((os.path.join(dp, "{basename}"), True) for dp in dest_dirs)
896         return GenericInstallationRule(
897             sources,
898             dest_paths,
899             into,
900             condition,
901             definition_source,
902         )
903
904     @classmethod
905     def install_doc(
906         cls,
907         sources: Sequence[FileSystemMatchRule],
908         dest_dir: Optional[str],
909         into: FrozenSet[BinaryPackage],
910         definition_source: str,
911         condition: Optional[ManifestCondition],
912     ) -> "InstallRule":
913         cond: ManifestCondition = _BUILD_DOCS_BDO
914         if condition is not None:
915             cond = ManifestCondition.all_of([cond, condition])
916         dest_path_is_format = False
917         if dest_dir is None:
918             dest_dir = "usr/share/doc/{doc_main_package_name}/{basename}"
919             dest_path_is_format = True
920
921         return GenericInstallationRule(
922             sources,
923             [(dest_dir, dest_path_is_format)],
924             into,
925             cond,
926             definition_source,
927         )
928
929     @classmethod
930     def install_doc_as(
931         cls,
932         source: FileSystemMatchRule,
933         dest_path: str,
934         into: FrozenSet[BinaryPackage],
935         definition_source: str,
936         condition: Optional[ManifestCondition],
937     ) -> "InstallRule":
938         cond: ManifestCondition = _BUILD_DOCS_BDO
939         if condition is not None:
940             cond = ManifestCondition.all_of([cond, condition])
941
942         return GenericInstallationRule(
943             [source],
944             [(dest_path, False)],
945             into,
946             cond,
947             definition_source,
948             require_single_match=True,
949         )
950
951     @classmethod
952     def install_examples(
953         cls,
954         sources: Sequence[FileSystemMatchRule],
955         into: FrozenSet[BinaryPackage],
956         definition_source: str,
957         condition: Optional[ManifestCondition],
958     ) -> "InstallRule":
959         cond: ManifestCondition = _BUILD_DOCS_BDO
960         if condition is not None:    [960 ↛ 961: condition was never true]
961             cond = ManifestCondition.all_of([cond, condition])
962         return GenericInstallationRule(
963             sources,
964             [("usr/share/doc/{doc_main_package_name}/examples/{basename}", True)],
965             into,
966             cond,
967             definition_source,
968         )
969
970     @classmethod
971     def install_man(
972         cls,
973         sources: Sequence[FileSystemMatchRule],
974         into: FrozenSet[BinaryPackage],
975         section: Optional[int],
976         language: Optional[str],
977         definition_source: str,
978         condition: Optional[ManifestCondition],
979     ) -> "InstallRule":
980         cond: ManifestCondition = _BUILD_DOCS_BDO
981         if condition is not None:    [981 ↛ 982: condition was never true]
982             cond = ManifestCondition.all_of([cond, condition])
983
984         dest_path_computer = _dest_path_for_manpage(
985             section, language, definition_source
986         )
987
988         return GenericInstallationRule(    [988 ↛ exit]
989             sources,
990             dest_path_computer,
991             into,
992             cond,
993             definition_source,
994             match_filter=lambda m: not m.is_file,
995         )
996
997     @classmethod
998     def discard_paths(
999         cls,
1000         paths: Sequence[FileSystemMatchRule],
1001         definition_source: str,
1002         condition: Optional[ManifestCondition],
1003         *,
1004         limit_to: Optional[Sequence[FileSystemExactMatchRule]] = None,
1005     ) -> "InstallRule":
1006         return DiscardRule(
1007             paths,
1008             condition,
1009             tuple(limit_to) if limit_to is not None else tuple(),
1010             definition_source,
1011         )
1012
1013
1014 class PPFInstallRule(InstallRule):
1015     __slots__ = (
1016         "_ppfs",
1017         "_substitution",
1018         "_into",
1019     )
1020
1021     def __init__(
1022         self,
1023         into: BinaryPackage,
1024         substitution: Substitution,
1025         ppfs: Sequence["PackagerProvidedFile"],
1026     ) -> None:
1027         super().__init__(
1028             None,
1029             "<built-in; PPF install rule>",
1030         )
1031         self._substitution = substitution
1032         self._ppfs = ppfs
1033         self._into = into
1034
1035     def perform_install(
1036         self,
1037         path_matcher: SourcePathMatcher,
1038         install_context: InstallRuleContext,
1039         condition_context: ConditionContext,
1040     ) -> None:
1041         binary_install_context = install_context[self._into.name]
1042         fs_root = binary_install_context.fs_root
1043         for ppf in self._ppfs:
1044             source_path = ppf.path.fs_path
1045             dest_dir, name = ppf.compute_dest()
1046             dir_path = fs_root.mkdirs(dest_dir)
1047
1048             dir_path.insert_file_from_fs_path(
1049                 name,
1050                 source_path,
1051                 follow_symlinks=True,
1052                 use_fs_path_mode=False,
1053                 mode=ppf.definition.default_mode,
1054             )
1055
1056
1057 class GenericInstallationRule(InstallRule):
1058     __slots__ = (
1059         "_sources",

-

1059 "_sources", 

-

1060 "_into", 

-

1061 "_dest_paths", 

-

1062 "_require_single_match", 

-

1063 ) 

-

1064 

-

1065 def __init__( 

-

1066 self, 

-

1067 sources: Sequence[FileSystemMatchRule], 

-

1068 dest_paths: Union[Sequence[Tuple[str, bool]], Callable[[PathMatch], str]], 

-

1069 into: FrozenSet[BinaryPackage], 

-

1070 condition: Optional[ManifestCondition], 

-

1071 definition_source: str, 

-

1072 *, 

-

1073 require_single_match: bool = False, 

-

1074 match_filter: Optional[Callable[["VirtualPath"], bool]] = None, 

-

1075 ) -> None: 

-

1076 super().__init__( 

-

1077 condition, 

-

1078 definition_source, 

-

1079 match_filter=match_filter, 

-

1080 ) 

-

1081 self._sources = sources 

-

1082 self._into = into 

-

1083 self._dest_paths = dest_paths 

-

1084 self._require_single_match = require_single_match 

-

1085 if self._require_single_match and len(sources) != 1: 1085 ↛ 1086line 1085 didn't jump to line 1086, because the condition on line 1085 was never true

-

1086 raise ValueError("require_single_match implies sources must have len 1") 

-

1087 

-

1088 def perform_install( 

-

1089 self, 

-

1090 path_matcher: SourcePathMatcher, 

-

1091 install_context: InstallRuleContext, 

-

1092 condition_context: ConditionContext, 

-

1093 ) -> None: 

-

1094 for source in self._sources: 

-

1095 matches = self._match_pattern( 

-

1096 path_matcher, 

-

1097 source, 

-

1098 condition_context, 

-

1099 install_context.search_dirs, 

-

1100 self._into, 

-

1101 ) 

-

1102 if self._require_single_match and len(matches) > 1: 

-

1103 self._check_single_match(source, matches) 

-

1104 self._install_matches( 

-

1105 path_matcher, 

-

1106 matches, 

-

1107 self._dest_paths, 

-

1108 install_context, 

-

1109 self._into, 

-

1110 condition_context, 

-

1111 ) 

-

1112 

-

1113 

-

1114class DiscardRule(InstallRule): 

-

1115 __slots__ = ("_fs_match_rules", "_limit_to") 

-

1116 

-

1117 def __init__( 

-

1118 self, 

-

1119 fs_match_rules: Sequence[FileSystemMatchRule], 

-

1120 condition: Optional[ManifestCondition], 

-

1121 limit_to: Sequence[FileSystemExactMatchRule], 

-

1122 definition_source: str, 

-

1123 ) -> None: 

-

1124 super().__init__(condition, definition_source) 

-

1125 self._fs_match_rules = fs_match_rules 

-

1126 self._limit_to = limit_to 

-

1127 

-

1128 def perform_install( 

-

1129 self, 

-

1130 path_matcher: SourcePathMatcher, 

-

1131 install_context: InstallRuleContext, 

-

1132 condition_context: ConditionContext, 

-

1133 ) -> None: 

-

1134 into = frozenset() 

-

1135 limit_to = self._limit_to 

-

1136 if limit_to: 

-

1137 matches = {x.match_rule.path for x in limit_to} 

-

1138 search_dirs = tuple( 

-

1139 s 

-

1140 for s in install_context.search_dirs 

-

1141 if s.search_dir.fs_path in matches 

-

1142 ) 

-

1143 if len(limit_to) != len(search_dirs): 

-

1144 matches.difference(s.search_dir.fs_path for s in search_dirs) 

-

1145 paths = ":".join(matches) 

-

1146 _error( 

-

1147 f"The discard rule defined at {self._definition_source} mentions the following" 

-

1148 f" search directories that were not known to debputy: {paths}." 

-

1149 " Either the search dir is missing somewhere else or it should be removed from" 

-

1150 " the discard rule." 

-

1151 ) 

-

1152 else: 

-

1153 search_dirs = install_context.search_dirs 

-

1154 

-

1155 for fs_match_rule in self._fs_match_rules: 

-

1156 self._match_pattern( 

-

1157 path_matcher, 

-

1158 fs_match_rule, 

-

1159 condition_context, 

-

1160 search_dirs, 

-

1161 into, 

-

1162 ) 

-
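For orientation, a minimal standalone sketch (not debputy's actual implementation) of how one (dest_path, is_format_string) pair built by the factory methods above could be expanded for a single matched source path; the substitution tokens {basename}, {dirname} and {doc_main_package_name} appear in the deleted source, while expand_dest and the "mypkg" package name are invented for illustration:

import os.path

# Sketch: expand one destination spec for a matched path. A False flag means
# the destination is literal (e.g. install_as/install_doc_as); a True flag
# means it is a format string with path-derived tokens.
def expand_dest(dest_path: str, is_format: bool, match_path: str) -> str:
    if not is_format:
        return dest_path
    return dest_path.format(
        basename=os.path.basename(match_path),
        dirname=os.path.dirname(match_path),
        doc_main_package_name="mypkg",  # hypothetical main doc package name
    )

assert expand_dest("{dirname}/{basename}", True, "usr/bin/tool") == "usr/bin/tool"
assert (
    expand_dest("usr/share/doc/{doc_main_package_name}/examples/{basename}", True, "examples/demo.py")
    == "usr/share/doc/mypkg/examples/demo.py"
)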
diff --git a/coverage-report/d_267b6307937f1878_intermediate_manifest_py.html b/coverage-report/d_267b6307937f1878_intermediate_manifest_py.html
deleted file mode 100644
index 685223a..0000000
--- a/coverage-report/d_267b6307937f1878_intermediate_manifest_py.html
+++ /dev/null
@@ -1,432 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/intermediate_manifest.py (62% coverage, 172 statements) — renders the module source: the PathType enum, the TarMember dataclass with its create_tar_info/from_file/virtual_path/to_manifest/from_dict helpers, and the output_intermediate_manifest* serialisation functions]
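Based on the deleted source shown in the report, a TarMember for a virtual path round-trips through the intermediate manifest roughly like this (a small sketch; the mtime value is arbitrary):

from debputy.intermediate_manifest import TarMember, PathType

# Virtual entries (directories/symlinks) have no backing file on disk.
member = TarMember.virtual_path("./usr/", PathType.DIRECTORY, mtime=1712491200.0, mode=0o755)
entry = member.to_manifest()
# The mode is serialised as an octal string and fs_path is omitted for
# virtual entries.
assert entry["path_type"] == "directory" and entry["mode"] == "0o755"
assert TarMember.from_dict(entry) == member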
diff --git a/coverage-report/d_267b6307937f1878_interpreter_py.html b/coverage-report/d_267b6307937f1878_interpreter_py.html
deleted file mode 100644
index b851c18..0000000
--- a/coverage-report/d_267b6307937f1878_interpreter_py.html
+++ /dev/null
@@ -1,319 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/interpreter.py (96% coverage, 82 statements) — renders the module source: the Interpreter and DetectedInterpreter classes plus the extract_shebang_interpreter(_from_file) helpers for detecting and correcting #!-lines]
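The doctests in the deleted interpreter.py source already demonstrate the intended use; the same API also yields the corrected #!-line for non-canonical interpreter paths, as in this short usage sketch:

from debputy.interpreter import extract_shebang_interpreter

# /usr/bin/sh is a non-canonical location for sh; the detector proposes /bin/sh
# and preserves trailing arguments.
detected = extract_shebang_interpreter(b"#! /usr/bin/sh -e")
assert detected.fixup_needed
assert detected.corrected_shebang_line == "#! /bin/sh -e"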
diff --git a/coverage-report/d_267b6307937f1878_maintscript_snippet_py.html b/coverage-report/d_267b6307937f1878_maintscript_snippet_py.html
deleted file mode 100644
index 1bdbaed..0000000
--- a/coverage-report/d_267b6307937f1878_maintscript_snippet_py.html
+++ /dev/null
@@ -1,283 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/maintscript_snippet.py (63% coverage, 87 statements) — renders the module source: the control-script name constants, MaintscriptSnippet/MaintscriptSnippetContainer, and the DpkgMaintscriptHelperCommand factories (rm_conffile, mv_conffile, symlink_to_dir, dir_to_symlink)]
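From the deleted source above, a MaintscriptSnippetContainer aggregates snippets and wraps them in the usual "Automatically added" markers; a minimal usage sketch, where the definition_source and tool strings are illustrative:

from debputy.maintscript_snippet import MaintscriptSnippet, MaintscriptSnippetContainer

container = MaintscriptSnippetContainer()
container.append(
    MaintscriptSnippet(definition_source="debian/debputy.manifest", snippet="ldconfig\n")
)
print(container.generate_snippet(tool_with_version="debputy/0.1.29"))
# # Automatically added by debputy/0.1.29
# # Snippet source: debian/debputy.manifest
# ldconfig
# # End automatically added section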
diff --git a/coverage-report/d_267b6307937f1878_manifest_conditions_py.html b/coverage-report/d_267b6307937f1878_manifest_conditions_py.html
deleted file mode 100644
index f548dd4..0000000
--- a/coverage-report/d_267b6307937f1878_manifest_conditions_py.html
+++ /dev/null
@@ -1,338 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/manifest_conditions.py (65% coverage, 134 statements) — renders the module source: ConditionContext, the ManifestCondition hierarchy (negation, any_of/all_of groups, architecture and build-profile matches) and the cross-building/build-time-tests singleton conditions]
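Conditions from the deleted manifest_conditions.py source compose declaratively; a small sketch of building and describing a combined condition (evaluating it would additionally require a fully populated ConditionContext):

from debputy.manifest_conditions import ManifestCondition

cond = ManifestCondition.all_of([
    ManifestCondition.run_build_time_tests(),
    ManifestCondition.is_cross_building().negated(),
])
print(cond.describe())
# All of: [Run build time tests, not (Cross Compiling (i.e., DEB_HOST_GNU_TYPE != DEB_BUILD_GNU_TYPE))]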
diff --git a/coverage-report/d_267b6307937f1878_packager_provided_files_py.html b/coverage-report/d_267b6307937f1878_packager_provided_files_py.html
deleted file mode 100644
index e481e46..0000000
--- a/coverage-report/d_267b6307937f1878_packager_provided_files_py.html
+++ /dev/null
@@ -1,422 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/packager_provided_files.py (84% coverage, 140 statements) — renders the module source: PackagerProvidedFile, PerPackagePackagerProvidedResult and the helpers that map debian/ files to their packages (_find_package_name_prefix, _find_definition, _split_path, detect_all_packager_provided_files)]

-

301 ): 

-

302 continue 

-

303 provided_files_for_package[match_key] = packager_provided_file 

-

304 

-

305 result = {} 

-

306 for package_name, provided_file_data in provided_files.items(): 

-

307 auto_install_list = [ 

-

308 x for x in provided_file_data.values() if not x.definition.reservation_only 

-

309 ] 

-

310 reservation_only = collections.defaultdict(list) 

-

311 for packager_provided_file in provided_file_data.values(): 

-

312 if not packager_provided_file.definition.reservation_only: 312 ↛ 314line 312 didn't jump to line 314, because the condition on line 312 was never false

-

313 continue 

-

314 reservation_only[packager_provided_file.definition.stem].append( 

-

315 packager_provided_file 

-

316 ) 

-

317 

-

318 result[package_name] = PerPackagePackagerProvidedResult( 

-

319 auto_install_list, 

-

320 reservation_only, 

-

321 ) 

-

322 

-

323 return result 

-
diff --git a/coverage-report/d_267b6307937f1878_packages_py.html b/coverage-report/d_267b6307937f1878_packages_py.html
deleted file mode 100644
index a71d42b..0000000
--- a/coverage-report/d_267b6307937f1878_packages_py.html
+++ /dev/null
@@ -1,431 +0,0 @@
[431 deleted lines: coverage.py v7.2.7 HTML report (created 2024-04-07 12:14 +0200) for src/debputy/packages.py: 51% of 167 statements covered, with annotated source listing]
diff --git a/coverage-report/d_267b6307937f1878_path_matcher_py.html b/coverage-report/d_267b6307937f1878_path_matcher_py.html
deleted file mode 100644
index 448a7aa..0000000
--- a/coverage-report/d_267b6307937f1878_path_matcher_py.html
+++ /dev/null
@@ -1,628 +0,0 @@
[628 deleted lines: coverage.py v7.2.7 HTML report (created 2024-04-07 12:14 +0200) for src/debputy/path_matcher.py: 72% of 279 statements covered, with annotated source listing]
diff --git a/coverage-report/d_267b6307937f1878_substitution_py.html b/coverage-report/d_267b6307937f1878_substitution_py.html
deleted file mode 100644
index 07beea1..0000000
--- a/coverage-report/d_267b6307937f1878_substitution_py.html
+++ /dev/null
@@ -1,435 +0,0 @@
[435 deleted lines: coverage.py v7.2.7 HTML report (created 2024-04-07 12:14 +0200) for src/debputy/substitution.py: 85% of 153 statements covered, with annotated source listing]
diff --git a/coverage-report/d_267b6307937f1878_transformation_rules_py.html b/coverage-report/d_267b6307937f1878_transformation_rules_py.html
deleted file mode 100644
index a6e4f58..0000000
--- a/coverage-report/d_267b6307937f1878_transformation_rules_py.html
+++ /dev/null
@@ -1,695 +0,0 @@
-
-

- Coverage for src/debputy/transformation_rules.py: - 73% -

- -

- 271 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1import dataclasses 

-

2import os 

-

3from typing import ( 

-

4 NoReturn, 

-

5 Optional, 

-

6 Callable, 

-

7 Sequence, 

-

8 Tuple, 

-

9 List, 

-

10 Literal, 

-

11 Dict, 

-

12 TypeVar, 

-

13 cast, 

-

14) 

-

15 

-

16from debputy.exceptions import ( 

-

17 DebputyRuntimeError, 

-

18 PureVirtualPathError, 

-

19 TestPathWithNonExistentFSPathError, 

-

20) 

-

21from debputy.filesystem_scan import FSPath 

-

22from debputy.interpreter import ( 

-

23 extract_shebang_interpreter_from_file, 

-

24) 

-

25from debputy.manifest_conditions import ConditionContext, ManifestCondition 

-

26from debputy.manifest_parser.base_types import ( 

-

27 FileSystemMode, 

-

28 StaticFileSystemOwner, 

-

29 StaticFileSystemGroup, 

-

30 DebputyDispatchableType, 

-

31) 

-

32from debputy.manifest_parser.util import AttributePath 

-

33from debputy.path_matcher import MatchRule 

-

34from debputy.plugin.api import VirtualPath 

-

35from debputy.plugin.debputy.types import DebputyCapability 

-

36from debputy.util import _warn 

-

37 

-

38 

-

39class TransformationRuntimeError(DebputyRuntimeError): 

-

40 pass 

-

41 

-

42 

-

43CreateSymlinkReplacementRule = Literal[ 

-

44 "error-if-exists", 

-

45 "error-if-directory", 

-

46 "abort-on-non-empty-directory", 

-

47 "discard-existing", 

-

48] 

-

49 

-

50 

-

51VP = TypeVar("VP", bound=VirtualPath) 

-

52 

-

53 

-

54@dataclasses.dataclass(frozen=True, slots=True) 

-

55class PreProvidedExclusion: 

-

56 tag: str 

-

57 description: str 

-

58 pruner: Callable[[FSPath], None] 

-

59 

-

60 

-

61class TransformationRule(DebputyDispatchableType): 

-

62 __slots__ = () 

-

63 

-

64 def transform_file_system( 

-

65 self, fs_root: FSPath, condition_context: ConditionContext 

-

66 ) -> None: 

-

67 raise NotImplementedError 

-

68 

-

69 def _evaluate_condition( 

-

70 self, 

-

71 condition: Optional[ManifestCondition], 

-

72 condition_context: ConditionContext, 

-

73 result_if_condition_is_missing: bool = True, 

-

74 ) -> bool: 

-

75 if condition is None: 75 ↛ 77line 75 didn't jump to line 77, because the condition on line 75 was never false

-

76 return result_if_condition_is_missing 

-

77 return condition.evaluate(condition_context) 

-

78 

-

79 def _error( 

-

80 self, 

-

81 msg: str, 

-

82 *, 

-

83 caused_by: Optional[BaseException] = None, 

-

84 ) -> NoReturn: 

-

85 raise TransformationRuntimeError(msg) from caused_by 

-

86 

-

87 def _match_rule_had_no_matches( 

-

88 self, match_rule: MatchRule, definition_source: str 

-

89 ) -> NoReturn: 

-

90 self._error( 

-

91 f'The match rule "{match_rule.describe_match_short()}" in transformation "{definition_source}" did' 

-

92 " not match any paths. Either the definition is redundant (and can be omitted) or the match rule is" 

-

93 " incorrect." 

-

94 ) 

-

95 

-

96 def _fs_path_as_dir( 

-

97 self, 

-

98 path: VP, 

-

99 definition_source: str, 

-

100 ) -> VP: 

-

101 if path.is_dir: 101 ↛ 103line 101 didn't jump to line 103, because the condition on line 101 was never false

-

102 return path 

-

103 path_type = "file" if path.is_file else 'symlink/"special file system object"' 

-

104 self._error( 

-

105 f"The path {path.path} was expected to be a directory (or non-existing) due to" 

-

106 f" {definition_source}. However that path existed and is a {path_type}." 

-

107 f" You may need a `remove: {path.path}` prior to {definition_source} to" 

-

108 " to make this transformation succeed." 

-

109 ) 

-

110 

-

111 def _ensure_is_directory( 

-

112 self, 

-

113 fs_root: FSPath, 

-

114 path_to_directory: str, 

-

115 definition_source: str, 

-

116 ) -> FSPath: 

-

117 current, missing_parts = fs_root.attempt_lookup(path_to_directory) 

-

118 current = self._fs_path_as_dir(cast("FSPath", current), definition_source) 

-

119 if missing_parts: 

-

120 return current.mkdirs("/".join(missing_parts)) 

-

121 return current 

-

122 

-

123 

-

class RemoveTransformationRule(TransformationRule):
    __slots__ = (
        "_match_rules",
        "_keep_empty_parent_dirs",
        "_definition_source",
    )

    def __init__(
        self,
        match_rules: Sequence[MatchRule],
        keep_empty_parent_dirs: bool,
        definition_source: AttributePath,
    ) -> None:
        self._match_rules = match_rules
        self._keep_empty_parent_dirs = keep_empty_parent_dirs
        self._definition_source = definition_source.path

    def transform_file_system(
        self,
        fs_root: FSPath,
        condition_context: ConditionContext,
    ) -> None:
        matched_any = False
        for match_rule in self._match_rules:
            # Fully resolve the matches to avoid RuntimeError caused by the collection changing size as a
            # consequence of the removal: https://salsa.debian.org/debian/debputy/-/issues/52
            matches = list(match_rule.finditer(fs_root))
            for m in matches:
                matched_any = True
                parent = m.parent_dir
                if parent is None:
                    self._error(
                        f"Cannot remove the root directory (triggered by {self._definition_source})"
                    )
                m.unlink(recursive=True)
                if not self._keep_empty_parent_dirs:
                    parent.prune_if_empty_dir()
            # FIXME: `rm` should probably be forgiving or at least support a condition to avoid failures
            if not matched_any:
                self._match_rule_had_no_matches(match_rule, self._definition_source)
class MoveTransformationRule(TransformationRule):
    __slots__ = (
        "_match_rule",
        "_dest_path",
        "_dest_is_dir",
        "_definition_source",
        "_condition",
    )

    def __init__(
        self,
        match_rule: MatchRule,
        dest_path: str,
        dest_is_dir: bool,
        definition_source: AttributePath,
        condition: Optional[ManifestCondition],
    ) -> None:
        self._match_rule = match_rule
        self._dest_path = dest_path
        self._dest_is_dir = dest_is_dir
        self._definition_source = definition_source.path
        self._condition = condition

    def transform_file_system(
        self, fs_root: FSPath, condition_context: ConditionContext
    ) -> None:
        if not self._evaluate_condition(self._condition, condition_context):
            return
        # Eager resolve is necessary to avoid "self-recursive" matching in special cases (e.g., **/*.la)
        matches = list(self._match_rule.finditer(fs_root))
        if not matches:
            self._match_rule_had_no_matches(self._match_rule, self._definition_source)

        target_dir: Optional[VirtualPath]
        if self._dest_is_dir:
            target_dir = self._ensure_is_directory(
                fs_root,
                self._dest_path,
                self._definition_source,
            )
        else:
            dir_part, basename = os.path.split(self._dest_path)
            target_parent_dir = self._ensure_is_directory(
                fs_root,
                dir_part,
                self._definition_source,
            )
            target_dir = target_parent_dir.get(basename)

        if target_dir is None or not target_dir.is_dir:
            if len(matches) > 1:
                self._error(
                    f"Could not rename {self._match_rule.describe_match_short()} to {self._dest_path}"
                    f" (from: {self._definition_source}). Multiple paths matched the pattern and the"
                    " destination was not a directory. Either correct the pattern to match only one source"
                    " OR define the destination to be a directory (E.g., add a trailing slash - example:"
                    f' "{self._dest_path}/")'
                )
            p = matches[0]
            if p.path == self._dest_path:
                self._error(
                    f"Error in {self._definition_source}, the source"
                    f" {self._match_rule.describe_match_short()} matched {self._dest_path} making the"
                    " rename redundant!?"
                )
            p.parent_dir = target_parent_dir
            p.name = basename
            return

        assert target_dir is not None and target_dir.is_dir
        basenames: Dict[str, VirtualPath] = dict()
        target_dir_path = target_dir.path

        for m in matches:
            if m.path == target_dir_path:
                self._error(
                    f"Error in {self._definition_source}, the source {self._match_rule.describe_match_short()}"
                    f" matched {self._dest_path} (among others), but it is not possible to copy a directory into"
                    " itself"
                )
            if m.name in basenames:
                alt_path = basenames[m.name]
                # We document "two *distinct*" paths. However, as the glob matches are written, it should not be
                # possible for a *single* glob to match the same path twice.
                assert alt_path is not m
                self._error(
                    f"Could not rename {self._match_rule.describe_match_short()} to {self._dest_path}"
                    f" (from: {self._definition_source}). Multiple paths that matched the pattern had the"
                    f' same basename "{m.name}" ("{m.path}" vs. "{alt_path.path}"). Please correct the'
                    f" pattern, so it only matches one path with that basename, to avoid this conflict."
                )
            existing = target_dir.get(m.name)
            if existing and existing.is_dir:
                self._error(
                    f"Could not rename {self._match_rule.describe_match_short()} to {self._dest_path}"
                    f" (from: {self._definition_source}). The pattern matched {m.path} which would replace"
                    f" the existing directory {existing.path}. If this replacement is intentional, then please"
                    f' remove "{existing.path}" first (e.g., via `- remove: "{existing.path}"`)'
                )
            basenames[m.name] = m
            m.parent_dir = target_dir
class CreateSymlinkPathTransformationRule(TransformationRule):
    __slots__ = (
        "_link_dest",
        "_link_target",
        "_replacement_rule",
        "_definition_source",
        "_condition",
    )

    def __init__(
        self,
        link_target: str,
        link_dest: str,
        replacement_rule: CreateSymlinkReplacementRule,
        definition_source: AttributePath,
        condition: Optional[ManifestCondition],
    ) -> None:
        self._link_target = link_target
        self._link_dest = link_dest
        self._replacement_rule = replacement_rule
        self._definition_source = definition_source.path
        self._condition = condition

    def transform_file_system(
        self,
        fs_root: FSPath,
        condition_context: ConditionContext,
    ) -> None:
        if not self._evaluate_condition(self._condition, condition_context):
            return
        dir_path_part, link_name = os.path.split(self._link_dest)
        dir_path = self._ensure_is_directory(
            fs_root,
            dir_path_part,
            self._definition_source,
        )
        existing = dir_path.get(link_name)
        if existing:
            self._handle_existing_path(existing)
        dir_path.add_symlink(link_name, self._link_target)

    def _handle_existing_path(self, existing: VirtualPath) -> None:
        replacement_rule = self._replacement_rule
        if replacement_rule == "abort-on-non-empty-directory":
            unlink = not existing.is_dir or not any(existing.iterdir)
            reason = "the path is a non-empty directory"
        elif replacement_rule == "discard-existing":
            unlink = True
            reason = "<<internal error: you should not see an error with this message>>"
        elif replacement_rule == "error-if-directory":
            unlink = not existing.is_dir
            reason = "the path is a directory"
        else:
            assert replacement_rule == "error-if-exists"
            unlink = False
            reason = "the path exists"

        if unlink:
            existing.unlink(recursive=True)
        else:
            self._error(
                f"Refusing to replace {existing.path} with a symlink; {reason} and"
                f" the active replacement-rule was {self._replacement_rule}. You can"
                ' set the replacement-rule to "discard-existing", if you are not interested'
                f" in the contents of {existing.path}. This error was triggered by {self._definition_source}."
            )
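For reference, the replacement-rule handling in _handle_existing_path above reduces to the following decision table (a standalone sketch; may_unlink, is_dir and is_empty_dir are hypothetical stand-ins for the VirtualPath checks):

def may_unlink(replacement_rule: str, is_dir: bool, is_empty_dir: bool) -> bool:
    # Mirrors _handle_existing_path: True means the existing path may be
    # removed to make room for the symlink; False means the rule errors out.
    if replacement_rule == "discard-existing":
        return True
    if replacement_rule == "abort-on-non-empty-directory":
        return not is_dir or is_empty_dir
    if replacement_rule == "error-if-directory":
        return not is_dir
    assert replacement_rule == "error-if-exists"
    return False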
class CreateDirectoryTransformationRule(TransformationRule):
    __slots__ = (
        "_directories",
        "_owner",
        "_group",
        "_mode",
        "_definition_source",
        "_condition",
    )

    def __init__(
        self,
        directories: Sequence[str],
        owner: Optional[StaticFileSystemOwner],
        group: Optional[StaticFileSystemGroup],
        mode: Optional[FileSystemMode],
        definition_source: str,
        condition: Optional[ManifestCondition],
    ) -> None:
        super().__init__()
        self._directories = directories
        self._owner = owner
        self._group = group
        self._mode = mode
        self._definition_source = definition_source
        self._condition = condition

    def transform_file_system(
        self,
        fs_root: FSPath,
        condition_context: ConditionContext,
    ) -> None:
        if not self._evaluate_condition(self._condition, condition_context):
            return
        owner = self._owner
        group = self._group
        mode = self._mode
        for directory in self._directories:
            dir_path = self._ensure_is_directory(
                fs_root,
                directory,
                self._definition_source,
            )

            if mode is not None:
                try:
                    desired_mode = mode.compute_mode(dir_path.mode, dir_path.is_dir)
                except ValueError as e:
                    self._error(
                        f"Could not compute desired mode for {dir_path.path} as"
                        f" requested in {self._definition_source}: {e.args[0]}",
                        caused_by=e,
                    )
                dir_path.mode = desired_mode
            dir_path.chown(owner, group)
def _apply_owner_and_mode(
    path: VirtualPath,
    owner: Optional[StaticFileSystemOwner],
    group: Optional[StaticFileSystemGroup],
    mode: Optional[FileSystemMode],
    capabilities: Optional[str],
    capability_mode: Optional[FileSystemMode],
    definition_source: str,
) -> None:
    if owner is not None or group is not None:
        path.chown(owner, group)
    if mode is not None:
        try:
            desired_mode = mode.compute_mode(path.mode, path.is_dir)
        except ValueError as e:
            raise TransformationRuntimeError(
                f"Could not compute desired mode for {path.path} as"
                f" requested in {definition_source}: {e.args[0]}"
            ) from e
        path.mode = desired_mode

    if path.is_file and capabilities is not None:
        cap_ref = path.metadata(DebputyCapability)
        cap_value = cap_ref.value
        if cap_value is not None:
            _warn(
                f"Replacing the capabilities set on path {path.path} from {cap_value.definition_source} due"
                f" to {definition_source}."
            )
        assert capability_mode is not None
        cap_ref.value = DebputyCapability(
            capabilities,
            capability_mode,
            definition_source,
        )
class PathMetadataTransformationRule(TransformationRule):
    __slots__ = (
        "_match_rules",
        "_owner",
        "_group",
        "_mode",
        "_capabilities",
        "_capability_mode",
        "_recursive",
        "_definition_source",
        "_condition",
    )

    def __init__(
        self,
        match_rules: Sequence[MatchRule],
        owner: Optional[StaticFileSystemOwner],
        group: Optional[StaticFileSystemGroup],
        mode: Optional[FileSystemMode],
        recursive: bool,
        capabilities: Optional[str],
        capability_mode: Optional[FileSystemMode],
        definition_source: str,
        condition: Optional[ManifestCondition],
    ) -> None:
        super().__init__()
        self._match_rules = match_rules
        self._owner = owner
        self._group = group
        self._mode = mode
        self._capabilities = capabilities
        self._capability_mode = capability_mode
        self._recursive = recursive
        self._definition_source = definition_source
        self._condition = condition
        if self._capabilities is None and self._capability_mode is not None:
            raise ValueError("capability_mode without capabilities")
        if self._capabilities is not None and self._capability_mode is None:
            raise ValueError("capabilities without capability_mode")

    def transform_file_system(
        self,
        fs_root: FSPath,
        condition_context: ConditionContext,
    ) -> None:
        if not self._evaluate_condition(self._condition, condition_context):
            return
        owner = self._owner
        group = self._group
        mode = self._mode
        capabilities = self._capabilities
        capability_mode = self._capability_mode
        definition_source = self._definition_source
        d: Optional[List[FSPath]] = [] if self._recursive else None
        needs_file_match = False
        if self._owner is not None or self._group is not None or self._mode is not None:
            needs_file_match = True

        for match_rule in self._match_rules:
            match_ok = False
            saw_symlink = False
            saw_directory = False

            for path in match_rule.finditer(fs_root):
                if path.is_symlink:
                    saw_symlink = True
                    continue
                if path.is_file or not needs_file_match:
                    match_ok = True
                if path.is_dir:
                    saw_directory = True
                    if not match_ok and needs_file_match and self._recursive:
                        match_ok = any(p.is_file for p in path.all_paths())
                _apply_owner_and_mode(
                    path,
                    owner,
                    group,
                    mode,
                    capabilities,
                    capability_mode,
                    definition_source,
                )
                if path.is_dir and d is not None:
                    d.append(path)

            if not match_ok:
                if needs_file_match and (saw_directory or saw_symlink):
                    _warn(
                        f"The match rule {match_rule.describe_match_short()} (from {self._definition_source})"
                        " did not match any files, but given the attributes it can only apply to files."
                    )
                elif saw_symlink:
                    _warn(
                        f"The match rule {match_rule.describe_match_short()} (from {self._definition_source})"
                        ' matched symlinks, but "path-metadata" cannot apply to symlinks.'
                    )
                self._match_rule_had_no_matches(match_rule, self._definition_source)

        if not d:
            return
        for recurse_dir in d:
            for path in recurse_dir.all_paths():
                if path.is_symlink:
                    continue
                _apply_owner_and_mode(
                    path,
                    owner,
                    group,
                    mode,
                    capabilities,
                    capability_mode,
                    definition_source,
                )
class ModeNormalizationTransformationRule(TransformationRule):
    __slots__ = ("_normalizations",)

    def __init__(
        self,
        normalizations: Sequence[Tuple[MatchRule, FileSystemMode]],
    ) -> None:
        self._normalizations = normalizations

    def transform_file_system(
        self,
        fs_root: FSPath,
        condition_context: ConditionContext,
    ) -> None:
        seen = set()
        for match_rule, fs_mode in self._normalizations:
            for path in match_rule.finditer(
                fs_root, ignore_paths=lambda p: p.path in seen
            ):
                if path.is_symlink or path.path in seen:
                    continue
                seen.add(path.path)
                try:
                    desired_mode = fs_mode.compute_mode(path.mode, path.is_dir)
                except ValueError as e:
                    raise AssertionError(
                        "Error while applying built-in mode normalization rule"
                    ) from e
                path.mode = desired_mode
class NormalizeShebangLineTransformation(TransformationRule):
    def transform_file_system(
        self,
        fs_root: VirtualPath,
        condition_context: ConditionContext,
    ) -> None:
        for path in fs_root.all_paths():
            if not path.is_file:
                continue
            try:
                with path.open(byte_io=True, buffering=4096) as fd:
                    interpreter = extract_shebang_interpreter_from_file(fd)
            except (PureVirtualPathError, TestPathWithNonExistentFSPathError):
                # Do not make tests unnecessarily complex to write
                continue
            if interpreter is None:
                continue

            if interpreter.fixup_needed:
                interpreter.replace_shebang_line(path)
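To illustrate the pattern the concrete rules above share, a minimal custom rule might look like the following (a sketch only; FailIfPresentTransformationRule is hypothetical, and it reuses the helpers inherited from TransformationRule plus the module's imports, which are outside this excerpt):

class FailIfPresentTransformationRule(TransformationRule):
    # Sketch: refuse the build when a configured path exists in the package root.
    __slots__ = ("_path", "_definition_source", "_condition")

    def __init__(self, path, definition_source, condition):
        self._path = path
        self._definition_source = definition_source.path
        self._condition = condition

    def transform_file_system(self, fs_root, condition_context):
        if not self._evaluate_condition(self._condition, condition_context):
            return
        # attempt_lookup() returns the deepest existing path plus any missing
        # segments; no missing segments means the full path exists.
        _, missing_parts = fs_root.attempt_lookup(self._path)
        if not missing_parts:
            self._error(f"{self._path} exists (refused by {self._definition_source})")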
diff --git a/coverage-report/d_267b6307937f1878_types_py.html b/coverage-report/d_267b6307937f1878_types_py.html
deleted file mode 100644
index 20b664a..0000000
--- a/coverage-report/d_267b6307937f1878_types_py.html
+++ /dev/null
@@ -1,108 +0,0 @@
Coverage for src/debputy/types.py: 100% (3 statements); coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
from typing import TypeVar, TYPE_CHECKING

if TYPE_CHECKING:
    from debputy.plugin.api import VirtualPath
    from debputy.filesystem_scan import FSPath


VP = TypeVar("VP", "VirtualPath", "FSPath")
S = TypeVar("S", str, bytes)
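A constrained TypeVar such as S above restricts a generic function to exactly the listed types, with each call resolving to one of them (an illustrative sketch, not from the patch):

def first_line(text: S) -> S:
    # Valid for both constraints: str and bytes each provide splitlines().
    return text.splitlines()[0]

first_line("a\nb")   # -> "a"
first_line(b"a\nb")  # -> b"a"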
diff --git a/coverage-report/d_267b6307937f1878_util_py.html b/coverage-report/d_267b6307937f1878_util_py.html
deleted file mode 100644
index 5381dcb..0000000
--- a/coverage-report/d_267b6307937f1878_util_py.html
+++ /dev/null
@@ -1,903 +0,0 @@
Coverage for src/debputy/util.py: 65% (426 statements); coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
import argparse
import collections
import functools
import glob
import logging
import os
import re
import shutil
import subprocess
import sys
import time
from itertools import zip_longest
from pathlib import Path
from typing import (
    NoReturn,
    TYPE_CHECKING,
    Union,
    Set,
    FrozenSet,
    Optional,
    TypeVar,
    Dict,
    Iterator,
    Iterable,
    Literal,
    Tuple,
    Sequence,
    List,
    Mapping,
    Any,
)

from debian.deb822 import Deb822

from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable
from debputy.exceptions import DebputySubstitutionError

if TYPE_CHECKING:
    from debputy.packages import BinaryPackage
    from debputy.substitution import Substitution


T = TypeVar("T")


SLASH_PRUNE = re.compile("//+")
PKGNAME_REGEX = re.compile(r"[a-z0-9][-+.a-z0-9]+", re.ASCII)
PKGVERSION_REGEX = re.compile(
    r"""
    (?: \d+ : )?               # Optional epoch
    \d[0-9A-Za-z.+:~]*         # Upstream version (with no hyphens)
    (?: - [0-9A-Za-z.+:~]+ )*  # Optional debian revision (+ upstream versions with hyphens)
""",
    re.VERBOSE | re.ASCII,
)
DEFAULT_PACKAGE_TYPE = "deb"
DBGSYM_PACKAGE_TYPE = "deb"
UDEB_PACKAGE_TYPE = "udeb"

POSTINST_DEFAULT_CONDITION = (
    '[ "$1" = "configure" ]'
    ' || [ "$1" = "abort-upgrade" ]'
    ' || [ "$1" = "abort-deconfigure" ]'
    ' || [ "$1" = "abort-remove" ]'
)


_SPACE_RE = re.compile(r"\s")
_DOUBLE_ESCAPEES = re.compile(r'([\n`$"\\])')
_REGULAR_ESCAPEES = re.compile(r'([\s!"$()*+#;<>?@\[\]\\`|~])')
_PROFILE_GROUP_SPLIT = re.compile(r">\s+<")
_DEFAULT_LOGGER: Optional[logging.Logger] = None
_STDOUT_HANDLER: Optional[logging.StreamHandler] = None
_STDERR_HANDLER: Optional[logging.StreamHandler] = None
def assume_not_none(x: Optional[T]) -> T:
    if x is None:  # pragma: no cover
        raise ValueError(
            'Internal error: None was given, but the receiver assumed "not None" here'
        )
    return x


def _info(msg: str) -> None:
    global _DEFAULT_LOGGER
    logger = _DEFAULT_LOGGER
    if logger:
        logger.info(msg)
    # No fallback print for info


def _error(msg: str, *, prog: Optional[str] = None) -> "NoReturn":
    global _DEFAULT_LOGGER
    logger = _DEFAULT_LOGGER
    if logger:
        logger.error(msg)
    else:
        me = os.path.basename(sys.argv[0]) if prog is None else prog
        print(
            f"{me}: error: {msg}",
            file=sys.stderr,
        )
    sys.exit(1)


def _warn(msg: str, *, prog: Optional[str] = None) -> None:
    global _DEFAULT_LOGGER
    logger = _DEFAULT_LOGGER
    if logger:
        logger.warning(msg)
    else:
        me = os.path.basename(sys.argv[0]) if prog is None else prog

        print(
            f"{me}: warning: {msg}",
            file=sys.stderr,
        )


class ColorizedArgumentParser(argparse.ArgumentParser):
    def error(self, message: str) -> NoReturn:
        self.print_usage(sys.stderr)
        _error(message, prog=self.prog)
def ensure_dir(path: str) -> None:
    if not os.path.isdir(path):
        os.makedirs(path, mode=0o755, exist_ok=True)


def _clean_path(orig_p: str) -> str:
    p = SLASH_PRUNE.sub("/", orig_p)
    if "." in p:
        path_base = p
        # We permit a single leading "./" because we add that when we normalize a path, and we want normalization
        # of a normalized path to be a no-op.
        if path_base.startswith("./"):
            path_base = path_base[2:]
            assert path_base
        for segment in path_base.split("/"):
            if segment in (".", ".."):
                raise ValueError(
                    'Please provide paths that are normalized (i.e., no ".." or ".").'
                    f' Offending input "{orig_p}"'
                )
    return p


def _normalize_path(path: str, with_prefix: bool = True) -> str:
    path = path.strip("/")
    if not path or path == ".":
        return "."
    if "//" in path or "." in path:
        path = _clean_path(path)
    if with_prefix ^ path.startswith("./"):
        if with_prefix:
            path = "./" + path
        else:
            path = path[2:]
    return path


def _normalize_link_target(link_target: str) -> str:
    link_target = SLASH_PRUNE.sub("/", link_target.lstrip("/"))
    result: List[str] = []
    for segment in link_target.split("/"):
        if segment in (".", ""):
            # Ignore these - the empty string is generally a trailing slash
            continue
        if segment == "..":
            # We ignore "root escape attempts" like the OS would (mapping /.. -> /)
            if result:
                result.pop()
        else:
            result.append(segment)
    return "/".join(result)
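The behaviour of the two normalizers above, derived from the code (doctest-style illustration):

# _normalize_path("/usr//bin/")                  -> "./usr/bin"
# _normalize_path("usr/bin", with_prefix=False)  -> "usr/bin"
# _normalize_link_target("../../etc//x")         -> "etc/x"  (root escapes are clamped)
# _normalize_link_target("a/./b/")               -> "a/b"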
def _backslash_escape(m: re.Match[str]) -> str:
    return "\\" + m.group(0)


def _escape_shell_word(w: str) -> str:
    if _SPACE_RE.match(w):
        w = _DOUBLE_ESCAPEES.sub(_backslash_escape, w)
        return f'"{w}"'
    return _REGULAR_ESCAPEES.sub(_backslash_escape, w)


def escape_shell(*args: str) -> str:
    return " ".join(_escape_shell_word(w) for w in args)


def print_command(*args: str) -> None:
    print(f"  {escape_shell(*args)}")
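escape_shell() joins each word after escaping; for example (results derived from the regexes above):

# escape_shell("rm", "-f", "a b.txt")  -> r'rm -f a\ b.txt'
# escape_shell("echo", "$HOME")        -> r'echo \$HOME'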
def debian_policy_normalize_symlink_target(
    link_path: str,
    link_target: str,
    normalize_link_path: bool = False,
) -> str:
    if normalize_link_path:
        link_path = _normalize_path(link_path)
    elif not link_path.startswith("./"):
        raise ValueError("Link part was not normalized")

    link_path = link_path[2:]

    if not link_target.startswith("/"):
        link_target = "/" + os.path.dirname(link_path) + "/" + link_target

    link_path_parts = link_path.split("/")
    link_target_parts = [
        s for s in _normalize_link_target(link_target).split("/") if s != "."
    ]

    assert link_path_parts

    if link_target_parts and link_path_parts[0] == link_target_parts[0]:
        # Per Debian Policy, must be relative

        # First determine the length of the overlap
        common_segment_count = 1
        shortest_path_length = min(len(link_target_parts), len(link_path_parts))
        while (
            common_segment_count < shortest_path_length
            and link_target_parts[common_segment_count]
            == link_path_parts[common_segment_count]
        ):
            common_segment_count += 1

        if common_segment_count == shortest_path_length and len(
            link_path_parts
        ) - 1 == len(link_target_parts):
            normalized_link_target = "."
        else:
            up_dir_count = len(link_path_parts) - 1 - common_segment_count
            normalized_link_target_parts = []
            if up_dir_count:
                up_dir_part = "../" * up_dir_count
                # We overshoot with a single '/', so rstrip it away
                normalized_link_target_parts.append(up_dir_part.rstrip("/"))
            # Add the relevant down parts
            normalized_link_target_parts.extend(
                link_target_parts[common_segment_count:]
            )

            normalized_link_target = "/".join(normalized_link_target_parts)
    else:
        # Per Debian Policy, must be absolute
        normalized_link_target = "/" + "/".join(link_target_parts)

    return normalized_link_target
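Worked examples of the Policy normalization above (values derived from the code; link paths are given in the normalized "./..." form):

# ("./usr/bin/foo",   "/usr/bin/bar")  -> "bar"          (same directory: relative)
# ("./usr/share/a/x", "/usr/lib/y")    -> "../../lib/y"  (same top level: relative)
# ("./usr/bin/foo",   "/etc/foo")      -> "/etc/foo"     (different top level: absolute)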
def has_glob_magic(pattern: str) -> bool:
    return glob.has_magic(pattern) or "{" in pattern


def glob_escape(replacement_value: str) -> str:
    if not glob.has_magic(replacement_value) and "{" not in replacement_value:
        return replacement_value
    # Escape in a single pass; chaining str.replace() would re-escape the
    # bracket characters introduced by earlier replacements.
    return re.sub(r"([\[\]*?{}])", r"[\1]", replacement_value)


# TODO: This logic should probably be moved to `python-debian`
def active_profiles_match(
    profiles_raw: str,
    active_build_profiles: Union[Set[str], FrozenSet[str]],
) -> bool:
    profiles_raw = profiles_raw.strip()
    if profiles_raw[0] != "<" or profiles_raw[-1] != ">" or profiles_raw == "<>":
        raise ValueError(
            'Invalid Build-Profiles: Must start and end with "<" + ">" but cannot be a literal "<>"'
        )
    profile_groups = _PROFILE_GROUP_SPLIT.split(profiles_raw[1:-1])
    for profile_group_raw in profile_groups:
        should_process_package = True
        for profile_name in profile_group_raw.split():
            negation = False
            if profile_name[0] == "!":
                negation = True
                profile_name = profile_name[1:]

            matched_profile = profile_name in active_build_profiles
            if matched_profile == negation:
                should_process_package = False
                break

        if should_process_package:
            return True

    return False


def _parse_build_profiles(build_profiles_raw: str) -> FrozenSet[FrozenSet[str]]:
    profiles_raw = build_profiles_raw.strip()
    if profiles_raw[0] != "<" or profiles_raw[-1] != ">" or profiles_raw == "<>":
        raise ValueError(
            'Invalid Build-Profiles: Must start and end with "<" + ">" but cannot be a literal "<>"'
        )
    profile_groups = _PROFILE_GROUP_SPLIT.split(profiles_raw[1:-1])
    return frozenset(frozenset(g.split()) for g in profile_groups)
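active_profiles_match() implements the usual Build-Profiles semantics: separate <...> groups are OR'ed, names within one group are AND'ed, and "!" negates. Illustration (derived from the code):

# active_profiles_match("<!nocheck>", frozenset())        -> True
# active_profiles_match("<!nocheck>", {"nocheck"})        -> False
# active_profiles_match("<pkg.a> <!nocheck>", {"pkg.a"})  -> True  (first group matches)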
def resolve_source_date_epoch(
    command_line_value: Optional[int],
    *,
    substitution: Optional["Substitution"] = None,
) -> int:
    mtime = command_line_value
    if mtime is None and "SOURCE_DATE_EPOCH" in os.environ:
        sde_raw = os.environ["SOURCE_DATE_EPOCH"]
        if sde_raw == "":
            _error("SOURCE_DATE_EPOCH is set but empty.")
        mtime = int(sde_raw)
    if mtime is None and substitution is not None:
        try:
            sde_raw = substitution.substitute(
                "{{SOURCE_DATE_EPOCH}}",
                "Internal resolution",
            )
            mtime = int(sde_raw)
        except (DebputySubstitutionError, ValueError):
            pass
    if mtime is None:
        mtime = int(time.time())
    os.environ["SOURCE_DATE_EPOCH"] = str(mtime)
    return mtime
def compute_output_filename(control_root_dir: str, is_udeb: bool) -> str:
    with open(os.path.join(control_root_dir, "control"), "rt") as fd:
        control_file = Deb822(fd)

    package_name = control_file["Package"]
    package_version = control_file["Version"]
    package_architecture = control_file["Architecture"]
    extension = control_file.get("Package-Type") or "deb"
    if ":" in package_version:
        package_version = package_version.split(":", 1)[1]
    if is_udeb:
        extension = "udeb"

    return f"{package_name}_{package_version}_{package_architecture}.{extension}"
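A minimal sketch exercising compute_output_filename() with hypothetical control values (the epoch in Version is stripped from the filename):

import os
import tempfile

with tempfile.TemporaryDirectory() as control_dir:
    with open(os.path.join(control_dir, "control"), "w") as fd:
        fd.write("Package: foo\nVersion: 1:2.0-1\nArchitecture: amd64\n")
    assert compute_output_filename(control_dir, False) == "foo_2.0-1_amd64.deb"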
_SCRATCH_DIR = None
_DH_INTEGRATION_MODE = False


def integrated_with_debhelper() -> None:
    global _DH_INTEGRATION_MODE
    _DH_INTEGRATION_MODE = True


def scratch_dir() -> str:
    global _SCRATCH_DIR
    if _SCRATCH_DIR is not None:
        return _SCRATCH_DIR
    debputy_scratch_dir = "debian/.debputy/scratch-dir"
    is_debputy_dir = True
    if os.path.isdir("debian/.debputy") and not _DH_INTEGRATION_MODE:
        _SCRATCH_DIR = debputy_scratch_dir
    elif os.path.isdir("debian/.debhelper") or _DH_INTEGRATION_MODE:
        _SCRATCH_DIR = "debian/.debhelper/_debputy/scratch-dir"
        is_debputy_dir = False
    else:
        _SCRATCH_DIR = debputy_scratch_dir
    ensure_dir(_SCRATCH_DIR)
    if is_debputy_dir:
        Path("debian/.debputy/.gitignore").write_text("*\n")
    return _SCRATCH_DIR
_RUNTIME_CONTAINER_DIR_KEY: Optional[str] = None


def generated_content_dir(
    *,
    package: Optional["BinaryPackage"] = None,
    subdir_key: Optional[str] = None,
) -> str:
    global _RUNTIME_CONTAINER_DIR_KEY
    container_dir = _RUNTIME_CONTAINER_DIR_KEY
    first_run = False

    if container_dir is None:
        first_run = True
        container_dir = f"_pb-{os.getpid()}"
        _RUNTIME_CONTAINER_DIR_KEY = container_dir

    directory = os.path.join(scratch_dir(), container_dir)

    if first_run and os.path.isdir(directory):
        # In the unlikely case there is a re-run with exactly the same pid, `debputy` should not
        # see "stale" data.
        # TODO: Ideally, we would always clean up this directory on failure, but `atexit` is not
        # reliable enough for that and we do not have an obvious hook for it.
        shutil.rmtree(directory)

    directory = os.path.join(
        directory,
        "generated-fs-content",
        f"pkg_{package.name}" if package else "no-package",
    )
    if subdir_key is not None:
        directory = os.path.join(directory, subdir_key)

    os.makedirs(directory, exist_ok=True)
    return directory
PerlIncDir = collections.namedtuple("PerlIncDir", ["vendorlib", "vendorarch"])
PerlConfigData = collections.namedtuple("PerlConfigData", ["version", "debian_abi"])
_PERL_MODULE_DIRS: Dict[str, PerlIncDir] = {}


@functools.lru_cache(1)
def _perl_config_data() -> PerlConfigData:
    d = (
        subprocess.check_output(
            [
                "perl",
                "-MConfig",
                "-e",
                'print "$Config{version}\n$Config{debian_abi}\n"',
            ]
        )
        .decode("utf-8")
        .splitlines()
    )
    return PerlConfigData(*d)


def _perl_version() -> str:
    return _perl_config_data().version


def perlxs_api_dependency() -> str:
    # dh_perl used the build version of perl for this, so we will too. Most of the perl cross logic
    # assumes that the major version of the build variant of Perl is the same as the host variant of Perl.
    config = _perl_config_data()
    if config.debian_abi is not None and config.debian_abi != "":
        return f"perlapi-{config.debian_abi}"
    return f"perlapi-{config.version}"


def perl_module_dirs(
    dpkg_architecture_variables: DpkgArchitectureBuildProcessValuesTable,
    dctrl_bin: "BinaryPackage",
) -> PerlIncDir:
    global _PERL_MODULE_DIRS
    arch = (
        dctrl_bin.resolved_architecture
        if dpkg_architecture_variables.is_cross_compiling
        else "_default_"
    )
    module_dir = _PERL_MODULE_DIRS.get(arch)
    if module_dir is None:
        cmd = ["perl"]
        if dpkg_architecture_variables.is_cross_compiling:
            version = _perl_version()
            inc_dir = f"/usr/lib/{dctrl_bin.deb_multiarch}/perl/cross-config-{version}"
            # FIXME: This should not fall back to "build-arch", but on the other hand, we use the perl module dirs
            # for every package at the moment. So mandating correct perl dirs implies mandating perl-xs-dev in
            # cross builds... meh.
            if os.path.exists(os.path.join(inc_dir, "Config.pm")):
                cmd.append(f"-I{inc_dir}")
        cmd.extend(
            ["-MConfig", "-e", 'print "$Config{vendorlib}\n$Config{vendorarch}\n"']
        )
        output = subprocess.check_output(cmd).decode("utf-8").splitlines(keepends=False)
        if len(output) != 2:
            raise ValueError(
                "Internal error: Unable to determine the perl include directories:"
                f" Raw output from perl snippet: {output}"
            )
        module_dir = PerlIncDir(
            vendorlib=_normalize_path(output[0]),
            vendorarch=_normalize_path(output[1]),
        )
        _PERL_MODULE_DIRS[arch] = module_dir
    return module_dir
@functools.lru_cache(1)
def detect_fakeroot() -> bool:
    if os.getuid() != 0 or "LD_PRELOAD" not in os.environ:
        return False
    env = dict(os.environ)
    del env["LD_PRELOAD"]
    try:
        return subprocess.check_output(["id", "-u"], env=env).strip() != b"0"
    except subprocess.CalledProcessError:
        print(
            'Could not run "id -u" with LD_PRELOAD unset; assuming we are not run under fakeroot',
            file=sys.stderr,
        )
        return False


@functools.lru_cache(1)
def _sc_arg_max() -> Optional[int]:
    try:
        return os.sysconf("SC_ARG_MAX")
    except RuntimeError:
        _warn("Could not resolve SC_ARG_MAX, falling back to a hard-coded limit")
        return None
def _split_xargs_args(
    static_cmd: Sequence[str],
    max_args_byte_len: int,
    varargs: Iterable[str],
    reuse_list_ok: bool,
) -> Iterator[List[str]]:
    static_cmd_len = len(static_cmd)
    remaining_len = max_args_byte_len
    pending_args = list(static_cmd)
    for arg in varargs:
        arg_len = len(arg.encode("utf-8")) + 1  # +1 for leading space
        remaining_len -= arg_len
        if remaining_len < 0:
            if len(pending_args) <= static_cmd_len:
                raise ValueError(
                    "Could not fit a single argument into the command line !?"
                    f" {max_args_byte_len} (variable argument limit) < {arg_len} (argument length)"
                )
            yield pending_args
            remaining_len = max_args_byte_len - arg_len
            if reuse_list_ok:
                pending_args.clear()
                pending_args.extend(static_cmd)
            else:
                pending_args = list(static_cmd)
        pending_args.append(arg)

    if len(pending_args) > static_cmd_len:
        yield pending_args


def xargs(
    static_cmd: Sequence[str],
    varargs: Iterable[str],
    *,
    env: Optional[Mapping[str, str]] = None,
    reuse_list_ok: bool = False,
) -> Iterator[List[str]]:
    max_args_bytes = _sc_arg_max()
    # len overshoots with one space, explaining the -1. The _split_xargs_args
    # will account for the space for the first argument
    static_byte_len = (
        len(static_cmd) - 1 + sum(len(a.encode("utf-8")) for a in static_cmd)
    )
    if max_args_bytes is not None:
        if env is None:
            # +2 for nul bytes after key and value
            static_byte_len += sum(len(k) + len(v) + 2 for k, v in os.environb.items())
        else:
            # +2 for nul bytes after key and value
            static_byte_len += sum(
                len(k.encode("utf-8")) + len(v.encode("utf-8")) + 2
                for k, v in env.items()
            )
        # Add a fixed buffer for OS overhead here (in case env and cmd both must be page-aligned or something like
        # that)
        static_byte_len += 2 * 4096
    else:
        # The 20 000 limit is from debhelper, and it did not account for the environment. So neither will we here.
        max_args_bytes = 20_000
    remain_len = max_args_bytes - static_byte_len
    yield from _split_xargs_args(static_cmd, remain_len, varargs, reuse_list_ok)
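A typical use of xargs() above, splitting a long file list across as few invocations as the argument-size limit allows (sketch; all_files is a hypothetical iterable of paths):

import subprocess

for cmd in xargs(["chmod", "0644"], all_files):
    subprocess.check_call(cmd)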
# itertools recipe
def grouper(
    iterable: Iterable[T],
    n: int,
    *,
    incomplete: Literal["fill", "strict", "ignore"] = "fill",
    fillvalue: Optional[T] = None,
) -> Iterator[Tuple[T, ...]]:
    """Collect data into non-overlapping fixed-length chunks or blocks"""
    # grouper('ABCDEFG', 3, fillvalue='x') --> ABC DEF Gxx
    # grouper('ABCDEFG', 3, incomplete='strict') --> ABC DEF ValueError
    # grouper('ABCDEFG', 3, incomplete='ignore') --> ABC DEF
    args = [iter(iterable)] * n
    if incomplete == "fill":
        return zip_longest(*args, fillvalue=fillvalue)
    if incomplete == "strict":
        return zip(*args, strict=True)
    if incomplete == "ignore":
        return zip(*args)
    else:
        raise ValueError("Expected fill, strict, or ignore")
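Example of batching with grouper() (an illustration of the recipe above):

rows = list(grouper(range(7), 3, incomplete="fill", fillvalue=0))
# rows == [(0, 1, 2), (3, 4, 5), (6, 0, 0)]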
_LOGGING_SET_UP = False


def _check_color() -> Tuple[bool, bool, Optional[str]]:
    dpkg_or_default = os.environ.get(
        "DPKG_COLORS", "never" if "NO_COLOR" in os.environ else "auto"
    )
    requested_color = os.environ.get("DEBPUTY_COLORS", dpkg_or_default)
    bad_request = None
    if requested_color not in {"auto", "always", "never"}:
        bad_request = requested_color
        requested_color = "auto"

    if requested_color == "auto":
        stdout_color = sys.stdout.isatty()
        stderr_color = sys.stderr.isatty()
    else:
        enable = requested_color == "always"
        stdout_color = enable
        stderr_color = enable
    return stdout_color, stderr_color, bad_request
def program_name() -> str:
    name = os.path.basename(sys.argv[0])
    if name.endswith(".py"):
        name = name[:-3]
    if name == "__main__":
        name = os.path.basename(os.path.dirname(sys.argv[0]))
    # FIXME: Not optimal that we have to hardcode these kinds of things here
    if name == "debputy_cmd":
        name = "debputy"
    return name
def package_cross_check_precheck(
    pkg_a: "BinaryPackage",
    pkg_b: "BinaryPackage",
) -> Tuple[bool, bool]:
    """Whether these two packages can do content cross-checks

    :param pkg_a: The first package
    :param pkg_b: The second package
    :return: A tuple of two booleans. If the first is True, then binary_package_a may do content cross-checks
      that involve binary_package_b. If the second is True, then binary_package_b may do content cross-checks
      that involve binary_package_a. Both can be True and both can be False at the same time, which
      happens in common cases (arch:all + arch:any causes both to be False, as a common example).
    """

    # Handle the two most obvious base-cases
    if not pkg_a.should_be_acted_on or not pkg_b.should_be_acted_on:
        return False, False
    if pkg_a.is_arch_all ^ pkg_b.is_arch_all:
        return False, False

    a_may_see_b = True
    b_may_see_a = True

    a_bp = pkg_a.fields.get("Build-Profiles", "")
    b_bp = pkg_b.fields.get("Build-Profiles", "")

    if a_bp != b_bp:
        a_bp_set = _parse_build_profiles(a_bp) if a_bp != "" else frozenset()
        b_bp_set = _parse_build_profiles(b_bp) if b_bp != "" else frozenset()

        # Check for the build profiles being identical but just ordered differently.
        if a_bp_set != b_bp_set:
            # For simplicity, we let groups cancel each other out. If one side has no clauses
            # left, then it will always be built when the other is built.
            #
            # Eventually, someone will be here with a special case where more complex logic is
            # required. Good luck to you! Remember to add test cases for it (the existing logic
            # has some for a reason and if the logic is going to be more complex, it will need
            # test cases to assert it fixes the problem and does not regress)
            if a_bp_set - b_bp_set:
                a_may_see_b = False
            if b_bp_set - a_bp_set:
                b_may_see_a = False

    if pkg_a.declared_architecture != pkg_b.declared_architecture:
        # Also here we could do a subset check, but wildcards vs. non-wildcards make that a pain
        if pkg_a.declared_architecture != "any":
            b_may_see_a = False
        if pkg_b.declared_architecture != "any":
            a_may_see_b = False

    return a_may_see_b, b_may_see_a
def setup_logging(
    *, log_only_to_stderr: bool = False, reconfigure_logging: bool = False
) -> None:
    global _LOGGING_SET_UP, _DEFAULT_LOGGER, _STDOUT_HANDLER, _STDERR_HANDLER
    if _LOGGING_SET_UP and not reconfigure_logging:
        raise RuntimeError(
            "Logging has already been configured."
            " Use reconfigure_logging=True if you need to reconfigure it"
        )
    stdout_color, stderr_color, bad_request = _check_color()

    if stdout_color or stderr_color:
        try:
            import colorlog
        except ImportError:
            stdout_color = False
            stderr_color = False

    if log_only_to_stderr:
        stdout = sys.stderr
        stdout_color = stderr_color
    else:
        stdout = sys.stdout

    class LogLevelFilter(logging.Filter):
        def __init__(self, threshold: int, above: bool):
            super().__init__()
            self.threshold = threshold
            self.above = above

        def filter(self, record: logging.LogRecord) -> bool:
            if self.above:
                return record.levelno >= self.threshold
            else:
                return record.levelno < self.threshold

    color_format = (
        "{bold}{name}{reset}: {bold}{log_color}{levelnamelower}{reset}: {message}"
    )
    colorless_format = "{name}: {levelnamelower}: {message}"

    existing_stdout_handler = _STDOUT_HANDLER
    existing_stderr_handler = _STDERR_HANDLER

    if stdout_color:
        stdout_handler = colorlog.StreamHandler(stdout)
        stdout_handler.setFormatter(
            colorlog.ColoredFormatter(color_format, style="{", force_color=True)
        )
        logger = colorlog.getLogger()
        if existing_stdout_handler is not None:
            logger.removeHandler(existing_stdout_handler)
        _STDOUT_HANDLER = stdout_handler
        logger.addHandler(stdout_handler)
    else:
        stdout_handler = logging.StreamHandler(stdout)
        stdout_handler.setFormatter(logging.Formatter(colorless_format, style="{"))
        logger = logging.getLogger()
        if existing_stdout_handler is not None:
            logger.removeHandler(existing_stdout_handler)
        _STDOUT_HANDLER = stdout_handler
        logger.addHandler(stdout_handler)

    if stderr_color:
        stderr_handler = colorlog.StreamHandler(sys.stderr)
        stderr_handler.setFormatter(
            colorlog.ColoredFormatter(color_format, style="{", force_color=True)
        )
        logger = logging.getLogger()
        if existing_stderr_handler is not None:
            logger.removeHandler(existing_stderr_handler)
        _STDERR_HANDLER = stderr_handler
        logger.addHandler(stderr_handler)
    else:
        stderr_handler = logging.StreamHandler(sys.stderr)
        stderr_handler.setFormatter(logging.Formatter(colorless_format, style="{"))
        logger = logging.getLogger()
        if existing_stderr_handler is not None:
            logger.removeHandler(existing_stderr_handler)
        _STDERR_HANDLER = stderr_handler
        logger.addHandler(stderr_handler)

    stdout_handler.addFilter(LogLevelFilter(logging.WARN, False))
    stderr_handler.addFilter(LogLevelFilter(logging.WARN, True))

    name = program_name()

    old_factory = logging.getLogRecordFactory()

    def record_factory(
        *args: Any, **kwargs: Any
    ) -> logging.LogRecord:  # pragma: no cover
        record = old_factory(*args, **kwargs)
        record.levelnamelower = record.levelname.lower()
        return record

    logging.setLogRecordFactory(record_factory)

    logging.getLogger().setLevel(logging.INFO)
    _DEFAULT_LOGGER = logging.getLogger(name)

    if bad_request:
        _DEFAULT_LOGGER.warning(
            f'Invalid color request for "{bad_request}" in either DEBPUTY_COLORS or DPKG_COLORS.'
            ' Resetting to "auto".'
        )

    _LOGGING_SET_UP = True
diff --git a/coverage-report/d_267b6307937f1878_version_py.html b/coverage-report/d_267b6307937f1878_version_py.html
deleted file mode 100644
index e8a9143..0000000
--- a/coverage-report/d_267b6307937f1878_version_py.html
+++ /dev/null
@@ -1,166 +0,0 @@
Coverage for src/debputy/version.py: 75% (38 statements); coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
from typing import Optional, Callable

__version__ = "N/A"

IS_RELEASE_BUILD = False

if __version__ in ("N/A",):
    import subprocess

    class LazyString:
        def __init__(self, initializer: Callable[[], str]) -> None:
            self._initializer = initializer
            self._value: Optional[str] = None

        def __str__(self) -> str:
            value = object.__getattribute__(self, "_value")
            if value is None:
                value = object.__getattribute__(self, "_initializer")()
                object.__setattr__(self, "_value", value)
            return value

        def __getattribute__(self, item):
            value = str(self)
            return getattr(value, item)

        def __contains__(self, item):
            return item in str(self)

    def _initialize_version() -> str:
        try:
            devnull: Optional[int] = subprocess.DEVNULL
        except AttributeError:
            devnull = None  # Not supported, but not critical

        try:
            v = (
                subprocess.check_output(
                    ["git", "describe", "--tags"],
                    stderr=devnull,
                )
                .strip()
                .decode("utf-8")
            )
        except (subprocess.CalledProcessError, FileNotFoundError):
            try:
                v = (
                    subprocess.check_output(
                        ["dpkg-parsechangelog", "-SVersion"],
                        stderr=devnull,
                    )
                    .strip()
                    .decode("utf-8")
                )

            except (subprocess.CalledProcessError, FileNotFoundError):
                v = "N/A"

        if v.startswith("debian/"):
            v = v[7:]
        return v

    __version__ = LazyString(_initialize_version)
    IS_RELEASE_BUILD = False

else:
    # Disregard snapshot versions (gbp dch -S) as "release builds"
    IS_RELEASE_BUILD = ".gbp" not in __version__
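The LazyString above defers the git/dpkg-parsechangelog lookup until the version string is first used (an illustrative sketch):

from debputy.version import __version__

banner = f"debputy {__version__}"  # first use runs _initialize_version()
is_na = "N/A" in __version__       # later uses reuse the cached value (__contains__)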
diff --git a/coverage-report/d_2882d0a735873825___init___py.html b/coverage-report/d_2882d0a735873825___init___py.html
deleted file mode 100644
index bc5e7fc..0000000
--- a/coverage-report/d_2882d0a735873825___init___py.html
+++ /dev/null
@@ -1,99 +0,0 @@
Coverage for src/debputy/commands/__init__.py: 100% (0 statements); coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

diff --git a/coverage-report/d_2882d0a735873825_deb_materialization_py.html b/coverage-report/d_2882d0a735873825_deb_materialization_py.html
deleted file mode 100644
index 3dabaf3..0000000
--- a/coverage-report/d_2882d0a735873825_deb_materialization_py.html
+++ /dev/null
@@ -1,686 +0,0 @@
Coverage for src/debputy/commands/deb_materialization.py: 9% (236 statements); coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
#!/usr/bin/python3 -B
import argparse
import collections
import contextlib
import json
import os
import subprocess
import sys
import tempfile
import textwrap
from datetime import datetime
from typing import Optional, List, Iterator, Dict, Tuple

from debputy import DEBPUTY_ROOT_DIR
from debputy.intermediate_manifest import (
    TarMember,
    PathType,
    output_intermediate_manifest,
    output_intermediate_manifest_to_fd,
)
from debputy.util import (
    _error,
    _info,
    compute_output_filename,
    resolve_source_date_epoch,
    ColorizedArgumentParser,
    setup_logging,
    detect_fakeroot,
    print_command,
    program_name,
)
from debputy.version import __version__
35def parse_args() -> argparse.Namespace: 

-

36 description = textwrap.dedent( 

-

37 """\ 

-

38 This is a low level tool for materializing deb packages from intermediate debputy manifests or assembling 

-

39 the deb from a materialization. 

-

40 

-

41 The tool is not intended to be run directly by end users. 

-

42 """ 

-

43 ) 

-

44 

-

45 parser = ColorizedArgumentParser( 

-

46 description=description, 

-

47 formatter_class=argparse.RawDescriptionHelpFormatter, 

-

48 allow_abbrev=False, 

-

49 prog=program_name(), 

-

50 ) 

-

51 

-

52 parser.add_argument("--version", action="version", version=__version__) 

-

53 

-

54 subparsers = parser.add_subparsers(dest="command", required=True) 

-

55 

-

56 materialize_deb_parser = subparsers.add_parser( 

-

57 "materialize-deb", 

-

58 allow_abbrev=False, 

-

59 help="Generate .deb/.udebs structure from a root directory and" 

-

60 " a *intermediate* debputy manifest", 

-

61 ) 

-

62 materialize_deb_parser.add_argument( 

-

63 "control_root_dir", 

-

64 metavar="control-root-dir", 

-

65 help="A directory that contains the control files (usually debian/<pkg>/DEBIAN)", 

-

66 ) 

-

67 materialize_deb_parser.add_argument( 

-

68 "materialization_output", 

-

69 metavar="materialization_output", 

-

70 help="Where to place the resulting structure should be placed. Should not exist", 

-

71 ) 

-

72 materialize_deb_parser.add_argument( 

-

73 "--discard-existing-output", 

-

74 dest="discard_existing_output", 

-

75 default=False, 

-

76 action="store_true", 

-

77 help="If passed, then the output location may exist." 

-

78 " If it does, it will be *deleted*.", 

-

79 ) 

-

80 materialize_deb_parser.add_argument( 

-

81 "--source-date-epoch", 

-

82 dest="source_date_epoch", 

-

83 action="store", 

-

84 type=int, 

-

85 default=None, 

-

86 help="Source date epoch (can also be given via the SOURCE_DATE_EPOCH environ" 

-

87 " variable", 

-

88 ) 

-

89 materialize_deb_parser.add_argument( 

-

90 "--may-move-control-files", 

-

91 dest="may_move_control_files", 

-

92 action="store_true", 

-

93 default=False, 

-

94 help="Whether the command may optimize by moving (rather than copying) DEBIAN files", 

-

95 ) 

-

96 materialize_deb_parser.add_argument( 

-

97 "--may-move-data-files", 

-

98 dest="may_move_data_files", 

-

99 action="store_true", 

-

100 default=False, 

-

101 help="Whether the command may optimize by moving (rather than copying) when materializing", 

-

102 ) 

-

103 

-

104 materialize_deb_parser.add_argument( 

-

105 "--intermediate-package-manifest", 

-

106 dest="package_manifest", 

-

107 metavar="JSON_FILE", 

-

108 action="store", 

-

109 default=None, 

-

110 help="INTERMEDIATE package manifest (JSON!)", 

-

111 ) 

-

112 

-

113 materialize_deb_parser.add_argument( 

-

114 "--udeb", 

-

115 dest="udeb", 

-

116 default=False, 

-

117 action="store_true", 

-

118 help="Whether this is udeb package. Affects extension and default compression", 

-

119 ) 

-

120 

-

121 materialize_deb_parser.add_argument( 

-

122 "--build-method", 

-

123 dest="build_method", 

-

124 choices=["debputy", "dpkg-deb"], 

-

125 type=str, 

-

126 default=None, 

-

127 help="Immediately assemble the deb as well using the selected method", 

-

128 ) 

-

129 materialize_deb_parser.add_argument( 

-

130 "--assembled-deb-output", 

-

131 dest="assembled_deb_output", 

-

132 type=str, 

-

133 default=None, 

-

134 help="Where to place the resulting deb. Only applicable with --build-method", 

-

135 ) 

-

136 

-

137 # Added for "help only" - you cannot trigger this option in practice 

-

138 materialize_deb_parser.add_argument( 

-

139 "--", 

-

140 metavar="DPKG_DEB_ARGS", 

-

141 action="extend", 

-

142 nargs="+", 

-

143 dest="unused", 

-

144 help="Arguments to be passed to dpkg-deb" 

-

145 " (same as you might pass to dh_builddeb).", 

-

146 ) 

-

147 

-

148 build_deb_structure = subparsers.add_parser( 

-

149 "build-materialized-deb", 

-

150 allow_abbrev=False, 

-

151 help="Produce a .deb from a directory produced by the" 

-

152 " materialize-deb-structure command", 

-

153 ) 

-

154 build_deb_structure.add_argument( 

-

155 "materialized_deb_root_dir", 

-

156 metavar="materialized-deb-root-dir", 

-

157 help="The output directory of the materialize-deb-structure command", 

-

158 ) 

-

159 build_deb_structure.add_argument( 

-

160 "build_method", 

-

161 metavar="build-method", 

-

162 choices=["debputy", "dpkg-deb"], 

-

163 type=str, 

-

164 default="dpkg-deb", 

-

165 help="Which tool should assemble the deb", 

-

166 ) 

-

167 build_deb_structure.add_argument( 

-

168 "--output", type=str, default=None, help="Where to place the resulting deb" 

-

169 ) 

-

170 

-

171 argv = sys.argv 

-

172 try: 

-

173 i = argv.index("--") 

-

174 upstream_args = argv[i + 1 :] 

-

175 argv = argv[:i] 

-

176 except (IndexError, ValueError): 

-

177 upstream_args = [] 

-

178 parsed_args = parser.parse_args(argv[1:]) 

-

179 setattr(parsed_args, "upstream_args", upstream_args) 

-

180 

-

181 return parsed_args 
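
The argv handling at the end of parse_args() strips everything after the first "--" before argparse ever sees it. A small sketch with hypothetical argument values:

    argv = ["debputy", "materialize-deb", "ctrl-dir", "out-dir", "--", "-Zxz", "-z9"]
    try:
        i = argv.index("--")
        upstream_args = argv[i + 1 :]  # ["-Zxz", "-z9"], forwarded to dpkg-deb later
        argv = argv[:i]                # what argparse actually parses
    except ValueError:                 # no "--" present
        upstream_args = []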

-

182 

-

183 

-

184def _run(cmd: List[str]) -> None: 

-

185 print_command(*cmd) 

-

186 subprocess.check_call(cmd) 

-

187 

-

188 

-

189def strip_path_prefix(member_path: str) -> str: 

-

190 if not member_path.startswith("./"): 

-

191 _error( 

-

192 f'Invalid manifest: "{member_path}" does not start with "./", but all paths should' 

-

193 ) 

-

194 return member_path[2:] 

-

195 

-

196 

-

197def _perform_data_tar_materialization( 

-

198 output_packaging_root: str, 

-

199 intermediate_manifest: List[TarMember], 

-

200 may_move_data_files: bool, 

-

201) -> List[Tuple[str, TarMember]]: 

-

202 start_time = datetime.now() 

-

203 replacement_manifest_paths = [] 

-

204 _info("Materializing data.tar part of the deb:") 

-

205 

-

206 directories = ["mkdir"] 

-

207 symlinks = [] 

-

208 bulk_copies: Dict[str, List[str]] = collections.defaultdict(list) 

-

209 copies = [] 

-

210 renames = [] 

-

211 

-

212 for tar_member in intermediate_manifest: 

-

213 member_path = strip_path_prefix(tar_member.member_path) 

-

214 new_fs_path = ( 

-

215 os.path.join("deb-root", member_path) if member_path else "deb-root" 

-

216 ) 

-

217 materialization_path = ( 

-

218 f"{output_packaging_root}/{member_path}" 

-

219 if member_path 

-

220 else output_packaging_root 

-

221 ) 

-

222 replacement_tar_member = tar_member 

-

223 materialization_parent_dir = os.path.dirname(materialization_path.rstrip("/")) 

-

224 if tar_member.path_type == PathType.DIRECTORY: 

-

225 directories.append(materialization_path) 

-

226 elif tar_member.path_type == PathType.SYMLINK: 

-

227 symlinks.append((tar_member.link_target, materialization_path)) 

-

228 elif tar_member.fs_path is not None: 

-

229 if tar_member.link_target: 

-

230 # Not sure if hardlinks get here yet, as we do not support hardlinks

-

231 _error("Internal error; hardlink not supported") 

-

232 

-

233 if may_move_data_files and tar_member.may_steal_fs_path: 

-

234 renames.append((tar_member.fs_path, materialization_path)) 

-

235 elif os.path.basename(tar_member.fs_path) == os.path.basename( 

-

236 materialization_path 

-

237 ): 

-

238 bulk_copies[materialization_parent_dir].append(tar_member.fs_path) 

-

239 else: 

-

240 copies.append((tar_member.fs_path, materialization_path)) 

-

241 else: 

-

242 _error(f"Internal error; unsupported path type {tar_member.path_type}") 

-

243 

-

244 if tar_member.fs_path is not None: 

-

245 replacement_tar_member = tar_member.clone_and_replace( 

-

246 fs_path=new_fs_path, may_steal_fs_path=False 

-

247 ) 

-

248 

-

249 replacement_manifest_paths.append( 

-

250 (materialization_path, replacement_tar_member) 

-

251 ) 

-

252 

-

253 if len(directories) > 1: 

-

254 _run(directories) 

-

255 

-

256 for dest_dir, files in bulk_copies.items(): 

-

257 cmd = ["cp", "--reflink=auto", "-t", dest_dir] 

-

258 cmd.extend(files) 

-

259 _run(cmd) 

-

260 

-

261 for source, dest in copies: 

-

262 _run(["cp", "--reflink=auto", source, dest]) 

-

263 

-

264 for source, dest in renames: 

-

265 print_command("mv", source, dest) 

-

266 os.rename(source, dest) 

-

267 

-

268 for link_target, link_path in symlinks: 

-

269 print_command("ln", "-s", link_target, link_path) 

-

270 os.symlink(link_target, link_path) 

-

271 

-

272 end_time = datetime.now() 

-

273 

-

274 _info(f"Materialization of data.tar finished, took: {end_time - start_time}") 

-

275 

-

276 return replacement_manifest_paths 
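
One detail worth noting in the materialization loop above: copies that keep their basename are batched per destination directory so a single cp invocation can handle many files. A reduced sketch with hypothetical paths:

    import collections

    bulk_copies = collections.defaultdict(list)
    for src, dest_dir in [
        ("debian/tmp/usr/bin/foo", "deb-root/usr/bin"),
        ("debian/tmp/usr/bin/bar", "deb-root/usr/bin"),
    ]:
        bulk_copies[dest_dir].append(src)

    for dest_dir, files in bulk_copies.items():
        cmd = ["cp", "--reflink=auto", "-t", dest_dir, *files]  # one exec per directory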

-

277 

-

278 

-

279def materialize_deb( 

-

280 control_root_dir: str, 

-

281 intermediate_manifest_path: Optional[str], 

-

282 source_date_epoch: int, 

-

283 dpkg_deb_options: List[str], 

-

284 is_udeb: bool, 

-

285 output_dir: str, 

-

286 may_move_control_files: bool, 

-

287 may_move_data_files: bool, 

-

288) -> None: 

-

289 if not os.path.isfile(f"{control_root_dir}/control"): 

-

290 _error( 

-

291 f'The directory "{control_root_dir}" does not look like a package root dir (there is no control file)' 

-

292 ) 

-

293 intermediate_manifest: List[TarMember] = parse_manifest(intermediate_manifest_path) 

-

294 

-

295 output_packaging_root = os.path.join(output_dir, "deb-root") 

-

296 os.mkdir(output_dir) 

-

297 

-

298 replacement_manifest_paths = _perform_data_tar_materialization( 

-

299 output_packaging_root, intermediate_manifest, may_move_data_files 

-

300 ) 

-

301 for materialization_path, tar_member in reversed(replacement_manifest_paths): 

-

302 # TODO: Hardlinks should probably skip these commands 

-

303 if tar_member.path_type != PathType.SYMLINK: 

-

304 os.chmod(materialization_path, tar_member.mode, follow_symlinks=False) 

-

305 os.utime( 

-

306 materialization_path, 

-

307 (tar_member.mtime, tar_member.mtime), 

-

308 follow_symlinks=False, 

-

309 ) 

-

310 

-

311 materialized_ctrl_dir = f"{output_packaging_root}/DEBIAN" 

-

312 if may_move_control_files: 

-

313 print_command("mv", control_root_dir, materialized_ctrl_dir) 

-

314 os.rename(control_root_dir, materialized_ctrl_dir) 

-

315 else: 

-

316 os.mkdir(materialized_ctrl_dir) 

-

317 copy_cmd = ["cp", "-a"] 

-

318 copy_cmd.extend( 

-

319 os.path.join(control_root_dir, f) for f in os.listdir(control_root_dir) 

-

320 ) 

-

321 copy_cmd.append(materialized_ctrl_dir) 

-

322 _run(copy_cmd) 

-

323 

-

324 output_intermediate_manifest( 

-

325 os.path.join(output_dir, "deb-structure-intermediate-manifest.json"), 

-

326 [t[1] for t in replacement_manifest_paths], 

-

327 ) 

-

328 

-

329 with open(os.path.join(output_dir, "env-and-cli.json"), "w") as fd: 

-

330 serial_format = { 

-

331 "env": { 

-

332 "SOURCE_DATE_EPOCH": str(source_date_epoch), 

-

333 "DPKG_DEB_COMPRESSOR_LEVEL": os.environ.get( 

-

334 "DPKG_DEB_COMPRESSOR_LEVEL" 

-

335 ), 

-

336 "DPKG_DEB_COMPRESSOR_TYPE": os.environ.get("DPKG_DEB_COMPRESSOR_TYPE"), 

-

337 "DPKG_DEB_THREADS_MAX": os.environ.get("DPKG_DEB_THREADS_MAX"), 

-

338 }, 

-

339 "cli": {"dpkg-deb": dpkg_deb_options}, 

-

340 "udeb": is_udeb, 

-

341 } 

-

342 json.dump(serial_format, fd) 
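
For reference, the env-and-cli.json written above ends up looking roughly like this (hypothetical values; unset environment variables serialize as null):

    import json

    serial_format = {
        "env": {
            "SOURCE_DATE_EPOCH": "1668973695",
            "DPKG_DEB_COMPRESSOR_LEVEL": None,  # unset -> null in JSON
            "DPKG_DEB_COMPRESSOR_TYPE": None,
            "DPKG_DEB_THREADS_MAX": None,
        },
        "cli": {"dpkg-deb": ["-Zxz"]},
        "udeb": False,
    }
    print(json.dumps(serial_format))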

-

343 

-

344 

-

345def apply_fs_metadata( 

-

346 materialized_path: str, 

-

347 tar_member: TarMember, 

-

348 apply_ownership: bool, 

-

349 is_using_fakeroot: bool, 

-

350) -> None: 

-

351 if apply_ownership: 

-

352 os.chown( 

-

353 materialized_path, tar_member.uid, tar_member.gid, follow_symlinks=False 

-

354 ) 

-

355 # To avoid surprises, align these with the manifest. Just in case the transport did not preserve the metadata. 

-

356 # Also, unsure whether metadata changes cause directory mtimes to change, so resetting them unconditionally 

-

357 # also prevents that problem. 

-

358 if tar_member.path_type != PathType.SYMLINK: 

-

359 os.chmod(materialized_path, tar_member.mode, follow_symlinks=False) 

-

360 os.utime( 

-

361 materialized_path, (tar_member.mtime, tar_member.mtime), follow_symlinks=False 

-

362 ) 

-

363 if is_using_fakeroot: 

-

364 st = os.stat(materialized_path, follow_symlinks=False) 

-

365 if st.st_uid != tar_member.uid or st.st_gid != tar_member.gid: 

-

366 _error( 

-

367 'Change of ownership failed. The chown call "succeeded" but stat does not give the right result.' 

-

368 " Most likely a fakeroot bug. Note, when verifying this, use os.chown + os.stat from python" 

-

369 " (the chmod/stat shell commands might use a different syscall that fakeroot accurately emulates)" 

-

370 ) 

-

371 

-

372 

-

373def _dpkg_deb_root_requirements( 

-

374 intermediate_manifest: List[TarMember], 

-

375) -> Tuple[List[str], bool, bool]: 

-

376 needs_root = any(tm.uid != 0 or tm.gid != 0 for tm in intermediate_manifest) 

-

377 if needs_root: 

-

378 if os.getuid() != 0: 

-

379 _error( 

-

380 'Must be run as root/fakeroot when using the method "dpkg-deb" due to the contents' 

-

381 ) 

-

382 is_using_fakeroot = detect_fakeroot() 

-

383 deb_cmd = ["dpkg-deb"] 

-

384 _info("Applying ownership, mode, and utime from the intermediate manifest...") 

-

385 else: 

-

386 # fakeroot does not matter in this case 

-

387 is_using_fakeroot = False 

-

388 deb_cmd = ["dpkg-deb", "--root-owner-group"] 

-

389 _info("Applying mode and utime from the intermediate manifest...") 

-

390 return deb_cmd, needs_root, is_using_fakeroot 

-

391 

-

392 

-

393@contextlib.contextmanager 

-

394def maybe_with_materialized_manifest( 

-

395 content: Optional[List[TarMember]], 

-

396) -> Iterator[Optional[str]]: 

-

397 if content is not None: 

-

398 with tempfile.NamedTemporaryFile( 

-

399 prefix="debputy-mat-build", 

-

400 mode="w+t", 

-

401 suffix=".json", 

-

402 encoding="utf-8", 

-

403 ) as fd: 

-

404 output_intermediate_manifest_to_fd(fd, content) 

-

405 fd.flush() 

-

406 yield fd.name 

-

407 else: 

-

408 yield None 

-

409 

-

410 

-

411def _prep_assembled_deb_output_path( 

-

412 output_path: Optional[str], 

-

413 materialized_deb_structure: str, 

-

414 deb_root: str, 

-

415 method: str, 

-

416 is_udeb: bool, 

-

417) -> str: 

-

418 if output_path is None: 

-

419 ext = "udeb" if is_udeb else "deb" 

-

420 output_dir = os.path.join(materialized_deb_structure, "output") 

-

421 if not os.path.isdir(output_dir): 

-

422 os.mkdir(output_dir) 

-

423 output = os.path.join(output_dir, f"{method}.{ext}") 

-

424 elif os.path.isdir(output_path): 

-

425 output = os.path.join( 

-

426 output_path, 

-

427 compute_output_filename(os.path.join(deb_root, "DEBIAN"), is_udeb), 

-

428 ) 

-

429 else: 

-

430 output = output_path 

-

431 return output 

-

432 

-

433 

-

434def _apply_env(env: Dict[str, Optional[str]]) -> None: 

-

435 for name, value in env.items(): 

-

436 if value is not None: 

-

437 os.environ[name] = value 

-

438 else: 

-

439 try: 

-

440 del os.environ[name] 

-

441 except KeyError: 

-

442 pass 

-

443 

-

444 

-

445def assemble_deb( 

-

446 materialized_deb_structure: str, 

-

447 method: str, 

-

448 output_path: Optional[str], 

-

449 combined_materialization_and_assembly: bool, 

-

450) -> None: 

-

451 deb_root = os.path.join(materialized_deb_structure, "deb-root") 

-

452 

-

453 with open(os.path.join(materialized_deb_structure, "env-and-cli.json"), "r") as fd: 

-

454 serial_format = json.load(fd) 

-

455 

-

456 env = serial_format.get("env") or {} 

-

457 cli = serial_format.get("cli") or {} 

-

458 is_udeb = serial_format.get("udeb") 

-

459 source_date_epoch = env.get("SOURCE_DATE_EPOCH") 

-

460 dpkg_deb_options = cli.get("dpkg-deb") or [] 

-

461 intermediate_manifest_path = os.path.join( 

-

462 materialized_deb_structure, "deb-structure-intermediate-manifest.json" 

-

463 ) 

-

464 original_intermediate_manifest = TarMember.parse_intermediate_manifest( 

-

465 intermediate_manifest_path 

-

466 ) 

-

467 _info( 

-

468 "Rebasing relative paths in the intermediate manifest so they are relative to current working directory ..." 

-

469 ) 

-

470 intermediate_manifest = [ 

-

471 ( 

-

472 tar_member.clone_and_replace( 

-

473 fs_path=os.path.join(materialized_deb_structure, tar_member.fs_path) 

-

474 ) 

-

475 if tar_member.fs_path is not None and not tar_member.fs_path.startswith("/") 

-

476 else tar_member 

-

477 ) 

-

478 for tar_member in original_intermediate_manifest 

-

479 ] 

-

480 materialized_manifest = None 

-

481 if method == "debputy": 

-

482 materialized_manifest = intermediate_manifest 

-

483 

-

484 if source_date_epoch is None: 

-

485 _error( 

-

486 "Cannot reproduce the deb. No source date epoch provided in the materialized deb root." 

-

487 ) 

-

488 _apply_env(env) 

-

489 

-

490 output = _prep_assembled_deb_output_path( 

-

491 output_path, 

-

492 materialized_deb_structure, 

-

493 deb_root, 

-

494 method, 

-

495 is_udeb, 

-

496 ) 

-

497 

-

498 with maybe_with_materialized_manifest(materialized_manifest) as tmp_file: 

-

499 if method == "dpkg-deb": 

-

500 deb_cmd, needs_root, is_using_fakeroot = _dpkg_deb_root_requirements( 

-

501 intermediate_manifest 

-

502 ) 

-

503 if needs_root or not combined_materialization_and_assembly: 

-

504 for tar_member in reversed(intermediate_manifest): 

-

505 p = os.path.join( 

-

506 deb_root, strip_path_prefix(tar_member.member_path) 

-

507 ) 

-

508 apply_fs_metadata(p, tar_member, needs_root, is_using_fakeroot) 

-

509 elif method == "debputy": 

-

510 deb_packer = os.path.join(DEBPUTY_ROOT_DIR, "deb_packer.py") 

-

511 assert tmp_file is not None 

-

512 deb_cmd = [ 

-

513 deb_packer, 

-

514 "--intermediate-package-manifest", 

-

515 tmp_file, 

-

516 "--source-date-epoch", 

-

517 source_date_epoch, 

-

518 ] 

-

519 else: 

-

520 _error(f"Internal error: Unsupported assembly method: {method}") 

-

521 

-

522 if is_udeb: 

-

523 deb_cmd.extend(["-z6", "-Zxz", "-Sextreme"]) 

-

524 deb_cmd.extend(dpkg_deb_options) 

-

525 deb_cmd.extend(["--build", deb_root, output]) 

-

526 start_time = datetime.now() 

-

527 _run(deb_cmd) 

-

528 end_time = datetime.now() 

-

529 _info(f" - assembly command took {end_time - start_time}") 

-

530 

-

531 

-

532def parse_manifest(manifest_path: "Optional[str]") -> "List[TarMember]": 

-

533 if manifest_path is None: 

-

534 _error("--intermediate-package-manifest is mandatory for now") 

-

535 return TarMember.parse_intermediate_manifest(manifest_path) 

-

536 

-

537 

-

538def main() -> None: 

-

539 setup_logging() 

-

540 parsed_args = parse_args() 

-

541 if parsed_args.command == "materialize-deb": 

-

542 mtime = resolve_source_date_epoch(parsed_args.source_date_epoch) 

-

543 dpkg_deb_args = parsed_args.upstream_args or [] 

-

544 output_dir = parsed_args.materialization_output 

-

545 if os.path.exists(output_dir): 

-

546 if not parsed_args.discard_existing_output: 

-

547 _error( 

-

548 "The output path already exists. Please either choose a non-existing path, delete the path" 

-

549 " or use --discard-existing-output (to have this command remove it as necessary)." 

-

550 ) 

-

551 _info( 

-

552 f'Removing existing path "{output_dir}" as requested by --discard-existing-output' 

-

553 ) 

-

554 _run(["rm", "-fr", output_dir]) 

-

555 

-

556 materialize_deb( 

-

557 parsed_args.control_root_dir, 

-

558 parsed_args.package_manifest, 

-

559 mtime, 

-

560 dpkg_deb_args, 

-

561 parsed_args.udeb, 

-

562 output_dir, 

-

563 parsed_args.may_move_control_files, 

-

564 parsed_args.may_move_data_files, 

-

565 ) 

-

566 

-

567 if parsed_args.build_method is not None: 

-

568 assemble_deb( 

-

569 output_dir, 

-

570 parsed_args.build_method, 

-

571 parsed_args.assembled_deb_output, 

-

572 True, 

-

573 ) 

-

574 

-

575 elif parsed_args.command == "build-materialized-deb": 

-

576 assemble_deb( 

-

577 parsed_args.materialized_deb_root_dir, 

-

578 parsed_args.build_method, 

-

579 parsed_args.output, 

-

580 False, 

-

581 ) 

-

582 else: 

-

583 _error(f'Internal error: Unimplemented command "{parsed_args.command}"') 

-

584 

-

585 

-

586if __name__ == "__main__": 

-

587 main() 

diff --git a/coverage-report/d_2882d0a735873825_deb_packer_py.html b/coverage-report/d_2882d0a735873825_deb_packer_py.html
deleted file mode 100644
index 865a9fe..0000000
--- a/coverage-report/d_2882d0a735873825_deb_packer_py.html
+++ /dev/null
@@ -1,656 +0,0 @@
[deleted HTML coverage page: src/debputy/commands/deb_packer.py, 58% of 197 statements; the deleted page's embedded source follows]

1#!/usr/bin/python3 -B 

-

2import argparse 

-

3import errno 

-

4import operator 

-

5import os 

-

6import stat 

-

7import subprocess 

-

8import tarfile 

-

9import textwrap 

-

10from typing import Optional, List, FrozenSet, Iterable, Callable, BinaryIO, cast 

-

11 

-

12from debputy.intermediate_manifest import TarMember, PathType 

-

13from debputy.util import ( 

-

14 _error, 

-

15 compute_output_filename, 

-

16 resolve_source_date_epoch, 

-

17 ColorizedArgumentParser, 

-

18 setup_logging, 

-

19 program_name, 

-

20 assume_not_none, 

-

21) 

-

22from debputy.version import __version__ 

-

23 

-

24 

-

25# AR header / start of a deb file for reference 

-

26# 00000000 21 3c 61 72 63 68 3e 0a 64 65 62 69 61 6e 2d 62 |!<arch>.debian-b| 

-

27# 00000010 69 6e 61 72 79 20 20 20 31 36 36 38 39 37 33 36 |inary 16689736| 

-

28# 00000020 39 35 20 20 30 20 20 20 20 20 30 20 20 20 20 20 |95 0 0 | 

-

29# 00000030 31 30 30 36 34 34 20 20 34 20 20 20 20 20 20 20 |100644 4 | 

-

30# 00000040 20 20 60 0a 32 2e 30 0a 63 6f 6e 74 72 6f 6c 2e | `.2.0.control.| 

-

31# 00000050 74 61 72 2e 78 7a 20 20 31 36 36 38 39 37 33 36 |tar.xz 16689736| 

-

32# 00000060 39 35 20 20 30 20 20 20 20 20 30 20 20 20 20 20 |95 0 0 | 

-

33# 00000070 31 30 30 36 34 34 20 20 39 33 36 38 20 20 20 20 |100644 9368 | 

-

34# 00000080 20 20 60 0a fd 37 7a 58 5a 00 00 04 e6 d6 b4 46 | `..7zXZ......F| 

-

35 

-

36 

-

37class ArMember: 

-

38 def __init__( 

-

39 self, 

-

40 name: str, 

-

41 mtime: int, 

-

42 fixed_binary: Optional[bytes] = None, 

-

43 write_to_impl: Optional[Callable[[BinaryIO], None]] = None, 

-

44 ) -> None: 

-

45 self.name = name 

-

46 self._mtime = mtime 

-

47 self._write_to_impl = write_to_impl 

-

48 self.fixed_binary = fixed_binary 

-

49 

-

50 @property 

-

51 def is_fixed_binary(self) -> bool: 

-

52 return self.fixed_binary is not None 

-

53 

-

54 @property 

-

55 def mtime(self) -> int: 

-

56 return self._mtime

-

57 

-

58 def write_to(self, fd: BinaryIO) -> None: 

-

59 writer = self._write_to_impl 

-

60 assert writer is not None 

-

61 writer(fd) 

-

62 

-

63 

-

64AR_HEADER_LEN = 60 

-

65AR_HEADER = b" " * AR_HEADER_LEN 

-

66 

-

67 

-

68def write_header( 

-

69 fd: BinaryIO, 

-

70 member: ArMember, 

-

71 member_len: int, 

-

72 mtime: int, 

-

73) -> None: 

-

74 header = b"%-16s%-12d0     0     100644  %-10d\x60\n" % (

-

75 member.name.encode("ascii"), 

-

76 mtime, 

-

77 member_len, 

-

78 ) 

-

79 fd.write(header) 
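
write_header() emits the classic 60-byte ar member header (compare the hexdump comment at the top of this file). A sketch, assuming the format string pads the uid/gid/mode columns as in the standard ar layout:

    name, mtime, size = b"debian-binary", 1668973695, 4
    header = b"%-16s%-12d0     0     100644  %-10d\x60\n" % (name, mtime, size)
    assert len(header) == 60  # AR_HEADER_LEN: 16 + 12 + 6 + 6 + 8 + 10 + 2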

-

80 

-

81 

-

82def generate_ar_archive( 

-

83 output_filename: str, 

-

84 mtime: int, 

-

85 members: Iterable[ArMember], 

-

86 prefer_raw_exceptions: bool, 

-

87) -> None: 

-

88 try: 

-

89 with open(output_filename, "wb", buffering=0) as fd: 

-

90 fd.write(b"!<arch>\n") 

-

91 for member in members: 

-

92 if member.is_fixed_binary: 

-

93 fixed_binary = assume_not_none(member.fixed_binary) 

-

94 write_header(fd, member, len(fixed_binary), mtime) 

-

95 fd.write(fixed_binary) 

-

96 else: 

-

97 header_pos = fd.tell() 

-

98 fd.write(AR_HEADER) 

-

99 member.write_to(fd) 

-

100 current_pos = fd.tell() 

-

101 fd.seek(header_pos, os.SEEK_SET) 

-

102 content_len = current_pos - header_pos - AR_HEADER_LEN 

-

103 assert content_len >= 0 

-

104 write_header(fd, member, content_len, mtime) 

-

105 fd.seek(current_pos, os.SEEK_SET) 

-

106 except OSError as e: 

-

107 if prefer_raw_exceptions: 

-

108 raise 

-

109 if e.errno == errno.ENOSPC: 

-

110 _error( 

-

111 f"Unable to write {output_filename}. The file system device reported disk full: {str(e)}" 

-

112 ) 

-

113 elif e.errno == errno.EIO: 

-

114 _error( 

-

115 f"Unable to write {output_filename}. The file system reported a generic I/O error: {str(e)}" 

-

116 ) 

-

117 elif e.errno == errno.EROFS: 

-

118 _error( 

-

119 f"Unable to write {output_filename}. The file system is read-only: {str(e)}" 

-

120 ) 

-

121 raise 

-

122 print(f"Generated {output_filename}") 

-

123 

-

124 

-

125def _generate_tar_file( 

-

126 tar_members: Iterable[TarMember], 

-

127 compression_cmd: List[str], 

-

128 write_to: BinaryIO, 

-

129) -> None: 

-

130 with ( 

-

131 subprocess.Popen( 

-

132 compression_cmd, stdin=subprocess.PIPE, stdout=write_to 

-

133 ) as compress_proc, 

-

134 tarfile.open( 

-

135 mode="w|", 

-

136 fileobj=compress_proc.stdin, 

-

137 format=tarfile.GNU_FORMAT, 

-

138 errorlevel=1, 

-

139 ) as tar_fd, 

-

140 ): 

-

141 for tar_member in tar_members: 

-

142 tar_info: tarfile.TarInfo = tar_member.create_tar_info(tar_fd) 

-

143 if tar_member.path_type == PathType.FILE: 

-

144 with open(assume_not_none(tar_member.fs_path), "rb") as mfd: 

-

145 tar_fd.addfile(tar_info, fileobj=mfd) 

-

146 else: 

-

147 tar_fd.addfile(tar_info) 

-

148 compress_proc.wait() 

-

149 if compress_proc.returncode != 0:  # coverage: 149 ↛ 150, condition was never true

-

150 _error( 

-

151 f"Compression command {compression_cmd} failed with code {compress_proc.returncode}" 

-

152 ) 
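
The pattern used by _generate_tar_file() above, reduced to its core (hypothetical paths): tarfile writes through a pipe into an external compressor, so the tarball is never held in memory or written uncompressed to disk.

    import subprocess
    import tarfile

    with open("data.tar.xz", "wb") as out:
        with subprocess.Popen(["xz", "-6"], stdin=subprocess.PIPE, stdout=out) as proc:
            with tarfile.open(
                mode="w|", fileobj=proc.stdin, format=tarfile.GNU_FORMAT
            ) as tar:
                tar.add("DEBIAN/control", arcname="./control")
        # Popen's context manager closes stdin and waits for xz to finish.
        if proc.returncode != 0:
            raise RuntimeError("xz failed")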

-

153 

-

154 

-

155def generate_tar_file_member( 

-

156 tar_members: Iterable[TarMember], 

-

157 compression_cmd: List[str], 

-

158) -> Callable[[BinaryIO], None]: 

-

159 def _impl(fd: BinaryIO) -> None: 

-

160 _generate_tar_file( 

-

161 tar_members, 

-

162 compression_cmd, 

-

163 fd, 

-

164 ) 

-

165 

-

166 return _impl 

-

167 

-

168 

-

169def _xz_cmdline( 

-

170 compression_rule: "Compression", 

-

171 parsed_args: Optional[argparse.Namespace], 

-

172) -> List[str]: 

-

173 compression_level = compression_rule.effective_compression_level(parsed_args) 

-

174 cmdline = ["xz", "-T2", "-" + str(compression_level)] 

-

175 strategy = None if parsed_args is None else parsed_args.compression_strategy 

-

176 if strategy is None:  # coverage: 176 ↛ 178, condition was never false

-

177 strategy = "none" 

-

178 if strategy != "none":  # coverage: 178 ↛ 179, condition was never true

-

179 cmdline.append("--" + strategy) 

-

180 cmdline.append("--no-adjust") 

-

181 return cmdline 

-

182 

-

183 

-

184def _gzip_cmdline( 

-

185 compression_rule: "Compression", 

-

186 parsed_args: Optional[argparse.Namespace], 

-

187) -> List[str]: 

-

188 compression_level = compression_rule.effective_compression_level(parsed_args) 

-

189 cmdline = ["gzip", "-n" + str(compression_level)] 

-

190 strategy = None if parsed_args is None else parsed_args.compression_strategy 

-

191 if strategy is not None and strategy != "none": 

-

192 raise ValueError( 

-

193 f"Not implemented: Compression strategy {strategy}" 

-

194 " for gzip is currently unsupported (but dpkg-deb does)" 

-

195 ) 

-

196 return cmdline 

-

197 

-

198 

-

199def _uncompressed_cmdline( 

-

200 _unused_a: "Compression", 

-

201 _unused_b: Optional[argparse.Namespace], 

-

202) -> List[str]: 

-

203 return ["cat"] 

-

204 

-

205 

-

206class Compression: 

-

207 def __init__( 

-

208 self, 

-

209 default_compression_level: int, 

-

210 extension: str, 

-

211 allowed_strategies: FrozenSet[str], 

-

212 cmdline_builder: Callable[ 

-

213 ["Compression", Optional[argparse.Namespace]], List[str] 

-

214 ], 

-

215 ) -> None: 

-

216 self.default_compression_level = default_compression_level 

-

217 self.extension = extension 

-

218 self.allowed_strategies = allowed_strategies 

-

219 self.cmdline_builder = cmdline_builder 

-

220 

-

221 def __repr__(self) -> str: 

-

222 return f"<{self.__class__.__name__} {self.extension}>" 

-

223 

-

224 def effective_compression_level( 

-

225 self, parsed_args: Optional[argparse.Namespace] 

-

226 ) -> int: 

-

227 if parsed_args and parsed_args.compression_level is not None:  # coverage: 227 ↛ 228, condition was never true

-

228 return cast("int", parsed_args.compression_level) 

-

229 return self.default_compression_level 

-

230 

-

231 def as_cmdline(self, parsed_args: Optional[argparse.Namespace]) -> List[str]: 

-

232 return self.cmdline_builder(self, parsed_args) 

-

233 

-

234 def with_extension(self, filename: str) -> str: 

-

235 return filename + self.extension 

-

236 

-

237 

-

238COMPRESSIONS = { 

-

239 "xz": Compression(6, ".xz", frozenset({"none", "extreme"}), _xz_cmdline), 

-

240 "gzip": Compression( 

-

241 9, 

-

242 ".gz", 

-

243 frozenset({"none", "filtered", "huffman", "rle", "fixed"}), 

-

244 _gzip_cmdline, 

-

245 ), 

-

246 "none": Compression(0, "", frozenset({"none"}), _uncompressed_cmdline), 

-

247} 

-

248 

-

249 

-

250def _normalize_compression_args(parsed_args: argparse.Namespace) -> argparse.Namespace: 

-

251 if ( 

-

252 parsed_args.compression_level == 0 

-

253 and parsed_args.compression_algorithm == "gzip" 

-

254 ): 

-

255 print( 

-

256 "Note: Mapping compression algorithm to none for compatibility with dpkg-deb (due to -Zgzip -z0)" 

-

257 ) 

-

258 setattr(parsed_args, "compression_algorithm", "none") 

-

259 

-

260 compression = COMPRESSIONS[parsed_args.compression_algorithm] 

-

261 strategy = parsed_args.compression_strategy 

-

262 if strategy is not None and strategy not in compression.allowed_strategies: 

-

263 _error( 

-

264 f'Compression algorithm "{parsed_args.compression_algorithm}" does not support compression strategy' 

-

265 f' "{strategy}". Allowed values: {", ".join(sorted(compression.allowed_strategies))}' 

-

266 ) 

-

267 return parsed_args 

-

268 

-

269 

-

270def parse_args() -> argparse.Namespace: 

-

271 try: 

-

272 compression_level_default = int(os.environ["DPKG_DEB_COMPRESSOR_LEVEL"]) 

-

273 except (KeyError, ValueError): 

-

274 compression_level_default = None 

-

275 

-

276 try: 

-

277 compression_type = os.environ["DPKG_DEB_COMPRESSOR_TYPE"] 

-

278 except (KeyError, ValueError): 

-

279 compression_type = "xz" 

-

280 

-

281 try: 

-

282 threads_max = int(os.environ["DPKG_DEB_THREADS_MAX"]) 

-

283 except (KeyError, ValueError): 

-

284 threads_max = None 

-

285 

-

286 description = textwrap.dedent( 

-

287 """\ 

-

288 THIS IS A PROTOTYPE "dpkg-deb -b" emulator with basic manifest support 

-

289 

-

290 DO NOT USE THIS TOOL DIRECTLY. It has no stability guarantees and will be removed as

-

291 soon as "dpkg-deb -b" grows support for the relevant features. 

-

292 

-

293 This tool is a prototype "dpkg-deb -b"-like interface for compiling a Debian package 

-

294 without requiring root even for static ownership. It is a temporary stand-in for 

-

295 "dpkg-deb -b" until "dpkg-deb -b" will get support for a manifest. 

-

296 

-

297 The tool operates on an internal JSON based manifest for now, because it was faster 

-

298 than building an mtree parser (which is the format that dpkg will likely end up 

-

299 using). 

-

300 

-

301 As the tool is not meant to be used directly, it is full of annoying paper cuts that 

-

302 I refuse to fix or maintain. Use the high level tool instead. 

-

303 

-

304 """ 

-

305 ) 

-

306 

-

307 parser = ColorizedArgumentParser( 

-

308 description=description, 

-

309 formatter_class=argparse.RawDescriptionHelpFormatter, 

-

310 allow_abbrev=False, 

-

311 prog=program_name(), 

-

312 ) 

-

313 parser.add_argument("--version", action="version", version=__version__) 

-

314 parser.add_argument( 

-

315 "package_root_dir", 

-

316 metavar="PACKAGE_ROOT_DIR", 

-

317 help="Root directory of the package. Must contain a DEBIAN directory", 

-

318 ) 

-

319 parser.add_argument( 

-

320 "package_output_path", 

-

321 metavar="PATH", 

-

322 help="Path where the package should be placed. If it is directory," 

-

323 " the base name will be determined from the package metadata", 

-

324 ) 

-

325 

-

326 parser.add_argument( 

-

327 "--intermediate-package-manifest", 

-

328 dest="package_manifest", 

-

329 metavar="JSON_FILE", 

-

330 action="store", 

-

331 default=None, 

-

332 help="INTERMEDIATE package manifest (JSON!)", 

-

333 ) 

-

334 parser.add_argument( 

-

335 "--root-owner-group", 

-

336 dest="root_owner_group", 

-

337 action="store_true", 

-

338 help="Ignored. Accepted for compatibility with dpkg-deb -b", 

-

339 ) 

-

340 parser.add_argument( 

-

341 "-b", 

-

342 "--build", 

-

343 dest="build_param", 

-

344 action="store_true", 

-

345 help="Ignored. Accepted for compatibility with dpkg-deb", 

-

346 ) 

-

347 parser.add_argument( 

-

348 "--source-date-epoch", 

-

349 dest="source_date_epoch", 

-

350 action="store", 

-

351 type=int, 

-

352 default=None, 

-

353 help="Source date epoch (can also be given via the SOURCE_DATE_EPOCH environ variable", 

-

354 ) 

-

355 parser.add_argument( 

-

356 "-Z", 

-

357 dest="compression_algorithm", 

-

358 choices=COMPRESSIONS, 

-

359 default=compression_type, 

-

360 help="The compression algorithm to be used", 

-

361 ) 

-

362 parser.add_argument( 

-

363 "-z", 

-

364 dest="compression_level", 

-

365 metavar="{0-9}", 

-

366 choices=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 

-

367 default=compression_level_default, 

-

368 type=int, 

-

369 help="The compression level to be used", 

-

370 ) 

-

371 parser.add_argument( 

-

372 "-S", 

-

373 dest="compression_strategy", 

-

374 # We have a different default for xz when strategy is unset and we are building a udeb 

-

375 action="store", 

-

376 default=None, 

-

377 help="The compression algorithm to be used. Concrete values depend on the compression" 

-

378 ' algorithm, but the value "none" is always allowed', 

-

379 ) 

-

380 parser.add_argument( 

-

381 "--uniform-compression", 

-

382 dest="uniform_compression", 

-

383 action="store_true", 

-

384 default=True, 

-

385 help="Whether to use the same compression for the control.tar and the data.tar." 

-

386 " The default is to use uniform compression.", 

-

387 ) 

-

388 parser.add_argument( 

-

389 "--no-uniform-compression", 

-

390 dest="uniform_compression", 

-

391 action="store_false", 

-

392 default=True, 

-

393 help="Disable uniform compression (see --uniform-compression)", 

-

394 ) 

-

395 parser.add_argument( 

-

396 "--threads-max", 

-

397 dest="threads_max", 

-

398 default=threads_max, 

-

399 # TODO: Support this properly 

-

400 type=int, 

-

401 help="Ignored; accepted for compatibility", 

-

402 ) 

-

403 parser.add_argument( 

-

404 "-d", 

-

405 "--debug", 

-

406 dest="debug_mode", 

-

407 action="store_true", 

-

408 default=False, 

-

409 help="Enable debug logging and raw stack traces on errors", 

-

410 ) 

-

411 

-

412 parsed_args = parser.parse_args() 

-

413 parsed_args = _normalize_compression_args(parsed_args) 

-

414 

-

415 return parsed_args 

-

416 

-

417 

-

418def _ctrl_member( 

-

419 member_path: str, 

-

420 fs_path: Optional[str] = None, 

-

421 path_type: PathType = PathType.FILE, 

-

422 mode: int = 0o644, 

-

423 mtime: int = 0, 

-

424) -> TarMember: 

-

425 if fs_path is None:  # coverage: 425 ↛ 426, condition was never true

-

426 assert member_path.startswith("./") 

-

427 fs_path = "DEBIAN" + member_path[1:] 

-

428 return TarMember( 

-

429 member_path=member_path, 

-

430 path_type=path_type, 

-

431 fs_path=fs_path, 

-

432 mode=mode, 

-

433 owner="root", 

-

434 uid=0, 

-

435 group="root", 

-

436 gid=0, 

-

437 mtime=mtime, 

-

438 ) 

-

439 

-

440 

-

441CTRL_MEMBER_SCRIPTS = { 

-

442 "postinst", 

-

443 "preinst", 

-

444 "postrm", 

-

445 "prerm", 

-

446 "config", 

-

447 "isinstallable", 

-

448} 

-

449 

-

450 

-

451def _ctrl_tar_members(package_root_dir: str, mtime: int) -> Iterable[TarMember]: 

-

452 debian_root = os.path.join(package_root_dir, "DEBIAN") 

-

453 dir_st = os.stat(debian_root) 

-

454 dir_mtime = int(dir_st.st_mtime) 

-

455 yield _ctrl_member( 

-

456 "./", 

-

457 debian_root, 

-

458 path_type=PathType.DIRECTORY, 

-

459 mode=0o0755, 

-

460 mtime=min(mtime, dir_mtime), 

-

461 ) 

-

462 with os.scandir(debian_root) as dir_iter: 

-

463 for ctrl_member in sorted(dir_iter, key=operator.attrgetter("name")): 

-

464 st = os.stat(ctrl_member) 

-

465 if not stat.S_ISREG(st.st_mode):  # coverage: 465 ↛ 466, condition was never true

-

466 _error( 

-

467 f"{ctrl_member.path} is not a file and all control.tar members ought to be files!" 

-

468 ) 

-

469 file_mtime = int(st.st_mtime) 

-

470 yield _ctrl_member( 

-

471 f"./{ctrl_member.name}", 

-

472 path_type=PathType.FILE, 

-

473 fs_path=ctrl_member.path, 

-

474 mode=0o0755 if ctrl_member.name in CTRL_MEMBER_SCRIPTS else 0o0644, 

-

475 mtime=min(mtime, file_mtime), 

-

476 ) 

-

477 

-

478 

-

479def parse_manifest(manifest_path: "Optional[str]") -> "List[TarMember]": 

-

480 if manifest_path is None:  # coverage: 480 ↛ 481, condition was never true

-

481 _error(f"--intermediate-package-manifest is mandatory for now") 

-

482 return TarMember.parse_intermediate_manifest(manifest_path) 

-

483 

-

484 

-

485def main() -> None: 

-

486 setup_logging() 

-

487 parsed_args = parse_args() 

-

488 root_dir: str = parsed_args.package_root_dir 

-

489 output_path: str = parsed_args.package_output_path 

-

490 mtime = resolve_source_date_epoch(parsed_args.source_date_epoch) 

-

491 

-

492 data_compression: Compression = COMPRESSIONS[parsed_args.compression_algorithm] 

-

493 data_compression_cmd = data_compression.as_cmdline(parsed_args) 

-

494 if parsed_args.uniform_compression: 

-

495 ctrl_compression = data_compression 

-

496 ctrl_compression_cmd = data_compression_cmd 

-

497 else: 

-

498 ctrl_compression = COMPRESSIONS["gzip"] 

-

499 ctrl_compression_cmd = COMPRESSIONS["gzip"].as_cmdline(None) 

-

500 

-

501 if output_path.endswith("/") or os.path.isdir(output_path): 

-

502 deb_file = os.path.join( 

-

503 output_path, 

-

504 compute_output_filename(os.path.join(root_dir, "DEBIAN"), False), 

-

505 ) 

-

506 else: 

-

507 deb_file = output_path 

-

508 

-

509 pack( 

-

510 deb_file, 

-

511 ctrl_compression, 

-

512 data_compression, 

-

513 root_dir, 

-

514 parsed_args.package_manifest, 

-

515 mtime, 

-

516 ctrl_compression_cmd, 

-

517 data_compression_cmd, 

-

518 prefer_raw_exceptions=not parsed_args.debug_mode, 

-

519 ) 

-

520 

-

521 

-

522def pack( 

-

523 deb_file: str, 

-

524 ctrl_compression: Compression, 

-

525 data_compression: Compression, 

-

526 root_dir: str, 

-

527 package_manifest: "Optional[str]", 

-

528 mtime: int, 

-

529 ctrl_compression_cmd: List[str], 

-

530 data_compression_cmd: List[str], 

-

531 prefer_raw_exceptions: bool = False, 

-

532) -> None: 

-

533 data_tar_members = parse_manifest(package_manifest) 

-

534 members = [ 

-

535 ArMember("debian-binary", mtime, fixed_binary=b"2.0\n"), 

-

536 ArMember( 

-

537 ctrl_compression.with_extension("control.tar"), 

-

538 mtime, 

-

539 write_to_impl=generate_tar_file_member( 

-

540 _ctrl_tar_members(root_dir, mtime), 

-

541 ctrl_compression_cmd, 

-

542 ), 

-

543 ), 

-

544 ArMember( 

-

545 data_compression.with_extension("data.tar"), 

-

546 mtime, 

-

547 write_to_impl=generate_tar_file_member( 

-

548 data_tar_members, 

-

549 data_compression_cmd, 

-

550 ), 

-

551 ), 

-

552 ] 

-

553 generate_ar_archive(deb_file, mtime, members, prefer_raw_exceptions) 

-

554 

-

555 

-

556if __name__ == "__main__": 

-

557 main() 

diff --git a/coverage-report/d_36a196ce5f578895___init___py.html b/coverage-report/d_36a196ce5f578895___init___py.html
deleted file mode 100644
index 08fe16e..0000000
--- a/coverage-report/d_36a196ce5f578895___init___py.html
+++ /dev/null
@@ -1,99 +0,0 @@
[deleted HTML coverage page: src/debputy/packaging/__init__.py, 100% of 0 statements]

diff --git a/coverage-report/d_36a196ce5f578895_alternatives_py.html b/coverage-report/d_36a196ce5f578895_alternatives_py.html
deleted file mode 100644
index 4409759..0000000
--- a/coverage-report/d_36a196ce5f578895_alternatives_py.html
+++ /dev/null
@@ -1,324 +0,0 @@
[deleted HTML coverage page: src/debputy/packaging/alternatives.py, 74% of 75 statements; the deleted page's embedded source follows]

1import textwrap 

-

2from typing import List, Dict, Tuple, Mapping 

-

3 

-

4from debian.deb822 import Deb822 

-

5 

-

6from debputy.maintscript_snippet import MaintscriptSnippetContainer, MaintscriptSnippet 

-

7from debputy.packager_provided_files import PackagerProvidedFile 

-

8from debputy.packages import BinaryPackage 

-

9from debputy.packaging.makeshlibs import resolve_reserved_provided_file 

-

10from debputy.plugin.api import VirtualPath 

-

11from debputy.util import _error, escape_shell, POSTINST_DEFAULT_CONDITION 

-

12 

-

13# Match debhelper (minus one space at each end, which comes

-

14# via join). 

-

15LINE_PREFIX = "\\\n " 

-

16 

-

17 

-

18def process_alternatives( 

-

19 binary_package: BinaryPackage, 

-

20 fs_root: VirtualPath, 

-

21 reserved_packager_provided_files: Dict[str, List[PackagerProvidedFile]], 

-

22 maintscript_snippets: Dict[str, MaintscriptSnippetContainer], 

-

23) -> None: 

-

24 if binary_package.is_udeb:  # coverage: 24 ↛ 25, condition was never true

-

25 return 

-

26 

-

27 provided_alternatives_file = resolve_reserved_provided_file( 

-

28 "alternatives", 

-

29 reserved_packager_provided_files, 

-

30 ) 

-

31 if provided_alternatives_file is None:  # coverage: 31 ↛ 32, condition was never true

-

32 return 

-

33 

-

34 with provided_alternatives_file.open() as fd: 

-

35 alternatives = list(Deb822.iter_paragraphs(fd)) 

-

36 

-

37 for no, alternative in enumerate(alternatives): 

-

38 process_alternative( 

-

39 provided_alternatives_file.fs_path, 

-

40 fs_root, 

-

41 alternative, 

-

42 no, 

-

43 maintscript_snippets, 

-

44 ) 

-

45 

-

46 

-

47def process_alternative( 

-

48 provided_alternatives_fs_path: str, 

-

49 fs_root: VirtualPath, 

-

50 alternative_deb822: Deb822, 

-

51 no: int, 

-

52 maintscript_snippets: Dict[str, MaintscriptSnippetContainer], 

-

53) -> None: 

-

54 name = _mandatory_key( 

-

55 "Name", 

-

56 alternative_deb822, 

-

57 provided_alternatives_fs_path, 

-

58 f"Stanza number {no}", 

-

59 ) 

-

60 error_context = f"Alternative named {name}" 

-

61 link_path = _mandatory_key( 

-

62 "Link", 

-

63 alternative_deb822, 

-

64 provided_alternatives_fs_path, 

-

65 error_context, 

-

66 ) 

-

67 impl_path = _mandatory_key( 

-

68 "Alternative", 

-

69 alternative_deb822, 

-

70 provided_alternatives_fs_path, 

-

71 error_context, 

-

72 ) 

-

73 priority = _mandatory_key( 

-

74 "Priority", 

-

75 alternative_deb822, 

-

76 provided_alternatives_fs_path, 

-

77 error_context, 

-

78 ) 

-

79 if "/" in name: 79 ↛ 80line 79 didn't jump to line 80, because the condition on line 79 was never true

-

80 _error( 

-

81 f'The "Name" ({link_path}) key must be a basename and cannot contain slashes' 

-

82 f" ({error_context} in {provided_alternatives_fs_path})" 

-

83 ) 

-

84 if link_path == impl_path:  # coverage: 84 ↛ 85, condition was never true

-

85 _error( 

-

86 f'The "Link" key and the "Alternative" key must not have the same value' 

-

87 f" ({error_context} in {provided_alternatives_fs_path})" 

-

88 ) 

-

89 impl = fs_root.lookup(impl_path) 

-

90 if impl is None or impl.is_dir:  # coverage: 90 ↛ 91, condition was never true

-

91 _error( 

-

92 f'The path listed in "Alternative" ("{impl_path}") does not exist' 

-

93 f" in the package. ({error_context} in {provided_alternatives_fs_path})" 

-

94 ) 

-

95 for key in ["Slave", "Slaves", "Slave-Links"]: 

-

96 if key in alternative_deb822:  # coverage: 96 ↛ 97, condition was never true

-

97 _error( 

-

98 f'Please use "Dependents" instead of "{key}".' 

-

99 f" ({error_context} in {provided_alternatives_fs_path})" 

-

100 ) 

-

101 dependents = alternative_deb822.get("Dependents") 

-

102 install_command = [ 

-

103 escape_shell( 

-

104 "update-alternatives", 

-

105 "--install", 

-

106 link_path, 

-

107 name, 

-

108 impl_path, 

-

109 priority, 

-

110 ) 

-

111 ] 

-

112 remove_command = [ 

-

113 escape_shell( 

-

114 "update-alternatives", 

-

115 "--remove", 

-

116 name, 

-

117 impl_path, 

-

118 ) 

-

119 ] 

-

120 if dependents:  # coverage: 120 ↛ 153, condition was never false

-

121 seen_link_path = set() 

-

122 for line in dependents.splitlines(): 

-

123 line = line.strip() 

-

124 if not line: # First line is usually empty 

-

125 continue 

-

126 dlink_path, dlink_name, dimpl_path = parse_dependent_link( 

-

127 line, 

-

128 error_context, 

-

129 provided_alternatives_fs_path, 

-

130 ) 

-

131 if dlink_path in seen_link_path:  # coverage: 131 ↛ 132, condition was never true

-

132 _error( 

-

133 f'The Dependent link path "{dlink_path}" was used twice.' 

-

134 f" ({error_context} in {provided_alternatives_fs_path})" 

-

135 ) 

-

136 dimpl = fs_root.lookup(dimpl_path) 

-

137 if dimpl is None or dimpl.is_dir:  # coverage: 137 ↛ 138, condition was never true

-

138 _error( 

-

139 f'The path listed in "Dependents" ("{dimpl_path}") does not exist' 

-

140 f" in the package. ({error_context} in {provided_alternatives_fs_path})" 

-

141 ) 

-

142 seen_link_path.add(dlink_path) 

-

143 install_command.append(LINE_PREFIX) 

-

144 install_command.append( 

-

145 escape_shell( 

-

146 # update-alternatives still uses this old option name :-/ 

-

147 "--slave", 

-

148 dlink_path, 

-

149 dlink_name, 

-

150 dimpl_path, 

-

151 ) 

-

152 ) 

-

153 postinst = textwrap.dedent( 

-

154 """\ 

-

155 if {CONDITION}; then 

-

156 {COMMAND} 

-

157 fi 

-

158 """ 

-

159 ).format( 

-

160 CONDITION=POSTINST_DEFAULT_CONDITION, 

-

161 COMMAND=" ".join(install_command), 

-

162 ) 

-

163 

-

164 prerm = textwrap.dedent( 

-

165 """\ 

-

166 if [ "$1" = "remove" ]; then 

-

167 {COMMAND} 

-

168 fi 

-

169 """ 

-

170 ).format(COMMAND=" ".join(remove_command)) 

-

171 maintscript_snippets["postinst"].append( 

-

172 MaintscriptSnippet( 

-

173 f"debputy (via {provided_alternatives_fs_path})", 

-

174 snippet=postinst, 

-

175 ) 

-

176 ) 

-

177 maintscript_snippets["prerm"].append( 

-

178 MaintscriptSnippet( 

-

179 f"debputy (via {provided_alternatives_fs_path})", 

-

180 snippet=prerm, 

-

181 ) 

-

182 ) 
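
The stanza shape that process_alternative() consumes, built with hypothetical values (the keys mirror the mandatory ones read above; each Dependents line is a "link-path link-name impl-path" triple):

    from debian.deb822 import Deb822

    stanza = Deb822()
    stanza["Name"] = "editor"
    stanza["Link"] = "/usr/bin/editor"
    stanza["Alternative"] = "/usr/bin/vim.basic"
    stanza["Priority"] = "50"
    stanza["Dependents"] = (
        "\n /usr/share/man/man1/editor.1.gz editor.1.gz /usr/share/man/man1/vim.1.gz"
    )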

-

183 

-

184 

-

185def parse_dependent_link( 

-

186 line: str, 

-

187 error_context: str, 

-

188 provided_alternatives_file: str, 

-

189) -> Tuple[str, str, str]: 

-

190 parts = line.split() 

-

191 if len(parts) != 3:  # coverage: 191 ↛ 192, condition was never true

-

192 if len(parts) > 1: 

-

193 pass 

-

194 _error( 

-

195 f"The each line in Dependents links must have exactly 3 space separated parts." 

-

196 f' The "{line}" split into {len(parts)} part(s).' 

-

197 f" ({error_context} in {provided_alternatives_file})" 

-

198 ) 

-

199 

-

200 dlink_path, dlink_name, dimpl_path = parts 

-

201 if "/" in dlink_name: 201 ↛ 202line 201 didn't jump to line 202, because the condition on line 201 was never true

-

202 _error( 

-

203 f'The Dependent link name "{dlink_path}" must be a basename and cannot contain slashes' 

-

204 f" ({error_context} in {provided_alternatives_file})" 

-

205 ) 

-

206 if dlink_path == dimpl_path:  # coverage: 206 ↛ 207, condition was never true

-

207 _error( 

-

208 f'The Dependent Link path and Alternative must not have the same value ["{dlink_path}"]' 

-

209 f" ({error_context} in {provided_alternatives_file})" 

-

210 ) 

-

211 return dlink_path, dlink_name, dimpl_path 

-

212 

-

213 

-

214def _mandatory_key( 

-

215 key: str, 

-

216 alternative_deb822: Mapping[str, str], 

-

217 provided_alternatives_file: str, 

-

218 error_context: str, 

-

219) -> str: 

-

220 try: 

-

221 return alternative_deb822[key] 

-

222 except KeyError: 

-

223 _error( 

-

224 f'Missing mandatory key "{key}" in {provided_alternatives_file} ({error_context})' 

-

225 ) 

diff --git a/coverage-report/d_36a196ce5f578895_debconf_templates_py.html b/coverage-report/d_36a196ce5f578895_debconf_templates_py.html
deleted file mode 100644
index 9f17baa..0000000
--- a/coverage-report/d_36a196ce5f578895_debconf_templates_py.html
+++ /dev/null
@@ -1,176 +0,0 @@
[deleted HTML coverage page: src/debputy/packaging/debconf_templates.py, 32% of 32 statements; the deleted page's embedded source follows]

1import os.path 

-

2import shutil 

-

3import subprocess 

-

4import textwrap 

-

5from typing import List, Dict 

-

6 

-

7from debputy.maintscript_snippet import MaintscriptSnippetContainer, MaintscriptSnippet 

-

8from debputy.packager_provided_files import PackagerProvidedFile 

-

9from debputy.packages import BinaryPackage 

-

10from debputy.packaging.makeshlibs import resolve_reserved_provided_file 

-

11from debputy.plugin.api.spec import FlushableSubstvars 

-

12from debputy.util import _error, escape_shell 

-

13 

-

14# Match debhelper (minus one space at each end, which comes

-

15# via join). 

-

16LINE_PREFIX = "\\\n " 

-

17 

-

18 

-

19def process_debconf_templates( 

-

20 binary_package: BinaryPackage, 

-

21 reserved_packager_provided_files: Dict[str, List[PackagerProvidedFile]], 

-

22 maintscript_snippets: Dict[str, MaintscriptSnippetContainer], 

-

23 substvars: FlushableSubstvars, 

-

24 control_output_dir: str, 

-

25) -> None: 

-

26 provided_templates_file = resolve_reserved_provided_file( 

-

27 "templates", 

-

28 reserved_packager_provided_files, 

-

29 ) 

-

30 if provided_templates_file is None: 

-

31 return 

-

32 

-

33 templates_file = os.path.join(control_output_dir, "templates") 

-

34 debian_dir = provided_templates_file.parent_dir 

-

35 po_template_dir = debian_dir.get("po") if debian_dir is not None else None 

-

36 if po_template_dir is not None and po_template_dir.is_dir: 

-

37 with open(templates_file, "wb") as fd: 

-

38 cmd = [ 

-

39 "po2debconf", 

-

40 provided_templates_file.fs_path, 

-

41 ] 

-

42 print(f" {escape_shell(*cmd)} > {templates_file}") 

-

43 try: 

-

44 subprocess.check_call( 

-

45 cmd, 

-

46 stdout=fd.fileno(), 

-

47 ) 

-

48 except subprocess.CalledProcessError: 

-

49 _error( 

-

50 f"Failed to generate the templates files for {binary_package.name}. Please review " 

-

51 f" the output of {escape_shell('po-debconf', provided_templates_file.fs_path)}" 

-

52 " to understand the issue." 

-

53 ) 

-

54 else: 

-

55 shutil.copyfile(provided_templates_file.fs_path, templates_file) 

-

56 

-

57 dependency = ( 

-

58 "cdebconf-udeb" if binary_package.is_udeb else "debconf (>= 0.5) | debconf-2.0" 

-

59 ) 

-

60 substvars.add_dependency("misc:Depends", dependency) 

-

61 if not binary_package.is_udeb: 

-

62 # udebs do not have `postrm` scripts 

-

63 maintscript_snippets["postrm"].append( 

-

64 MaintscriptSnippet( 

-

65 f"debputy (due to {provided_templates_file.fs_path})", 

-

66 # FIXME: `debconf` sourcing should be an overarching feature 

-

67 snippet=textwrap.dedent( 

-

68 """\ 

-

69 if [ "$1" = purge ] && [ -e /usr/share/debconf/confmodule ]; then 

-

70 . /usr/share/debconf/confmodule 

-

71 db_purge 

-

72 db_stop 

-

73 fi 

-

74 """ 

-

75 ), 

-

76 ) 

-

77 ) 

diff --git a/coverage-report/d_36a196ce5f578895_makeshlibs_py.html b/coverage-report/d_36a196ce5f578895_makeshlibs_py.html
deleted file mode 100644
index 1411d28..0000000
--- a/coverage-report/d_36a196ce5f578895_makeshlibs_py.html
+++ /dev/null
@@ -1,413 +0,0 @@
[deleted HTML coverage page: src/debputy/packaging/makeshlibs.py, 18% of 182 statements; the deleted page's embedded source follows]

1import collections 

-

2import dataclasses 

-

3import os 

-

4import re 

-

5import shutil 

-

6import stat 

-

7import subprocess 

-

8import tempfile 

-

9from contextlib import suppress 

-

10from typing import Optional, Set, List, Tuple, TYPE_CHECKING, Dict, IO 

-

11 

-

12from debputy import elf_util 

-

13from debputy.elf_util import ELF_LINKING_TYPE_DYNAMIC 

-

14from debputy.exceptions import DebputyDpkgGensymbolsError 

-

15from debputy.packager_provided_files import PackagerProvidedFile 

-

16from debputy.packages import BinaryPackage 

-

17from debputy.plugin.api import VirtualPath, PackageProcessingContext, BinaryCtrlAccessor 

-

18from debputy.util import ( 

-

19 print_command, 

-

20 escape_shell, 

-

21 assume_not_none, 

-

22 _normalize_link_target, 

-

23 _warn, 

-

24 _error, 

-

25) 

-

26 

-

27if TYPE_CHECKING: 

-

28 from debputy.highlevel_manifest import HighLevelManifest 

-

29 

-

30 

-

31HAS_SONAME = re.compile(r"\s+SONAME\s+(\S+)") 

-

32SHLIBS_LINE_READER = re.compile(r"^(?:(\S*):)?\s*(\S+)\s*(\S+)\s*(\S.+)$") 

-

33SONAME_FORMATS = [ 

-

34 re.compile(r"\s+SONAME\s+((.*)[.]so[.](.*))"), 

-

35 re.compile(r"\s+SONAME\s+((.*)-(\d.*)[.]so)"), 

-

36] 

-

37 

-

38 

-

39@dataclasses.dataclass 

-

40class SONAMEInfo: 

-

41 path: VirtualPath 

-

42 full_soname: str 

-

43 library: str 

-

44 major_version: Optional[str] 

-

45 

-

46 

-

47class ShlibsContent: 

-

48 def __init__(self) -> None: 

-

49 self._deb_lines: List[str] = [] 

-

50 self._udeb_lines: List[str] = [] 

-

51 self._seen: Set[Tuple[str, str, str]] = set() 

-

52 

-

53 def add_library( 

-

54 self, 

-

55 library: str, 

-

56 major_version: str, 

-

57 dependency: str, 

-

58 *, 

-

59 udeb_dependency: Optional[str] = None, 

-

60 ) -> None: 

-

61 line = f"{library} {major_version} {dependency}\n" 

-

62 seen_key = ("deb", library, major_version) 

-

63 if seen_key not in self._seen: 

-

64 self._deb_lines.append(line) 

-

65 self._seen.add(seen_key) 

-

66 if udeb_dependency is not None: 

-

67 seen_key = ("udeb", library, major_version) 

-

68 udeb_line = f"udeb: {library} {major_version} {udeb_dependency}\n" 

-

69 if seen_key not in self._seen: 

-

70 self._udeb_lines.append(udeb_line) 

-

71 self._seen.add(seen_key) 

-

72 

-

73 def __bool__(self) -> bool: 

-

74 return bool(self._deb_lines) or bool(self._udeb_lines) 

-

75 

-

76 def add_entries_from_shlibs_file(self, fd: IO[str]) -> None: 

-

77 for line in fd: 

-

78 if line.startswith("#") or line.isspace(): 

-

79 continue 

-

80 m = SHLIBS_LINE_READER.match(line) 

-

81 if not m: 

-

82 continue 

-

83 shtype, library, major_version, dependency = m.groups() 

-

84 if shtype is None or shtype == "": 

-

85 shtype = "deb" 

-

86 seen_key = (shtype, library, major_version) 

-

87 if seen_key in self._seen: 

-

88 continue 

-

89 self._seen.add(seen_key) 

-

90 if shtype == "udeb": 

-

91 self._udeb_lines.append(line) 

-

92 else: 

-

93 self._deb_lines.append(line) 

-

94 

-

95 def write_to(self, fd: IO[str]) -> None: 

-

96 fd.writelines(self._deb_lines) 

-

97 fd.writelines(self._udeb_lines) 

-

98 

-

99 

-

100def extract_so_name( 

-

101 binary_package: BinaryPackage, 

-

102 path: VirtualPath, 

-

103) -> Optional[SONAMEInfo]: 

-

104 objdump = binary_package.cross_command("objdump") 

-

105 output = subprocess.check_output([objdump, "-p", path.fs_path], encoding="utf-8") 

-

106 for r in SONAME_FORMATS: 

-

107 m = r.search(output) 

-

108 if m: 

-

109 full_soname, library, major_version = m.groups() 

-

110 return SONAMEInfo(path, full_soname, library, major_version) 

-

111 m = HAS_SONAME.search(output) 

-

112 if not m: 

-

113 return None 

-

114 full_soname = m.group(1) 

-

115 return SONAMEInfo(path, full_soname, full_soname, None) 

-

116 

-

117 

-

118def extract_soname_info( 

-

119 binary_package: BinaryPackage, 

-

120 fs_root: VirtualPath, 

-

121) -> List[SONAMEInfo]: 

-

122 so_files = elf_util.find_all_elf_files( 

-

123 fs_root, 

-

124 with_linking_type=ELF_LINKING_TYPE_DYNAMIC, 

-

125 ) 

-

126 result = [] 

-

127 for so_file in so_files: 

-

128 soname_info = extract_so_name(binary_package, so_file) 

-

129 if not soname_info: 

-

130 continue 

-

131 result.append(soname_info) 

-

132 return result 

-

133 

-

134 

-

135def _compute_shlibs_content( 

-

136 binary_package: BinaryPackage, 

-

137 manifest: "HighLevelManifest", 

-

138 soname_info_list: List[SONAMEInfo], 

-

139 udeb_package_name: Optional[str], 

-

140 combined_shlibs: ShlibsContent, 

-

141) -> Tuple[ShlibsContent, bool]: 

-

142 shlibs_file_contents = ShlibsContent() 

-

143 unversioned_so_seen = False 

-

144 strict_version = manifest.package_state_for(binary_package.name).binary_version 

-

145 if strict_version is not None: 

-

146 upstream_version = re.sub(r"-[^-]+$", "", strict_version) 

-

147 else: 

-

148 strict_version = manifest.substitution.substitute( 

-

149 "{{DEB_VERSION}}", "<internal-usage>" 

-

150 ) 

-

151 upstream_version = manifest.substitution.substitute( 

-

152 "{{DEB_VERSION_EPOCH_UPSTREAM}}", "<internal-usage>" 

-

153 ) 

-

154 

-

155 dependency = f"{binary_package.name} (>= {upstream_version})" 

-

156 strict_dependency = f"{binary_package.name} (= {strict_version})" 

-

157 udeb_dependency = None 

-

158 

-

159 if udeb_package_name is not None: 

-

160 udeb_dependency = f"{udeb_package_name} (>= {upstream_version})" 

-

161 

-

162 for soname_info in soname_info_list: 

-

163 if soname_info.major_version is None: 

-

164 unversioned_so_seen = True 

-

165 continue 

-

166 shlibs_file_contents.add_library( 

-

167 soname_info.library, 

-

168 soname_info.major_version, 

-

169 dependency, 

-

170 udeb_dependency=udeb_dependency, 

-

171 ) 

-

172 combined_shlibs.add_library( 

-

173 soname_info.library, 

-

174 soname_info.major_version, 

-

175 strict_dependency, 

-

176 udeb_dependency=udeb_dependency, 

-

177 ) 

-

178 

-

179 return shlibs_file_contents, unversioned_so_seen 

-

180 

-

181 

-

182def resolve_reserved_provided_file( 

-

183 basename: str, 

-

184 reserved_packager_provided_files: Dict[str, List[PackagerProvidedFile]], 

-

185) -> Optional[VirtualPath]: 

-

186 matches = reserved_packager_provided_files.get(basename) 

-

187 if matches is None: 187 ↛ 188line 187 didn't jump to line 188, because the condition on line 187 was never true

-

188 return None 

-

189 assert len(matches) < 2 

-

190 if matches: 190 ↛ 192line 190 didn't jump to line 192, because the condition on line 190 was never false

-

191 return matches[0].path 

-

192 return None 

-

193 

-

194 

-

195def generate_shlib_dirs( 

-

196 pkg: BinaryPackage, 

-

197 root_dir: str, 

-

198 soname_info_list: List[SONAMEInfo], 

-

199 materialized_dirs: List[str], 

-

200) -> None: 

-

201 dir_scanned: Dict[str, Dict[str, Set[str]]] = {} 

-

202 dirs: Dict[str, str] = {} 

-

203 

-

204 for soname_info in soname_info_list: 

-

205 elf_binary = soname_info.path 

-

206 p = assume_not_none(elf_binary.parent_dir) 

-

207 matches = dir_scanned.get(p.absolute) 

-

208 materialized_dir = dirs.get(p.absolute) 

-

209 if matches is None: 

-

210 matches = collections.defaultdict(set) 

-

211 for child in p.iterdir: 

-

212 if not child.is_symlink: 

-

213 continue 

-

214 target = _normalize_link_target(child.readlink()) 

-

215 if "/" in target: 

-

216 # The shlib symlinks (we are interested in) are relative to the same folder 

-

217 continue 

-

218 matches[target].add(child.name) 

-

219 dir_scanned[p.absolute] = matches 

-

220 symlinks = matches.get(elf_binary.name) 

-

221 if not symlinks: 

-

222 _warn( 

-

223 f"Could not find any SO symlinks pointing to {elf_binary.absolute} in {pkg.name} !?" 

-

224 ) 

-

225 continue 

-

226 if materialized_dir is None: 

-

227 materialized_dir = tempfile.mkdtemp(prefix=f"{pkg.name}_", dir=root_dir) 

-

228 materialized_dirs.append(materialized_dir) 

-

229 dirs[p.absolute] = materialized_dir 

-

230 

-

231 os.symlink(elf_binary.fs_path, os.path.join(materialized_dir, elf_binary.name)) 

-

232 for link in symlinks: 

-

233 os.symlink(elf_binary.name, os.path.join(materialized_dir, link)) 

-

234 

-

235 

-

236def compute_shlibs( 

-

237 binary_package: BinaryPackage, 

-

238 control_output_dir: str, 

-

239 fs_root: VirtualPath, 

-

240 manifest: "HighLevelManifest", 

-

241 udeb_package_name: Optional[str], 

-

242 ctrl: BinaryCtrlAccessor, 

-

243 reserved_packager_provided_files: Dict[str, List[PackagerProvidedFile]], 

-

244 combined_shlibs: ShlibsContent, 

-

245) -> List[SONAMEInfo]: 

-

246 assert not binary_package.is_udeb 

-

247 shlibs_file = os.path.join(control_output_dir, "shlibs") 

-

248 need_ldconfig = False 

-

249 so_files = elf_util.find_all_elf_files( 

-

250 fs_root, 

-

251 with_linking_type=ELF_LINKING_TYPE_DYNAMIC, 

-

252 ) 

-

253 sonames = extract_soname_info(binary_package, fs_root) 

-

254 provided_shlibs_file = resolve_reserved_provided_file( 

-

255 "shlibs", 

-

256 reserved_packager_provided_files, 

-

257 ) 

-

258 symbols_template_file = resolve_reserved_provided_file( 

-

259 "symbols", 

-

260 reserved_packager_provided_files, 

-

261 ) 

-

262 

-

263 if provided_shlibs_file: 

-

264 need_ldconfig = True 

-

265 unversioned_so_seen = False 

-

266 shutil.copyfile(provided_shlibs_file.fs_path, shlibs_file) 

-

267 with open(shlibs_file) as fd: 

-

268 combined_shlibs.add_entries_from_shlibs_file(fd) 

-

269 else: 

-

270 shlibs_file_contents, unversioned_so_seen = _compute_shlibs_content( 

-

271 binary_package, 

-

272 manifest, 

-

273 sonames, 

-

274 udeb_package_name, 

-

275 combined_shlibs, 

-

276 ) 

-

277 

-

278 if shlibs_file_contents: 

-

279 need_ldconfig = True 

-

280 with open(shlibs_file, "wt", encoding="utf-8") as fd: 

-

281 shlibs_file_contents.write_to(fd) 

-

282 

-

283 if symbols_template_file: 

-

284 symbols_file = os.path.join(control_output_dir, "symbols") 

-

285 symbols_cmd = [ 

-

286 "dpkg-gensymbols", 

-

287 f"-p{binary_package.name}", 

-

288 f"-I{symbols_template_file.fs_path}", 

-

289 f"-P{control_output_dir}", 

-

290 f"-O{symbols_file}", 

-

291 ] 

-

292 

-

293 if so_files: 

-

294 symbols_cmd.extend(f"-e{x.fs_path}" for x in so_files) 

-

295 print_command(*symbols_cmd) 

-

296 try: 

-

297 subprocess.check_call(symbols_cmd) 

-

298 except subprocess.CalledProcessError as e: 

-

299 # Wrap in a special error, so debputy can run the other packages. 

-

300 # The kde symbols helper relies on this behaviour 

-

301 raise DebputyDpkgGensymbolsError( 

-

302 f"Error while running command for {binary_package.name}: {escape_shell(*symbols_cmd)}" 

-

303 ) from e 

-

304 

-

305 with suppress(FileNotFoundError): 

-

306 st = os.stat(symbols_file) 

-

307 if stat.S_ISREG(st.st_mode) and st.st_size == 0: 

-

308 os.unlink(symbols_file) 

-

309 elif unversioned_so_seen: 

-

310 need_ldconfig = True 

-

311 

-

312 if need_ldconfig: 

-

313 ctrl.dpkg_trigger("activate-noawait", "ldconfig") 

-

314 return sonames 

-
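For orientation, the shlibs line layout and de-duplication behaviour implemented in the deleted makeshlibs listing can be exercised as below. This is a sketch against debputy's internal module (not a stable public API); the package names are illustrative:

    # Sketch only: debputy.packaging.makeshlibs is internal, not a stable API.
    import sys
    from debputy.packaging.makeshlibs import ShlibsContent

    shlibs = ShlibsContent()
    # Entries are de-duplicated per ("deb"/"udeb", library, major_version) key.
    shlibs.add_library("libfoo", "1", "libfoo1 (>= 1.0-1)",
                       udeb_dependency="libfoo1-udeb (>= 1.0-1)")
    shlibs.add_library("libfoo", "1", "libfoo1 (= 1.0-1)")  # same key: ignored
    shlibs.write_to(sys.stdout)
    # Output:
    #   libfoo 1 libfoo1 (>= 1.0-1)
    #   udeb: libfoo 1 libfoo1-udeb (>= 1.0-1)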
diff --git a/coverage-report/d_4b9be07fb6071cd2___init___py.html b/coverage-report/d_4b9be07fb6071cd2___init___py.html
deleted file mode 100644
index e69e477..0000000
--- a/coverage-report/d_4b9be07fb6071cd2___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[removed: generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/test_api/__init__.py — 100% of 3 statements covered. The module re-exports the plugin test API via __all__: initialize_plugin_under_test, build_virtual_file_system, package_metadata_context, manifest_variable_resolution_context, RegisteredPackagerProvidedFile, InitializedPluginUnderTest, and DEBPUTY_TEST_AGAINST_INSTALLED_PLUGINS.]
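Those exports are the entry points for plugin tests; a minimal usage sketch, lifted from the doctests and docstrings in the deleted listings (it assumes the test file sits next to the plugin's JSON description file):

    from debputy.plugin.api import virtual_path_def
    from debputy.plugin.api.test_api import (
        build_virtual_file_system,
        initialize_plugin_under_test,
    )

    # Implicit parent directories are inserted to keep the file system connected.
    fs_root = build_virtual_file_system([
        "./usr/share/doc/",                                       # a directory
        virtual_path_def("./bin/zcat", link_target="/bin/gzip"),  # a symlink
        virtual_path_def("./bin/gzip", mode=0o755),               # a file
    ])
    assert fs_root.lookup("./bin/zcat").readlink() == "/bin/gzip"

    # Loads the plugin the same way debputy would (via its JSON description).
    plugin = initialize_plugin_under_test()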
diff --git a/coverage-report/d_4b9be07fb6071cd2_test_impl_py.html b/coverage-report/d_4b9be07fb6071cd2_test_impl_py.html
deleted file mode 100644
index 3e1b2fc..0000000
--- a/coverage-report/d_4b9be07fb6071cd2_test_impl_py.html
+++ /dev/null
@@ -1,902 +0,0 @@
[removed: generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/test_api/test_impl.py — 82% of 296 statements covered. The page rendered the test-API implementation: loading a plugin under test via its JSON description, inline JSON, or a preloaded entry point (always loading debputy's own plugin first so conflicts are detected early); mocked package-processing contexts built from Deb822 fields with a faked dpkg architecture table; test providers for maintscript and binary-ctrl accessors and for the service registry; and InitializedPluginUnderTestImpl, which runs metadata detectors, package processors, and service detection/integration, and exposes manifest variables through a lazily resolving SubstitutionTable.]
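Building on that implementation, a hedged sketch of driving a metadata detector in a test. The detector ID and paths here are hypothetical; the call shapes match the deleted listing:

    from debputy.plugin.api.test_api import (
        build_virtual_file_system,
        initialize_plugin_under_test,
        package_metadata_context,
    )

    plugin = initialize_plugin_under_test()
    fs_root = build_virtual_file_system(["./usr/lib/libfoo.so.1"])
    # Emulate a udeb if the detector only applies to udebs.
    context = package_metadata_context(package_fields={"Package-Type": "udeb"})
    # "detect-foo" is a hypothetical ID; use one your plugin registers.
    metadata = plugin.run_metadata_detector("detect-foo", fs_root, context)
    # The result exposes substvars, triggers, and per-script maintscript snippets.
    postinst_snippets = metadata.maintscripts(maintscript="postinst")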
diff --git a/coverage-report/d_4b9be07fb6071cd2_test_spec_py.html b/coverage-report/d_4b9be07fb6071cd2_test_spec_py.html
deleted file mode 100644
index 4c0ed29..0000000
--- a/coverage-report/d_4b9be07fb6071cd2_test_spec_py.html
+++ /dev/null
@@ -1,463 +0,0 @@
[removed: generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/test_api/test_spec.py — 100% of 79 statements covered. The page rendered the public test-API specification: build_virtual_file_system with its doctests, the RegisteredTrigger, RegisteredMaintscript, and DetectedService dataclasses, RegisteredPackagerProvidedFile (including the compute_dest helper for verifying installed_path and post_formatting_rewrite behaviour), RegisteredMetadata, and the abstract InitializedPluginUnderTest interface.]
diff --git a/coverage-report/d_4f754ff76d8638bb___init___py.html b/coverage-report/d_4f754ff76d8638bb___init___py.html
deleted file mode 100644
index 012583e..0000000
--- a/coverage-report/d_4f754ff76d8638bb___init___py.html
+++ /dev/null
@@ -1,99 +0,0 @@
[99 deleted lines of generated HTML: coverage page for src/debputy/manifest_parser/__init__.py — 100% of 0 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]
diff --git a/coverage-report/d_4f754ff76d8638bb_base_types_py.html b/coverage-report/d_4f754ff76d8638bb_base_types_py.html
deleted file mode 100644
index 2c319ef..0000000
--- a/coverage-report/d_4f754ff76d8638bb_base_types_py.html
+++ /dev/null
@@ -1,539 +0,0 @@
[539 deleted lines of generated HTML: coverage page for src/debputy/manifest_parser/base_types.py — 84% of 213 statements; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200. The page is a verbatim source listing with per-line hit and branch annotations, covering ownership parsing against the base-passwd tables, the FileSystemMode/SymbolicMode/OctalMode classes, StaticFileSystemOwner/StaticFileSystemGroup, SymlinkTarget, and the FileSystemMatchRule hierarchy.]
- - - diff --git a/coverage-report/d_4f754ff76d8638bb_declarative_parser_py.html b/coverage-report/d_4f754ff76d8638bb_declarative_parser_py.html deleted file mode 100644 index 79bc071..0000000 --- a/coverage-report/d_4f754ff76d8638bb_declarative_parser_py.html +++ /dev/null @@ -1,2102 +0,0 @@ - - - - - Coverage for src/debputy/manifest_parser/declarative_parser.py: 76% - - - - - -
-
-

- Coverage for src/debputy/manifest_parser/declarative_parser.py: - 76% -

- -

- 781 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1import collections 

-

2import dataclasses 

-

3from typing import ( 

-

4 Any, 

-

5 Callable, 

-

6 Tuple, 

-

7 TypedDict, 

-

8 Dict, 

-

9 get_type_hints, 

-

10 Annotated, 

-

11 get_args, 

-

12 get_origin, 

-

13 TypeVar, 

-

14 Generic, 

-

15 FrozenSet, 

-

16 Mapping, 

-

17 Optional, 

-

18 cast, 

-

19 is_typeddict, 

-

20 Type, 

-

21 Union, 

-

22 List, 

-

23 Collection, 

-

24 NotRequired, 

-

25 Iterable, 

-

26 Literal, 

-

27 Sequence, 

-

28 Container, 

-

29) 

-

30 

-

31from debputy.manifest_parser.base_types import ( 

-

32 DebputyParsedContent, 

-

33 FileSystemMatchRule, 

-

34 FileSystemExactMatchRule, 

-

35 DebputyDispatchableType, 

-

36 TypeMapping, 

-

37) 

-

38from debputy.manifest_parser.exceptions import ( 

-

39 ManifestParseException, 

-

40) 

-

41from debputy.manifest_parser.mapper_code import ( 

-

42 normalize_into_list, 

-

43 wrap_into_list, 

-

44 map_each_element, 

-

45) 

-

46from debputy.manifest_parser.parser_data import ParserContextData 

-

47from debputy.manifest_parser.util import AttributePath, unpack_type, find_annotation 

-

48from debputy.plugin.api.impl_types import ( 

-

49 DeclarativeInputParser, 

-

50 TD, 

-

51 _ALL_PACKAGE_TYPES, 

-

52 resolve_package_type_selectors, 

-

53 ListWrappedDeclarativeInputParser, 

-

54 DispatchingObjectParser, 

-

55 DispatchingTableParser, 

-

56 TTP, 

-

57 TP, 

-

58 InPackageContextParser, 

-

59) 

-

60from debputy.plugin.api.spec import ParserDocumentation, PackageTypeSelector 

-

61from debputy.util import _info, _warn, assume_not_none 

-

62 

-

63try: 

-

64 from Levenshtein import distance 

-

65except ImportError: 

-

66 _WARN_ONCE = False 

-

67 

-

68 def _detect_possible_typo( 

-

69 _key: str, 

-

70 _value: object, 

-

71 _manifest_attributes: Mapping[str, "AttributeDescription"], 

-

72 _path: "AttributePath", 

-

73 ) -> None: 

-

74 global _WARN_ONCE 

-

75 if not _WARN_ONCE: 

-

76 _WARN_ONCE = True 

-

77 _info( 

-

78 "Install python3-levenshtein to have debputy try to detect typos in the manifest." 

-

79 ) 

-

80 

-

81else: 

-

82 

-

83 def _detect_possible_typo( 

-

84 key: str, 

-

85 value: object, 

-

86 manifest_attributes: Mapping[str, "AttributeDescription"], 

-

87 path: "AttributePath", 

-

88 ) -> None: 

-

89 k_len = len(key) 

-

90 key_path = path[key] 

-

91 matches: List[str] = [] 

-

92 current_match_strength = 0 

-

93 for acceptable_key, attr in manifest_attributes.items(): 

-

94 if abs(k_len - len(acceptable_key)) > 2: 

-

95 continue 

-

96 d = distance(key, acceptable_key) 

-

97 if d > 2: 

-

98 continue 

-

99 try: 

-

100 attr.type_validator.ensure_type(value, key_path) 

-

101 except ManifestParseException: 

-

102 if attr.type_validator.base_type_match(value): 

-

103 match_strength = 1 

-

104 else: 

-

105 match_strength = 0 

-

106 else: 

-

107 match_strength = 2 

-

108 

-

109 if match_strength < current_match_strength: 

-

110 continue 

-

111 if match_strength > current_match_strength: 

-

112 current_match_strength = match_strength 

-

113 matches.clear() 

-

114 matches.append(acceptable_key) 

-

115 

-

116 if not matches: 

-

117 return 

-

118 ref = f'at "{path.path}"' if path else "at the manifest root level" 

-

119 if len(matches) == 1: 

-

120 possible_match = repr(matches[0]) 

-

121 _warn( 

-

122 f'Possible typo: The key "{key}" {ref} should probably have been {possible_match}' 

-

123 ) 

-

124 else: 

-

125 matches.sort() 

-

126 possible_matches = ", ".join(repr(a) for a in matches) 

-

127 _warn( 

-

128 f'Possible typo: The key "{key}" {ref} should probably have been one of {possible_matches}' 

-

129 ) 

-

130 

-

131 

-

132SF = TypeVar("SF") 

-

133T = TypeVar("T") 

-

134S = TypeVar("S") 

-

135 

-

136 

-

137_NONE_TYPE = type(None) 

-

138 

-

139 

-

140# These must be able to appear in an "isinstance" check and must be builtin types. 

-

141BASIC_SIMPLE_TYPES = { 

-

142 str: "string", 

-

143 int: "integer", 

-

144 bool: "boolean", 

-

145} 

-

146 

-

147 

-

148class AttributeTypeHandler: 

-

149 __slots__ = ("_description", "_ensure_type", "base_type", "mapper") 

-

150 

-

151 def __init__( 

-

152 self, 

-

153 description: str, 

-

154 ensure_type: Callable[[Any, AttributePath], None], 

-

155 *, 

-

156 base_type: Optional[Type[Any]] = None, 

-

157 mapper: Optional[ 

-

158 Callable[[Any, AttributePath, Optional["ParserContextData"]], Any] 

-

159 ] = None, 

-

160 ) -> None: 

-

161 self._description = description 

-

162 self._ensure_type = ensure_type 

-

163 self.base_type = base_type 

-

164 self.mapper = mapper 

-

165 

-

166 def describe_type(self) -> str: 

-

167 return self._description 

-

168 

-

169 def ensure_type(self, obj: object, path: AttributePath) -> None: 

-

170 self._ensure_type(obj, path) 

-

171 

-

172 def base_type_match(self, obj: object) -> bool: 

-

173 base_type = self.base_type 

-

174 return base_type is not None and isinstance(obj, base_type) 

-

175 

-

176 def map_type( 

-

177 self, 

-

178 value: Any, 

-

179 path: AttributePath, 

-

180 parser_context: Optional["ParserContextData"], 

-

181 ) -> Any: 

-

182 mapper = self.mapper 

-

183 if mapper is not None: 

-

184 return mapper(value, path, parser_context) 

-

185 return value 

-

186 

-

187 def combine_mapper( 

-

188 self, 

-

189 mapper: Optional[ 

-

190 Callable[[Any, AttributePath, Optional["ParserContextData"]], Any] 

-

191 ], 

-

192 ) -> "AttributeTypeHandler": 

-

193 if mapper is None: 

-

194 return self 

-

195 if self.mapper is not None: 

-

196 m = self.mapper 

-

197 

-

198 def _combined_mapper( 

-

199 value: Any, 

-

200 path: AttributePath, 

-

201 parser_context: Optional["ParserContextData"], 

-

202 ) -> Any: 

-

203 return mapper(m(value, path, parser_context), path, parser_context) 

-

204 

-

205 else: 

-

206 _combined_mapper = mapper 

-

207 

-

208 return AttributeTypeHandler( 

-

209 self._description, 

-

210 self._ensure_type, 

-

211 base_type=self.base_type, 

-

212 mapper=_combined_mapper, 

-

213 ) 

-

214 

-

215 

-

216@dataclasses.dataclass(slots=True) 

-

217class AttributeDescription: 

-

218 source_attribute_name: str 

-

219 target_attribute: str 

-

220 attribute_type: Any 

-

221 type_validator: AttributeTypeHandler 

-

222 annotations: Tuple[Any, ...] 

-

223 conflicting_attributes: FrozenSet[str] 

-

224 conditional_required: Optional["ConditionalRequired"] 

-

225 parse_hints: Optional["DetectedDebputyParseHint"] = None 

-

226 is_optional: bool = False 

-

227 

-

228 

-

229def _extract_path_hint(v: Any, attribute_path: AttributePath) -> bool: 

-

230 if attribute_path.path_hint is not None: 230 ↛ 231line 230 didn't jump to line 231, because the condition on line 230 was never true

-

231 return True 

-

232 if isinstance(v, str): 

-

233 attribute_path.path_hint = v 

-

234 return True 

-

235 elif isinstance(v, list) and len(v) > 0 and isinstance(v[0], str): 

-

236 attribute_path.path_hint = v[0] 

-

237 return True 

-

238 return False 

-

239 

-

240 

-

241@dataclasses.dataclass(slots=True, frozen=True) 

-

242class DeclarativeNonMappingInputParser(DeclarativeInputParser[TD], Generic[TD, SF]): 

-

243 alt_form_parser: AttributeDescription 

-

244 inline_reference_documentation: Optional[ParserDocumentation] = None 

-

245 

-

246 def parse_input( 

-

247 self, 

-

248 value: object, 

-

249 path: AttributePath, 

-

250 *, 

-

251 parser_context: Optional["ParserContextData"] = None, 

-

252 ) -> TD: 

-

253 if self.reference_documentation_url is not None: 

-

254 doc_ref = f" (Documentation: {self.reference_documentation_url})" 

-

255 else: 

-

256 doc_ref = "" 

-

257 

-

258 alt_form_parser = self.alt_form_parser 

-

259 if value is None: 259 ↛ 260line 259 didn't jump to line 260, because the condition on line 259 was never true

-

260 form_note = f" The value must have type: {alt_form_parser.type_validator.describe_type()}" 

-

261 if self.reference_documentation_url is not None: 

-

262 doc_ref = f" Please see {self.reference_documentation_url} for the documentation." 

-

263 raise ManifestParseException( 

-

264 f"The attribute {path.path} was missing a value. {form_note}{doc_ref}" 

-

265 ) 

-

266 _extract_path_hint(value, path) 

-

267 alt_form_parser.type_validator.ensure_type(value, path) 

-

268 attribute = alt_form_parser.target_attribute 

-

269 alias_mapping = { 

-

270 attribute: ("", None), 

-

271 } 

-

272 v = alt_form_parser.type_validator.map_type(value, path, parser_context) 

-

273 path.alias_mapping = alias_mapping 

-

274 return cast("TD", {attribute: v}) 

-

275 

-

276 

-

277@dataclasses.dataclass(slots=True) 

-

278class DeclarativeMappingInputParser(DeclarativeInputParser[TD], Generic[TD, SF]): 

-

279 input_time_required_parameters: FrozenSet[str] 

-

280 all_parameters: FrozenSet[str] 

-

281 manifest_attributes: Mapping[str, "AttributeDescription"] 

-

282 source_attributes: Mapping[str, "AttributeDescription"] 

-

283 at_least_one_of: FrozenSet[FrozenSet[str]] 

-

284 alt_form_parser: Optional[AttributeDescription] 

-

285 mutually_exclusive_attributes: FrozenSet[FrozenSet[str]] = frozenset() 

-

286 _per_attribute_conflicts_cache: Optional[Mapping[str, FrozenSet[str]]] = None 

-

287 inline_reference_documentation: Optional[ParserDocumentation] = None 

-

288 path_hint_source_attributes: Sequence[str] = tuple() 

-

289 

-

290 def _parse_alt_form( 

-

291 self, 

-

292 value: object, 

-

293 path: AttributePath, 

-

294 *, 

-

295 parser_context: Optional["ParserContextData"] = None, 

-

296 ) -> TD: 

-

297 alt_form_parser = self.alt_form_parser 

-

298 if alt_form_parser is None: 298 ↛ 299line 298 didn't jump to line 299, because the condition on line 298 was never true

-

299 raise ManifestParseException( 

-

300 f"The attribute {path.path} must be a mapping.{self._doc_url_error_suffix()}" 

-

301 ) 

-

302 _extract_path_hint(value, path) 

-

303 alt_form_parser.type_validator.ensure_type(value, path) 

-

304 assert ( 

-

305 value is not None 

-

306 ), "The alternative form was None, but the parser should have rejected None earlier." 

-

307 attribute = alt_form_parser.target_attribute 

-

308 alias_mapping = { 

-

309 attribute: ("", None), 

-

310 } 

-

311 v = alt_form_parser.type_validator.map_type(value, path, parser_context) 

-

312 path.alias_mapping = alias_mapping 

-

313 return cast("TD", {attribute: v}) 

-

314 

-

315 def _validate_expected_keys( 

-

316 self, 

-

317 value: Dict[Any, Any], 

-

318 path: AttributePath, 

-

319 *, 

-

320 parser_context: Optional["ParserContextData"] = None, 

-

321 ) -> None: 

-

322 unknown_keys = value.keys() - self.all_parameters 

-

323 doc_ref = self._doc_url_error_suffix() 

-

324 if unknown_keys: 324 ↛ 325line 324 didn't jump to line 325, because the condition on line 324 was never true

-

325 for k in unknown_keys: 

-

326 if isinstance(k, str): 

-

327 _detect_possible_typo(k, value[k], self.manifest_attributes, path) 

-

328 unused_keys = self.all_parameters - value.keys() 

-

329 if unused_keys: 

-

330 k = ", ".join(unused_keys) 

-

331 raise ManifestParseException( 

-

332 f'Unknown keys "{unknown_keys}" at {path.path}". Keys that could be used here are: {k}.{doc_ref}' 

-

333 ) 

-

334 raise ManifestParseException( 

-

335 f'Unknown keys "{unknown_keys}" at {path.path}". Please remove them.{doc_ref}' 

-

336 ) 

-

337 missing_keys = self.input_time_required_parameters - value.keys() 

-

338 if missing_keys: 

-

339 required = ", ".join(repr(k) for k in sorted(missing_keys)) 

-

340 raise ManifestParseException( 

-

341 f"The following keys were required but not present at {path.path}: {required}{doc_ref}" 

-

342 ) 

-

343 for maybe_required in self.all_parameters - value.keys(): 

-

344 attr = self.manifest_attributes[maybe_required] 

-

345 assert attr.conditional_required is None or parser_context is not None 

-

346 if ( 346 ↛ 352line 346 didn't jump to line 352

-

347 attr.conditional_required is not None 

-

348 and attr.conditional_required.condition_applies( 

-

349 assume_not_none(parser_context) 

-

350 ) 

-

351 ): 

-

352 reason = attr.conditional_required.reason 

-

353 raise ManifestParseException( 

-

354 f'Missing the *conditionally* required attribute "{maybe_required}" at {path.path}. {reason}{doc_ref}' 

-

355 ) 

-

356 for keyset in self.at_least_one_of: 

-

357 matched_keys = value.keys() & keyset 

-

358 if not matched_keys: 358 ↛ 359line 358 didn't jump to line 359, because the condition on line 358 was never true

-

359 conditionally_required = ", ".join(repr(k) for k in sorted(keyset)) 

-

360 raise ManifestParseException( 

-

361 f"At least one of the following keys must be present at {path.path}:" 

-

362 f" {conditionally_required}{doc_ref}" 

-

363 ) 

-

364 for group in self.mutually_exclusive_attributes: 

-

365 matched = value.keys() & group 

-

366 if len(matched) > 1: 366 ↛ 367line 366 didn't jump to line 367, because the condition on line 366 was never true

-

367 ck = ", ".join(repr(k) for k in sorted(matched)) 

-

368 raise ManifestParseException( 

-

369 f"Could not parse {path.path}: The following attributes are" 

-

370 f" mutually exclusive: {ck}{doc_ref}" 

-

371 ) 

-

372 

-

373 def _parse_typed_dict_form( 

-

374 self, 

-

375 value: Dict[Any, Any], 

-

376 path: AttributePath, 

-

377 *, 

-

378 parser_context: Optional["ParserContextData"] = None, 

-

379 ) -> TD: 

-

380 self._validate_expected_keys(value, path, parser_context=parser_context) 

-

381 result = {} 

-

382 per_attribute_conflicts = self._per_attribute_conflicts() 

-

383 alias_mapping = {} 

-

384 for path_hint_source_attributes in self.path_hint_source_attributes: 

-

385 v = value.get(path_hint_source_attributes) 

-

386 if v is not None and _extract_path_hint(v, path): 

-

387 break 

-

388 for k, v in value.items(): 

-

389 attr = self.manifest_attributes[k] 

-

390 matched = value.keys() & per_attribute_conflicts[k] 

-

391 if matched: 391 ↛ 392line 391 didn't jump to line 392, because the condition on line 391 was never true

-

392 ck = ", ".join(repr(k) for k in sorted(matched)) 

-

393 raise ManifestParseException( 

-

394 f'The attribute "{k}" at {path.path} cannot be used with the following' 

-

395 f" attributes: {ck}{self._doc_url_error_suffix()}" 

-

396 ) 

-

397 nk = attr.target_attribute 

-

398 key_path = path[k] 

-

399 attr.type_validator.ensure_type(v, key_path) 

-

400 if v is None: 400 ↛ 401line 400 didn't jump to line 401, because the condition on line 400 was never true

-

401 continue 

-

402 if k != nk: 

-

403 alias_mapping[nk] = k, None 

-

404 v = attr.type_validator.map_type(v, key_path, parser_context) 

-

405 result[nk] = v 

-

406 if alias_mapping: 

-

407 path.alias_mapping = alias_mapping 

-

408 return cast("TD", result) 

-

409 

-

410 def _doc_url_error_suffix(self, *, see_url_version: bool = False) -> str: 

-

411 doc_url = self.reference_documentation_url 

-

412 if doc_url is not None: 

-

413 if see_url_version: 413 ↛ 414line 413 didn't jump to line 414, because the condition on line 413 was never true

-

414 return f" Please see {doc_url} for the documentation." 

-

415 return f" (Documentation: {doc_url})" 

-

416 return "" 

-

417 

-

418 def parse_input( 

-

419 self, 

-

420 value: object, 

-

421 path: AttributePath, 

-

422 *, 

-

423 parser_context: Optional["ParserContextData"] = None, 

-

424 ) -> TD: 

-

425 if value is None: 425 ↛ 426line 425 didn't jump to line 426, because the condition on line 425 was never true

-

426 form_note = " The attribute must be a mapping." 

-

427 if self.alt_form_parser is not None: 

-

428 form_note = ( 

-

429 " The attribute can be a mapping or a non-mapping format" 

-

430 ' (usually, "non-mapping format" means a string or a list of strings).' 

-

431 ) 

-

432 doc_ref = self._doc_url_error_suffix(see_url_version=True) 

-

433 raise ManifestParseException( 

-

434 f"The attribute {path.path} was missing a value. {form_note}{doc_ref}" 

-

435 ) 

-

436 

-

437 if not isinstance(value, dict): 

-

438 return self._parse_alt_form(value, path, parser_context=parser_context) 

-

439 return self._parse_typed_dict_form(value, path, parser_context=parser_context) 

-

440 

-

441 def _per_attribute_conflicts(self) -> Mapping[str, FrozenSet[str]]: 

-

442 conflicts = self._per_attribute_conflicts_cache 

-

443 if conflicts is not None: 

-

444 return conflicts 

-

445 attrs = self.source_attributes 

-

446 conflicts = { 

-

447 a.source_attribute_name: frozenset( 

-

448 attrs[ca].source_attribute_name for ca in a.conflicting_attributes 

-

449 ) 

-

450 for a in attrs.values() 

-

451 } 

-

452 self._per_attribute_conflicts_cache = conflicts 

-

453 return self._per_attribute_conflicts_cache 

-

454 

-

455 

-

456class DebputyParseHint: 

-

457 @classmethod 

-

458 def target_attribute(cls, target_attribute: str) -> "DebputyParseHint": 

-

459 """Define this source attribute to have a different target attribute name 

-

460 

-

461 As an example: 

-

462 

-

463 >>> class SourceType(TypedDict): 

-

464 ... source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")] 

-

465 ... sources: NotRequired[List[str]] 

-

466 >>> class TargetType(TypedDict): 

-

467 ... sources: List[str] 

-

468 >>> pg = ParserGenerator() 

-

469 >>> parser = pg.generate_parser(TargetType, source_content=SourceType) 

-

470 

-

471 In this example, the user can provide either `source` or `sources` and the parser will 

-

472 map them to the `sources` attribute in the `TargetType`. Note this example relies on 

-

473 the builtin mapping of `str` to `List[str]` to align the types between `source` (from 

-

474 SourceType) and `sources` (from TargetType). 

-

475 

-

476 The following rules apply: 

-

477 

-

478 * All source attributes that map to the same target attribute will be mutually exclusive 

-

479 (that is, the user cannot give `source` *and* `sources` as input). 

-

480 * When the target attribute is required, the source attributes are conditionally 

-

481 mandatory requiring the user to provide exactly one of them. 

-

482 * When multiple source attributes point to a single target attribute, none of the source 

-

483 attributes can be Required. 

-

484 * The annotation can only be used for the source type specification and the source type 

-

485 specification must be different from the target type specification. 

-

486 

-

487 The `target_attribute` annotation can be used without having multiple source attributes. This 

-

488 can be useful if the source attribute name is not valid as a python variable identifier to 

-

489 rename it to a valid python identifier. 

-

490 

-

491 :param target_attribute: The attribute name in the target content 

-

492 :return: The annotation. 

-

493 """ 

-

494 return TargetAttribute(target_attribute) 

-

495 

-

496 @classmethod 

-

497 def conflicts_with_source_attributes( 

-

498 cls, 

-

499 *conflicting_source_attributes: str, 

-

500 ) -> "DebputyParseHint": 

-

501 """Declare a conflict with one or more source attributes 

-

502 

-

503 Example: 

-

504 

-

505 >>> class SourceType(TypedDict): 

-

506 ... source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")] 

-

507 ... sources: NotRequired[List[str]] 

-

508 ... into_dir: NotRequired[str] 

-

509 ... renamed_to: Annotated[ 

-

510 ... NotRequired[str], 

-

511 ... DebputyParseHint.conflicts_with_source_attributes("sources", "into_dir") 

-

512 ... ] 

-

513 >>> class TargetType(TypedDict): 

-

514 ... sources: List[str] 

-

515 ... into_dir: NotRequired[str] 

-

516 ... renamed_to: NotRequired[str] 

-

517 >>> pg = ParserGenerator() 

-

518 >>> parser = pg.generate_parser(TargetType, source_content=SourceType) 

-

519 

-

520 In this example, if the user was to provide `renamed_to` with `sources` or `into_dir` the parser would report 

-

521 an error. However, the parser will allow `renamed_to` with `source` as the conflict is considered only for 

-

522 the input source. That is, it is irrelevant that `sources` and `source´ happens to "map" to the same target 

-

523 attribute. 

-

524 

-

525 The following rules apply: 

-

526 * It is not possible for a target attribute to declare conflicts unless the target type spec is reused as 

-

527 source type spec. 

-

528 * All attributes involved in a conflict must be NotRequired. If any of the attributes are Required, then 

-

529 the parser generator will reject the input. 

-

530 * All attributes listed in the conflict must be valid attributes in the source type spec. 

-

531 

-

532 Note you do not have to specify conflicts between two attributes with the same target attribute name. The 

-

533 `target_attribute` annotation will handle that for you. 

-

534 

-

535 :param conflicting_source_attributes: All source attributes that cannot be used with this attribute. 

-

536 :return: The annotation. 

-

537 """ 

-

538 if len(conflicting_source_attributes) < 1: 538 ↛ 539line 538 didn't jump to line 539, because the condition on line 538 was never true

-

539 raise ValueError( 

-

540 "DebputyParseHint.conflicts_with_source_attributes requires at least one attribute as input" 

-

541 ) 

-

542 return ConflictWithSourceAttribute(frozenset(conflicting_source_attributes)) 

-

543 

-

544 @classmethod 

-

545 def required_when_single_binary( 

-

546 cls, 

-

547 *, 

-

548 package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES, 

-

549 ) -> "DebputyParseHint": 

-

550 """Declare a source attribute as required when the source package produces exactly one binary package 

-

551 

-

552 The attribute in question must always be declared as `NotRequired` in the TypedDict and this condition 

-

553 can only be used for source attributes. 

-

554 """ 

-

555 resolved_package_types = resolve_package_type_selectors(package_type) 

-

556 reason = "The field is required for source packages producing exactly one binary package" 

-

557 if resolved_package_types != _ALL_PACKAGE_TYPES: 

-

558 types = ", ".join(sorted(resolved_package_types)) 

-

559 reason += f" of type {types}" 

-

560 return ConditionalRequired( 

-

561 reason, 

-

562 lambda c: len( 

-

563 [ 

-

564 p 

-

565 for p in c.binary_packages.values() 

-

566 if p.package_type in package_type 

-

567 ] 

-

568 ) 

-

569 == 1, 

-

570 ) 

-

571 return ConditionalRequired( 

-

572 reason, 

-

573 lambda c: c.is_single_binary_package, 

-

574 ) 

-

575 

-

576 @classmethod 

-

577 def required_when_multi_binary( 

-

578 cls, 

-

579 *, 

-

580 package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES, 

-

581 ) -> "DebputyParseHint": 

-

582 """Declare a source attribute as required when the source package produces two or more binary package 

-

583 

-

584 The attribute in question must always be declared as `NotRequired` in the TypedDict and this condition 

-

585 can only be used for source attributes. 

-

586 """ 

-

587 resolved_package_types = resolve_package_type_selectors(package_type) 

-

588 reason = "The field is required for source packages producing two or more binary packages" 

-

589 if resolved_package_types != _ALL_PACKAGE_TYPES: 

-

590 types = ", ".join(sorted(resolved_package_types)) 

-

591 reason = ( 

-

592 "The field is required for source packages producing not producing exactly one binary packages" 

-

593 f" of type {types}" 

-

594 ) 

-

595 return ConditionalRequired( 

-

596 reason, 

-

597 lambda c: len( 

-

598 [ 

-

599 p 

-

600 for p in c.binary_packages.values() 

-

601 if p.package_type in package_type 

-

602 ] 

-

603 ) 

-

604 != 1, 

-

605 ) 

-

606 return ConditionalRequired( 

-

607 reason, 

-

608 lambda c: not c.is_single_binary_package, 

-

609 ) 

-

610 

-

611 @classmethod 

-

612 def manifest_attribute(cls, attribute: str) -> "DebputyParseHint": 

-

613 """Declare what the attribute name (as written in the manifest) should be 

-

614 

-

615 By default, debputy will do an attribute normalizing that will take valid python identifiers such 

-

616 as `dest_dir` and remap it to the manifest variant (such as `dest-dir`) automatically. If you have 

-

617 a special case, where this built-in normalization is insufficient or the python name is considerably 

-

618 different from what the user would write in the manifest, you can use this parse hint to set the 

-

619 name that the user would have to write in the manifest for this attribute. 

-

620 

-

621 >>> class SourceType(TypedDict): 

-

622 ... source: List[FileSystemMatchRule] 

-

623 ... # Use "as" in the manifest because "as_" was not pretty enough 

-

624 ... install_as: Annotated[NotRequired[FileSystemExactMatchRule], DebputyParseHint.manifest_attribute("as")] 

-

625 

-

626 In this example, we use the parse hint to use "as" as the name in the manifest, because we cannot 

-

627 use "as" a valid python identifier (it is a keyword). While debputy would map `as_` to `as` for us, 

-

628 we have chosen to use `install_as` as a python identifier. 

-

629 """ 

-

630 return ManifestAttribute(attribute) 

-

631 

-

632 @classmethod 

-

633 def not_path_error_hint(cls) -> "DebputyParseHint": 

-

634 """Mark this attribute as not a "path hint" when it comes to reporting errors 

-

635 

-

636 By default, `debputy` will pick up attributes that uses path names (FileSystemMatchRule) as 

-

637 candidates for parse error hints (the little "<Search for: VALUE>" in error messages). 

-

638 

-

639 Most rules only have one active path-based attribute and paths tends to be unique enough 

-

640 that it helps people spot the issue faster. However, in rare cases, you can have multiple 

-

641 attributes that fit the bill. In this case, this hint can be used to "hide" the suboptimal 

-

642 choice. As an example: 

-

643 

-

644 >>> class SourceType(TypedDict): 

-

645 ... source: List[FileSystemMatchRule] 

-

646 ... install_as: Annotated[NotRequired[FileSystemExactMatchRule], DebputyParseHint.not_path_error_hint()] 

-

647 

-

648 In this case, without the hint, `debputy` might pick up `install_as` as the attribute to 

-

649 use as hint for error reporting. However, here we have decided that we never want `install_as` 

-

650 leaving `source` as the only option. 

-

651 

-

652 Generally, this type hint must be placed on the **source** format. Any source attribute matching 

-

653 the parsed format will be ignored. 

-

654 

-

655 Mind the asymmetry: The annotation is placed in the **source** format while `debputy` looks at 

-

656 the type of the target attribute to determine if it counts as path. 

-

657 """ 

-

658 return NOT_PATH_HINT 

-

659 

-

660 

-

661@dataclasses.dataclass(frozen=True, slots=True) 

-

662class TargetAttribute(DebputyParseHint): 

-

663 attribute: str 

-

664 

-

665 

-

666@dataclasses.dataclass(frozen=True, slots=True) 

-

667class ConflictWithSourceAttribute(DebputyParseHint): 

-

668 conflicting_attributes: FrozenSet[str] 

-

669 

-

670 

-

671@dataclasses.dataclass(frozen=True, slots=True) 

-

672class ConditionalRequired(DebputyParseHint): 

-

673 reason: str 

-

674 condition: Callable[["ParserContextData"], bool] 

-

675 

-

676 def condition_applies(self, context: "ParserContextData") -> bool: 

-

677 return self.condition(context) 

-

678 

-

679 

-

680@dataclasses.dataclass(frozen=True, slots=True) 

-

681class ManifestAttribute(DebputyParseHint): 

-

682 attribute: str 

-

683 

-

684 

-

685class NotPathHint(DebputyParseHint): 

-

686 pass 

-

687 

-

688 

-

689NOT_PATH_HINT = NotPathHint() 

-

690 

-

691 

-

692def _is_path_attribute_candidate( 

-

693 source_attribute: AttributeDescription, target_attribute: AttributeDescription 

-

694) -> bool: 

-

695 if ( 

-

696 source_attribute.parse_hints 

-

697 and not source_attribute.parse_hints.applicable_as_path_hint 

-

698 ): 

-

699 return False 

-

700 target_type = target_attribute.attribute_type 

-

701 _, origin, args = unpack_type(target_type, False) 

-

702 match_type = target_type 

-

703 if origin == list: 

-

704 match_type = args[0] 

-

705 return isinstance(match_type, type) and issubclass(match_type, FileSystemMatchRule) 

-

706 

-

707 

-

708class ParserGenerator: 

-

709 def __init__(self) -> None: 

-

710 self._registered_types: Dict[Any, TypeMapping[Any, Any]] = {} 

-

711 self._object_parsers: Dict[str, DispatchingObjectParser] = {} 

-

712 self._table_parsers: Dict[ 

-

713 Type[DebputyDispatchableType], DispatchingTableParser[Any] 

-

714 ] = {} 

-

715 self._in_package_context_parser: Dict[str, Any] = {} 

-

716 

-

717 def register_mapped_type(self, mapped_type: TypeMapping) -> None: 

-

718 existing = self._registered_types.get(mapped_type.target_type) 

-

719 if existing is not None: 719 ↛ 720line 719 didn't jump to line 720, because the condition on line 719 was never true

-

720 raise ValueError(f"The type {existing} is already registered") 

-

721 self._registered_types[mapped_type.target_type] = mapped_type 

-

722 

-

723 def discard_mapped_type(self, mapped_type: Type[T]) -> None: 

-

724 del self._registered_types[mapped_type] 

-

725 

-

726 def add_table_parser(self, rt: Type[DebputyDispatchableType], path: str) -> None: 

-

727 assert rt not in self._table_parsers 

-

728 self._table_parsers[rt] = DispatchingTableParser(rt, path) 

-

729 

-

730 def add_object_parser( 

-

731 self, 

-

732 path: str, 

-

733 *, 

-

734 parser_documentation: Optional[ParserDocumentation] = None, 

-

735 ) -> None: 

-

736 assert path not in self._in_package_context_parser 

-

737 assert path not in self._object_parsers 

-

738 self._object_parsers[path] = DispatchingObjectParser( 

-

739 path, parser_documentation=parser_documentation 

-

740 ) 

-

741 

-

742 def add_in_package_context_parser( 

-

743 self, 

-

744 path: str, 

-

745 delegate: DeclarativeInputParser[Any], 

-

746 ) -> None: 

-

747 assert path not in self._in_package_context_parser 

-

748 assert path not in self._object_parsers 

-

749 self._in_package_context_parser[path] = InPackageContextParser(path, delegate) 

-

750 

-

751 @property 

-

752 def dispatchable_table_parsers( 

-

753 self, 

-

754 ) -> Mapping[Type[DebputyDispatchableType], DispatchingTableParser[Any]]: 

-

755 return self._table_parsers 

-

756 

-

757 @property 

-

758 def dispatchable_object_parsers(self) -> Mapping[str, DispatchingObjectParser]: 

-

759 return self._object_parsers 

-

760 

-

761 def dispatch_parser_table_for( 

-

762 self, rule_type: TTP 

-

763 ) -> Optional[DispatchingTableParser[TP]]: 

-

764 return cast( 

-

765 "Optional[DispatchingTableParser[TP]]", self._table_parsers.get(rule_type) 

-

766 ) 

-

767 

-

768 def generate_parser( 

-

769 self, 

-

770 parsed_content: Type[TD], 

-

771 *, 

-

772 source_content: Optional[SF] = None, 

-

773 allow_optional: bool = False, 

-

774 inline_reference_documentation: Optional[ParserDocumentation] = None, 

-

775 ) -> DeclarativeInputParser[TD]: 

-

776 """Derive a parser from a TypedDict 

-

777 

-

778 Generates a parser for a segment of the manifest (think the `install-docs` snippet) from a TypedDict 

-

779 or two that are used as a description. 

-

780 

-

781 In its most simple use-case, the caller provides a TypedDict of the expected attributed along with 

-

782 their types. As an example: 

-

783 

-

784 >>> class InstallDocsRule(DebputyParsedContent): 

-

785 ... sources: List[str] 

-

786 ... into: List[str] 

-

787 >>> pg = ParserGenerator() 

-

788 >>> simple_parser = pg.generate_parser(InstallDocsRule) 

-

789 

-

790 This will create a parser that would be able to interpret something like: 

-

791 

-

792 ```yaml 

-

793 install-docs: 

-

794 sources: ["docs/*"] 

-

795 into: ["my-pkg"] 

-

796 ``` 

-

797 

-

798 While this is sufficient for programmers, it is a bit rigid for the packager writing the manifest. Therefore, 

-

799 you can also provide a TypedDict describing the input, enabling more flexibility: 

-

800 

-

801 >>> class InstallDocsRule(DebputyParsedContent): 

-

802 ... sources: List[str] 

-

803 ... into: List[str] 

-

804 >>> class InputDocsRuleInputFormat(TypedDict): 

-

805 ... source: NotRequired[Annotated[str, DebputyParseHint.target_attribute("sources")]] 

-

806 ... sources: NotRequired[List[str]] 

-

807 ... into: Union[str, List[str]] 

-

808 >>> pg = ParserGenerator() 

-

809 >>> flexible_parser = pg.generate_parser( 

-

810 ... InstallDocsRule, 

-

811 ... source_content=InputDocsRuleInputFormat, 

-

812 ... ) 

-

813 

-

814 In this case, the `sources` field can either come from a single `source` in the manifest (which must be a string) 

-

815 or `sources` (which must be a list of strings). The parser also ensures that only one of `source` or `sources` 

-

816 is used to ensure the input is not ambiguous. For the `into` parameter, the parser will accept it being a str 

-

817 or a list of strings. Regardless of how the input was provided, the parser will normalize the input such that 

-

818 both `sources` and `into` in the result is a list of strings. As an example, this parser can accept 

-

819 both the previous input but also the following input: 

-

820 

-

821 ```yaml 

-

822 install-docs: 

-

823 source: "docs/*" 

-

824 into: "my-pkg" 

-

825 ``` 

-

826 

-

827 The `source` and `into` attributes are then normalized to lists as if the user had written them as lists 

-

828 with a single string in them. As noted above, the name of the `source` attribute will also be normalized 

-

829 while parsing. 

-

830 

-

831 In the cases where only one field is required by the user, it can sometimes make sense to allow a non-dict 

-

832 as part of the input. Example: 

-

833 

-

834 >>> class DiscardRule(DebputyParsedContent): 

-

835 ... paths: List[str] 

-

836 >>> class DiscardRuleInputDictFormat(TypedDict): 

-

837 ... path: NotRequired[Annotated[str, DebputyParseHint.target_attribute("paths")]] 

-

838 ... paths: NotRequired[List[str]] 

-

839 >>> # This format relies on DiscardRule having exactly one Required attribute 

-

840 >>> DiscardRuleInputWithAltFormat = Union[ 

-

841 ... DiscardRuleInputDictFormat, 

-

842 ... str, 

-

843 ... List[str], 

-

844 ... ] 

-

845 >>> pg = ParserGenerator() 

-

846 >>> flexible_parser = pg.generate_parser( 

-

847 ... DiscardRule, 

-

848 ... source_content=DiscardRuleInputWithAltFormat, 

-

849 ... ) 

-

850 

-

851 

-

852 Supported types: 

-

853 * `List` - must have a fixed type argument (such as `List[str]`) 

-

854 * `str` 

-

855 * `int` 

-

856 * `BinaryPackage` - When provided (or required), the user must provide a package name listed 

-

857 in the debian/control file. The code receives the BinaryPackage instance 

-

858 matching that input. 

-

859 * `FileSystemMode` - When provided (or required), the user must provide a file system mode in any 

-

860 format that `debputy' provides (such as `0644` or `a=rw,go=rw`). 

-

861 * `FileSystemOwner` - When provided (or required), the user must a file system owner that is 

-

862 available statically on all Debian systems (must be in `base-passwd`). 

-

863 The user has multiple options for how to specify it (either via name or id). 

-

864 * `FileSystemGroup` - When provided (or required), the user must a file system group that is 

-

865 available statically on all Debian systems (must be in `base-passwd`). 

-

866 The user has multiple options for how to specify it (either via name or id). 

-

867 * `ManifestCondition` - When provided (or required), the user must specify a conditional rule to apply. 

-

868 Usually, it is better to extend `DebputyParsedContentStandardConditional`, which 

-

869 provides the `debputy` default `when` parameter for conditionals. 
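As a hypothetical sketch (the rule and attribute names below are invented for illustration and are not from the original docstring), several of these types can be combined in a single parsed content definition:

>>> class ChownRule(DebputyParsedContent):
...     path: str
...     owner: FileSystemOwner
...     group: NotRequired[FileSystemGroup]
...     mode: NotRequired[FileSystemMode]
...     when: NotRequired[ManifestCondition]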

-

870 

-

871 Supported special type-like parameters: 

-

872 

-

873 * `Required` / `NotRequired` to mark a field as `Required` or `NotRequired`. Must be provided at the 

-

874 outermost level. Cannot vary between `parsed_content` and `source_content`. 

-

875 * `Annotated`. Accepted at the outermost level (inside Required/NotRequired) but ignored at the moment. 

-

876 * `Union`. Must be at the outermost level (inside `Annotated` and/or `Required`/`NotRequired` if these are present). 

-

877 Automapping (see below) is restricted to two members in the Union. 

-

878 

-

879 Notable non-supported types: 

-

880 * `Mapping` and all variants thereof (such as `dict`). In the future, nested `TypedDict`s may be allowed. 

-

881 * `Optional` (or `Union[..., None]`): Use `NotRequired` for optional fields. 
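For example (an added sketch, not from the original docstring), optional fields are declared via `NotRequired` rather than `Optional`:

>>> class ExampleRule(DebputyParsedContent):
...     sources: List[str]                # required
...     into: NotRequired[List[str]]      # may be omitted entirely
>>> # into: Optional[List[str]] would be rejected by the parser generator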

-

882 

-

883 Automatic mapping rules from `source_content` to `parsed_content`: 

-

884 - `Union[T, List[T]]` can be narrowed automatically to `List[T]`. Transformation is basically: 

-

885 `lambda value: value if isinstance(value, list) else [value]` 

-

886 - `T` can be mapped automatically to `List[T]`, with the transformation being: `lambda value: [value]` (see the example below) 
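Both transformations are plain Python and can be sanity-checked directly (an added illustration, not part of the original docstring):

>>> normalize = lambda value: value if isinstance(value, list) else [value]
>>> normalize("docs/*")
['docs/*']
>>> normalize(["a", "b"])
['a', 'b']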

-

887 

-

888 Additionally, types can be annotated (`Annotated[str, ...]`) with `DebputyParseHint`s. Check its classmethod 

-

889 for concrete features that may be useful to you. 

-

890 

-

891 :param parsed_content: A DebputyParsedContent / TypedDict describing the desired model of the input once parsed. 

-

892 (DebputyParsedContent is a TypedDict subclass that works around some inadequate type checkers). 

-

893 It can also be a `List[DebputyParsedContent]`. In that case, `source_content` must be a 

-

894 `List[TypedDict[...]]`. 

-

895 :param source_content: Optionally, a TypedDict describing the input allowed by the user. This can be useful 

-

896 to describe more variations than in `parsed_content` that the parser will normalize for you. If omitted, 

-

897 the parsed_content is also considered the source_content (which affects what annotations are allowed in it). 

-

898 Note you should never pass the parsed_content as source_content directly. 

-

899 :param allow_optional: In rare cases, you want to distinguish between a field explicitly set to `None` and an omitted field. In this case, you 

-

900 should set this to True. Though, in 99.9% of all cases, you want `NotRequired` rather than `Optional` (and 

-

901 can keep this False). 

-

902 :param inline_reference_documentation: Optionally, programmatic documentation of the parser for use in generated reference documentation. 

-

903 :return: An input parser capable of reading input matching the TypedDict(s) used as reference. 

-

904 """ 

-

905 orig_parsed_content = parsed_content 

-

906 if source_content is parsed_content: 906 ↛ 907line 906 didn't jump to line 907, because the condition on line 906 was never true

-

907 raise ValueError( 

-

908 "Do not provide source_content if it is the same as parsed_content" 

-

909 ) 

-

910 is_list_wrapped = False 

-

911 if get_origin(orig_parsed_content) == list: 

-

912 parsed_content = get_args(orig_parsed_content)[0] 

-

913 is_list_wrapped = True 

-

914 

-

915 if isinstance(parsed_content, type) and issubclass( 

-

916 parsed_content, DebputyDispatchableType 

-

917 ): 

-

918 parser = self.dispatch_parser_table_for(parsed_content) 

-

919 if parser is None: 919 ↛ 920line 919 didn't jump to line 920, because the condition on line 919 was never true

-

920 raise ValueError( 

-

921 f"Unsupported parsed_content descriptor: {parsed_content.__qualname__}." 

-

922 f" The class {parsed_content.__qualname__} is not a pre-registered type." 

-

923 ) 

-

924 # FIXME: Only the list wrapped version has documentation. 

-

925 if is_list_wrapped: 925 ↛ 930line 925 didn't jump to line 930, because the condition on line 925 was never false

-

926 parser = ListWrappedDeclarativeInputParser( 

-

927 parser, 

-

928 inline_reference_documentation=inline_reference_documentation, 

-

929 ) 

-

930 return parser 

-

931 

-

932 if not is_typeddict(parsed_content): 932 ↛ 933line 932 didn't jump to line 933, because the condition on line 932 was never true

-

933 raise ValueError( 

-

934 f"Unsupported parsed_content descriptor: {parsed_content.__qualname__}." 

-

935 ' Only "TypedDict"-based types and a subset of "DebputyDispatchableType" are supported.' 

-

936 ) 

-

937 if is_list_wrapped: 

-

938 if get_origin(source_content) != list: 938 ↛ 939line 938 didn't jump to line 939, because the condition on line 938 was never true

-

939 raise ValueError( 

-

940 "If the parsed_content is a List type, then source_format must be a List type as well." 

-

941 ) 

-

942 source_content = get_args(source_content)[0] 

-

943 

-

944 target_attributes = self._parse_types( 

-

945 parsed_content, 

-

946 allow_source_attribute_annotations=source_content is None, 

-

947 forbid_optional=not allow_optional, 

-

948 ) 

-

949 required_target_parameters = frozenset(parsed_content.__required_keys__) 

-

950 parsed_alt_form = None 

-

951 non_mapping_source_only = False 

-

952 

-

953 if source_content is not None: 

-

954 default_target_attribute = None 

-

955 if len(required_target_parameters) == 1: 

-

956 default_target_attribute = next(iter(required_target_parameters)) 

-

957 

-

958 source_typed_dict, alt_source_forms = _extract_typed_dict( 

-

959 source_content, 

-

960 default_target_attribute, 

-

961 ) 

-

962 if alt_source_forms: 

-

963 parsed_alt_form = self._parse_alt_form( 

-

964 alt_source_forms, 

-

965 default_target_attribute, 

-

966 ) 

-

967 if source_typed_dict is not None: 

-

968 source_content_attributes = self._parse_types( 

-

969 source_typed_dict, 

-

970 allow_target_attribute_annotation=True, 

-

971 allow_source_attribute_annotations=True, 

-

972 forbid_optional=not allow_optional, 

-

973 ) 

-

974 source_content_parameter = "source_content" 

-

975 source_and_parsed_differs = True 

-

976 else: 

-

977 source_typed_dict = parsed_content 

-

978 source_content_attributes = target_attributes 

-

979 source_content_parameter = "parsed_content" 

-

980 source_and_parsed_differs = True 

-

981 non_mapping_source_only = True 

-

982 else: 

-

983 source_typed_dict = parsed_content 

-

984 source_content_attributes = target_attributes 

-

985 source_content_parameter = "parsed_content" 

-

986 source_and_parsed_differs = False 

-

987 

-

988 sources = collections.defaultdict(set) 

-

989 seen_targets = set() 

-

990 seen_source_names: Dict[str, str] = {} 

-

991 source_attributes: Dict[str, AttributeDescription] = {} 

-

992 path_hint_source_attributes = [] 

-

993 

-

994 for k in source_content_attributes: 

-

995 ia = source_content_attributes[k] 

-

996 

-

997 ta = ( 

-

998 target_attributes.get(ia.target_attribute) 

-

999 if source_and_parsed_differs 

-

1000 else ia 

-

1001 ) 

-

1002 if ta is None: 1002 ↛ 1004line 1002 didn't jump to line 1004, because the condition on line 1002 was never true

-

1003 # Error message would be wrong if this assertion is false. 

-

1004 assert source_and_parsed_differs 

-

1005 raise ValueError( 

-

1006 f'The attribute "{k}" from the "source_content" parameter should have mapped' 

-

1007 f' to "{ia.target_attribute}", but that parameter does not exist in "parsed_content"' 

-

1008 ) 

-

1009 if _is_path_attribute_candidate(ia, ta): 

-

1010 path_hint_source_attributes.append(ia.source_attribute_name) 

-

1011 existing_source_name = seen_source_names.get(ia.source_attribute_name) 

-

1012 if existing_source_name: 1012 ↛ 1013line 1012 didn't jump to line 1013, because the condition on line 1012 was never true

-

1013 raise ValueError( 

-

1014 f'The attribute "{k}" and "{existing_source_name}" both share the source name' 

-

1015 f' "{ia.source_attribute_name}". Please change the {source_content_parameter} parameter,' 

-

1016 f' so only one attribute use "{ia.source_attribute_name}".' 

-

1017 ) 

-

1018 seen_source_names[ia.source_attribute_name] = k 

-

1019 seen_targets.add(ta.target_attribute) 

-

1020 sources[ia.target_attribute].add(k) 

-

1021 if source_and_parsed_differs: 

-

1022 bridge_mapper = self._type_normalize( 

-

1023 k, ia.attribute_type, ta.attribute_type, False 

-

1024 ) 

-

1025 ia.type_validator = ia.type_validator.combine_mapper(bridge_mapper) 

-

1026 source_attributes[k] = ia 

-

1027 

-

1028 def _as_attr_names(td_name: Iterable[str]) -> FrozenSet[str]: 

-

1029 return frozenset( 

-

1030 source_content_attributes[a].source_attribute_name for a in td_name 

-

1031 ) 

-

1032 

-

1033 _check_attributes( 

-

1034 parsed_content, 

-

1035 source_typed_dict, 

-

1036 source_content_attributes, 

-

1037 sources, 

-

1038 ) 

-

1039 

-

1040 at_least_one_of = frozenset( 

-

1041 _as_attr_names(g) 

-

1042 for k, g in sources.items() 

-

1043 if len(g) > 1 and k in required_target_parameters 

-

1044 ) 

-

1045 

-

1046 if source_and_parsed_differs and seen_targets != target_attributes.keys(): 1046 ↛ 1047line 1046 didn't jump to line 1047, because the condition on line 1046 was never true

-

1047 missing = ", ".join( 

-

1048 repr(k) for k in (target_attributes.keys() - seen_targets) 

-

1049 ) 

-

1050 raise ValueError( 

-

1051 'The following attributes in "parsed_content" did not have a source field in "source_content":' 

-

1052 f" {missing}" 

-

1053 ) 

-

1054 all_mutually_exclusive_fields = frozenset( 

-

1055 _as_attr_names(g) for g in sources.values() if len(g) > 1 

-

1056 ) 

-

1057 

-

1058 all_parameters = ( 

-

1059 source_typed_dict.__required_keys__ | source_typed_dict.__optional_keys__ 

-

1060 ) 

-

1061 _check_conflicts( 

-

1062 source_content_attributes, 

-

1063 source_typed_dict.__required_keys__, 

-

1064 all_parameters, 

-

1065 ) 

-

1066 

-

1067 manifest_attributes = { 

-

1068 a.source_attribute_name: a for a in source_content_attributes.values() 

-

1069 } 

-

1070 

-

1071 if parsed_alt_form is not None: 

-

1072 target_attribute = parsed_alt_form.target_attribute 

-

1073 if ( 1073 ↛ 1078line 1073 didn't jump to line 1078

-

1074 target_attribute not in required_target_parameters 

-

1075 and required_target_parameters 

-

1076 or len(required_target_parameters) > 1 

-

1077 ): 

-

1078 raise NotImplementedError( 

-

1079 "When using alternative source formats (Union[TypedDict, ...]), then the" 

-

1080 " target must have at most one require parameter" 

-

1081 ) 

-

1082 bridge_mapper = self._type_normalize( 

-

1083 target_attribute, 

-

1084 parsed_alt_form.attribute_type, 

-

1085 target_attributes[target_attribute].attribute_type, 

-

1086 False, 

-

1087 ) 

-

1088 parsed_alt_form.type_validator = ( 

-

1089 parsed_alt_form.type_validator.combine_mapper(bridge_mapper) 

-

1090 ) 

-

1091 

-

1092 _verify_inline_reference_documentation( 

-

1093 source_content_attributes, 

-

1094 inline_reference_documentation, 

-

1095 parsed_alt_form is not None, 

-

1096 ) 

-

1097 if non_mapping_source_only: 

-

1098 parser = DeclarativeNonMappingInputParser( 

-

1099 assume_not_none(parsed_alt_form), 

-

1100 inline_reference_documentation=inline_reference_documentation, 

-

1101 ) 

-

1102 else: 

-

1103 parser = DeclarativeMappingInputParser( 

-

1104 _as_attr_names(source_typed_dict.__required_keys__), 

-

1105 _as_attr_names(all_parameters), 

-

1106 manifest_attributes, 

-

1107 source_attributes, 

-

1108 mutually_exclusive_attributes=all_mutually_exclusive_fields, 

-

1109 alt_form_parser=parsed_alt_form, 

-

1110 at_least_one_of=at_least_one_of, 

-

1111 inline_reference_documentation=inline_reference_documentation, 

-

1112 path_hint_source_attributes=tuple(path_hint_source_attributes), 

-

1113 ) 

-

1114 if is_list_wrapped: 

-

1115 parser = ListWrappedDeclarativeInputParser(parser) 

-

1116 return parser 

-

1117 

-

1118 def _as_type_validator( 

-

1119 self, 

-

1120 attribute: str, 

-

1121 provided_type: Any, 

-

1122 parsing_typed_dict_attribute: bool, 

-

1123 ) -> AttributeTypeHandler: 

-

1124 assert not isinstance(provided_type, tuple) 

-

1125 

-

1126 if isinstance(provided_type, type) and issubclass( 

-

1127 provided_type, DebputyDispatchableType 

-

1128 ): 

-

1129 return _dispatch_parser(provided_type) 

-

1130 

-

1131 unmapped_type = self._strip_mapped_types( 

-

1132 provided_type, 

-

1133 parsing_typed_dict_attribute, 

-

1134 ) 

-

1135 type_normalizer = self._type_normalize( 

-

1136 attribute, 

-

1137 unmapped_type, 

-

1138 provided_type, 

-

1139 parsing_typed_dict_attribute, 

-

1140 ) 

-

1141 t_unmapped, t_orig, t_args = unpack_type( 

-

1142 unmapped_type, 

-

1143 parsing_typed_dict_attribute, 

-

1144 ) 

-

1145 

-

1146 if ( 1146 ↛ 1152line 1146 didn't jump to line 1152

-

1147 t_orig == Union 

-

1148 and t_args 

-

1149 and len(t_args) == 2 

-

1150 and any(v is _NONE_TYPE for v in t_args) 

-

1151 ): 

-

1152 _, _, args = unpack_type(provided_type, parsing_typed_dict_attribute) 

-

1153 actual_type = [a for a in args if a is not _NONE_TYPE][0] 

-

1154 validator = self._as_type_validator( 

-

1155 attribute, actual_type, parsing_typed_dict_attribute 

-

1156 ) 

-

1157 

-

1158 def _validator(v: Any, path: AttributePath) -> None: 

-

1159 if v is None: 

-

1160 return 

-

1161 validator.ensure_type(v, path) 

-

1162 

-

1163 return AttributeTypeHandler( 

-

1164 validator.describe_type(), 

-

1165 _validator, 

-

1166 base_type=validator.base_type, 

-

1167 mapper=type_normalizer, 

-

1168 ) 

-

1169 

-

1170 if unmapped_type in BASIC_SIMPLE_TYPES: 

-

1171 type_name = BASIC_SIMPLE_TYPES[unmapped_type] 

-

1172 

-

1173 type_mapping = self._registered_types.get(provided_type) 

-

1174 if type_mapping is not None: 

-

1175 simple_type = f" ({type_name})" 

-

1176 type_name = type_mapping.target_type.__name__ 

-

1177 else: 

-

1178 simple_type = "" 

-

1179 

-

1180 def _validator(v: Any, path: AttributePath) -> None: 

-

1181 if not isinstance(v, unmapped_type): 

-

1182 _validation_type_error( 

-

1183 path, f"The attribute must be a {type_name}{simple_type}" 

-

1184 ) 

-

1185 

-

1186 return AttributeTypeHandler( 

-

1187 type_name, 

-

1188 _validator, 

-

1189 base_type=unmapped_type, 

-

1190 mapper=type_normalizer, 

-

1191 ) 

-

1192 if t_orig == list: 

-

1193 if not t_args: 1193 ↛ 1194line 1193 didn't jump to line 1194, because the condition on line 1193 was never true

-

1194 raise ValueError( 

-

1195 f'The attribute "{attribute}" is List but does not have Generics (Must use List[X])' 

-

1196 ) 

-

1197 _, t_provided_orig, t_provided_args = unpack_type( 

-

1198 provided_type, 

-

1199 parsing_typed_dict_attribute, 

-

1200 ) 

-

1201 generic_type = t_args[0] 

-

1202 key_mapper = self._as_type_validator( 

-

1203 attribute, 

-

1204 generic_type, 

-

1205 parsing_typed_dict_attribute, 

-

1206 ) 

-

1207 

-

1208 def _validator(v: Any, path: AttributePath) -> None: 

-

1209 if not isinstance(v, list): 1209 ↛ 1210line 1209 didn't jump to line 1210, because the condition on line 1209 was never true

-

1210 _validation_type_error(path, "The attribute must be a list") 

-

1211 for i, v in enumerate(v): 

-

1212 key_mapper.ensure_type(v, path[i]) 

-

1213 

-

1214 list_mapper = ( 

-

1215 map_each_element(key_mapper.mapper) 

-

1216 if key_mapper.mapper is not None 

-

1217 else None 

-

1218 ) 

-

1219 

-

1220 return AttributeTypeHandler( 

-

1221 f"List of {key_mapper.describe_type()}", 

-

1222 _validator, 

-

1223 base_type=list, 

-

1224 mapper=type_normalizer, 

-

1225 ).combine_mapper(list_mapper) 

-

1226 if is_typeddict(provided_type): 

-

1227 subparser = self.generate_parser(cast("Type[TD]", provided_type)) 

-

1228 return AttributeTypeHandler( 

-

1229 description=f"{provided_type.__name__} (Typed Mapping)", 

-

1230 ensure_type=lambda v, ap: None, 

-

1231 base_type=dict, 

-

1232 mapper=lambda v, ap, cv: subparser.parse_input( 

-

1233 v, ap, parser_context=cv 

-

1234 ), 

-

1235 ) 

-

1236 if t_orig == dict: 

-

1237 if not t_args or len(t_args) != 2: 1237 ↛ 1238line 1237 didn't jump to line 1238, because the condition on line 1237 was never true

-

1238 raise ValueError( 

-

1239 f'The attribute "{attribute}" is Dict but does not have Generics (Must use Dict[str, Y])' 

-

1240 ) 

-

1241 if t_args[0] != str: 1241 ↛ 1242line 1241 didn't jump to line 1242, because the condition on line 1241 was never true

-

1242 raise ValueError( 

-

1243 f'The attribute "{attribute}" is Dict and has a non-str type as key.' 

-

1244 " Currently, only `str` is supported (Dict[str, Y])" 

-

1245 ) 

-

1246 key_mapper = self._as_type_validator( 

-

1247 attribute, 

-

1248 t_args[0], 

-

1249 parsing_typed_dict_attribute, 

-

1250 ) 

-

1251 value_mapper = self._as_type_validator( 

-

1252 attribute, 

-

1253 t_args[1], 

-

1254 parsing_typed_dict_attribute, 

-

1255 ) 

-

1256 

-

1257 if key_mapper.base_type is None: 1257 ↛ 1258line 1257 didn't jump to line 1258, because the condition on line 1257 was never true

-

1258 raise ValueError( 

-

1259 f'The attribute "{attribute}" is Dict and the key did not have a trivial base type. Key types' 

-

1260 f" without trivial base types (such as `str`) are not supported at the moment." 

-

1261 ) 

-

1262 

-

1263 if value_mapper.mapper is not None: 1263 ↛ 1264line 1263 didn't jump to line 1264, because the condition on line 1263 was never true

-

1264 raise ValueError( 

-

1265 f'The attribute "{attribute}" is Dict and the value requires mapping.' 

-

1266 " Currently, this is not supported. Consider a simpler type (such as Dict[str, str] or Dict[str, Any])." 

-

1267 " Better typing may come later" 

-

1268 ) 

-

1269 

-

1270 def _validator(uv: Any, path: AttributePath) -> None: 

-

1271 if not isinstance(uv, dict): 1271 ↛ 1272line 1271 didn't jump to line 1272, because the condition on line 1271 was never true

-

1272 _validation_type_error(path, "The attribute must be a mapping") 

-

1273 key_name = "the first key in the mapping" 

-

1274 for i, (k, v) in enumerate(uv.items()): 

-

1275 if not key_mapper.base_type_match(k): 1275 ↛ 1276line 1275 didn't jump to line 1276, because the condition on line 1275 was never true

-

1276 kp = path.copy_with_path_hint(key_name) 

-

1277 _validation_type_error( 

-

1278 kp, 

-

1279 f'The key number {i + 1} in attribute "{kp}" must be a {key_mapper.describe_type()}', 

-

1280 ) 

-

1281 key_name = f"the key after {k}" 

-

1282 value_mapper.ensure_type(v, path[k]) 

-

1283 

-

1284 return AttributeTypeHandler( 

-

1285 f"Mapping of {value_mapper.describe_type()}", 

-

1286 _validator, 

-

1287 base_type=dict, 

-

1288 mapper=type_normalizer, 

-

1289 ).combine_mapper(key_mapper.mapper) 

-

1290 if t_orig == Union: 

-

1291 if _is_two_arg_x_list_x(t_args): 

-

1292 # Force the order to be "X, List[X]" as it simplifies the code 

-

1293 x_list_x = ( 

-

1294 t_args if get_origin(t_args[1]) == list else (t_args[1], t_args[0]) 

-

1295 ) 

-

1296 

-

1297 # X, List[X] could match if X was List[Y]. However, our code below assumes 

-

1298 # that X is a non-list. The `_is_two_arg_x_list_x` returns False for this 

-

1299 # case to avoid this assert and fall into the "generic case". 

-

1300 assert get_origin(x_list_x[0]) != list 

-

1301 x_subtype_checker = self._as_type_validator( 

-

1302 attribute, 

-

1303 x_list_x[0], 

-

1304 parsing_typed_dict_attribute, 

-

1305 ) 

-

1306 list_x_subtype_checker = self._as_type_validator( 

-

1307 attribute, 

-

1308 x_list_x[1], 

-

1309 parsing_typed_dict_attribute, 

-

1310 ) 

-

1311 type_description = x_subtype_checker.describe_type() 

-

1312 type_description = f"{type_description} or a list of {type_description}" 

-

1313 

-

1314 def _validator(v: Any, path: AttributePath) -> None: 

-

1315 if isinstance(v, list): 

-

1316 list_x_subtype_checker.ensure_type(v, path) 

-

1317 else: 

-

1318 x_subtype_checker.ensure_type(v, path) 

-

1319 

-

1320 return AttributeTypeHandler( 

-

1321 type_description, 

-

1322 _validator, 

-

1323 mapper=type_normalizer, 

-

1324 ) 

-

1325 else: 

-

1326 subtype_checker = [ 

-

1327 self._as_type_validator(attribute, a, parsing_typed_dict_attribute) 

-

1328 for a in t_args 

-

1329 ] 

-

1330 type_description = "one-of: " + ", ".join( 

-

1331 f"{sc.describe_type()}" for sc in subtype_checker 

-

1332 ) 

-

1333 mapper = subtype_checker[0].mapper 

-

1334 if any(mapper != sc.mapper for sc in subtype_checker): 1334 ↛ 1335line 1334 didn't jump to line 1335, because the condition on line 1334 was never true

-

1335 raise ValueError( 

-

1336 f'Cannot handle the union "{provided_type}" as the target types need different' 

-

1337 " type normalization/mapping logic. Unions are generally limited to Union[X, List[X]]" 

-

1338 " where X is a non-collection type." 

-

1339 ) 

-

1340 

-

1341 def _validator(v: Any, path: AttributePath) -> None: 

-

1342 partial_matches = [] 

-

1343 for sc in subtype_checker: 1343 ↛ 1351line 1343 didn't jump to line 1351, because the loop on line 1343 didn't complete

-

1344 try: 

-

1345 sc.ensure_type(v, path) 

-

1346 return 

-

1347 except ManifestParseException as e: 

-

1348 if sc.base_type_match(v): 1348 ↛ 1349line 1348 didn't jump to line 1349, because the condition on line 1348 was never true

-

1349 partial_matches.append((sc, e)) 

-

1350 

-

1351 if len(partial_matches) == 1: 

-

1352 raise partial_matches[0][1] 

-

1353 _validation_type_error( 

-

1354 path, f"Could not match against: {type_description}" 

-

1355 ) 

-

1356 

-

1357 return AttributeTypeHandler( 

-

1358 type_description, 

-

1359 _validator, 

-

1360 mapper=type_normalizer, 

-

1361 ) 

-

1362 if t_orig == Literal: 

-

1363 # We want "x" for string values; repr provides 'x' 

-

1364 pretty = ", ".join( 

-

1365 f'"{v}"' if isinstance(v, str) else str(v) for v in t_args 

-

1366 ) 

-

1367 

-

1368 def _validator(v: Any, path: AttributePath) -> None: 

-

1369 if v not in t_args: 

-

1370 value_hint = "" 

-

1371 if isinstance(v, str): 1371 ↛ 1373line 1371 didn't jump to line 1373, because the condition on line 1371 was never false

-

1372 value_hint = f"({v}) " 

-

1373 _validation_type_error( 

-

1374 path, 

-

1375 f"Value {value_hint}must be one of the following literal values: {pretty}", 

-

1376 ) 

-

1377 

-

1378 return AttributeTypeHandler( 

-

1379 f"One of the following literal values: {pretty}", 

-

1380 _validator, 

-

1381 ) 

-

1382 

-

1383 if provided_type == Any: 1383 ↛ 1388line 1383 didn't jump to line 1388, because the condition on line 1383 was never false

-

1384 return AttributeTypeHandler( 

-

1385 "any (unvalidated)", 

-

1386 lambda *a: None, 

-

1387 ) 

-

1388 raise ValueError( 

-

1389 f'The attribute "{attribute}" had/contained a type {provided_type}, which is not supported' 

-

1390 ) 

-

1391 

-

1392 def _parse_types( 

-

1393 self, 

-

1394 spec: Type[TypedDict], 

-

1395 allow_target_attribute_annotation: bool = False, 

-

1396 allow_source_attribute_annotations: bool = False, 

-

1397 forbid_optional: bool = True, 

-

1398 ) -> Dict[str, AttributeDescription]: 

-

1399 annotations = get_type_hints(spec, include_extras=True) 

-

1400 return { 

-

1401 k: self._attribute_description( 

-

1402 k, 

-

1403 t, 

-

1404 k in spec.__required_keys__, 

-

1405 allow_target_attribute_annotation=allow_target_attribute_annotation, 

-

1406 allow_source_attribute_annotations=allow_source_attribute_annotations, 

-

1407 forbid_optional=forbid_optional, 

-

1408 ) 

-

1409 for k, t in annotations.items() 

-

1410 } 

-

1411 

-

1412 def _attribute_description( 

-

1413 self, 

-

1414 attribute: str, 

-

1415 orig_td: Any, 

-

1416 is_required: bool, 

-

1417 forbid_optional: bool = True, 

-

1418 allow_target_attribute_annotation: bool = False, 

-

1419 allow_source_attribute_annotations: bool = False, 

-

1420 ) -> AttributeDescription: 

-

1421 td, anno, is_optional = _parse_type( 

-

1422 attribute, orig_td, forbid_optional=forbid_optional 

-

1423 ) 

-

1424 type_validator = self._as_type_validator(attribute, td, True) 

-

1425 parsed_annotations = DetectedDebputyParseHint.parse_annotations( 

-

1426 anno, 

-

1427 f' Seen with attribute "{attribute}".', 

-

1428 attribute, 

-

1429 is_required, 

-

1430 allow_target_attribute_annotation=allow_target_attribute_annotation, 

-

1431 allow_source_attribute_annotations=allow_source_attribute_annotations, 

-

1432 ) 

-

1433 return AttributeDescription( 

-

1434 target_attribute=parsed_annotations.target_attribute, 

-

1435 attribute_type=td, 

-

1436 type_validator=type_validator, 

-

1437 annotations=anno, 

-

1438 is_optional=is_optional, 

-

1439 conflicting_attributes=parsed_annotations.conflict_with_source_attributes, 

-

1440 conditional_required=parsed_annotations.conditional_required, 

-

1441 source_attribute_name=assume_not_none( 

-

1442 parsed_annotations.source_manifest_attribute 

-

1443 ), 

-

1444 parse_hints=parsed_annotations, 

-

1445 ) 

-

1446 

-

1447 def _parse_alt_form( 

-

1448 self, 

-

1449 alt_form, 

-

1450 default_target_attribute: Optional[str], 

-

1451 ) -> AttributeDescription: 

-

1452 td, anno, is_optional = _parse_type( 

-

1453 "source_format alternative form", 

-

1454 alt_form, 

-

1455 forbid_optional=True, 

-

1456 parsing_typed_dict_attribute=False, 

-

1457 ) 

-

1458 type_validator = self._as_type_validator( 

-

1459 "source_format alternative form", 

-

1460 td, 

-

1461 True, 

-

1462 ) 

-

1463 parsed_annotations = DetectedDebputyParseHint.parse_annotations( 

-

1464 anno, 

-

1465 " The alternative for source_format.", 

-

1466 None, 

-

1467 False, 

-

1468 default_target_attribute=default_target_attribute, 

-

1469 allow_target_attribute_annotation=True, 

-

1470 allow_source_attribute_annotations=False, 

-

1471 ) 

-

1472 return AttributeDescription( 

-

1473 target_attribute=parsed_annotations.target_attribute, 

-

1474 attribute_type=td, 

-

1475 type_validator=type_validator, 

-

1476 annotations=anno, 

-

1477 is_optional=is_optional, 

-

1478 conflicting_attributes=parsed_annotations.conflict_with_source_attributes, 

-

1479 conditional_required=parsed_annotations.conditional_required, 

-

1480 source_attribute_name="Alt form of the source_format", 

-

1481 ) 

-

1482 

-

1483 def _union_narrowing( 

-

1484 self, 

-

1485 input_type: Any, 

-

1486 target_type: Any, 

-

1487 parsing_typed_dict_attribute: bool, 

-

1488 ) -> Optional[Callable[[Any, AttributePath, Optional["ParserContextData"]], Any]]: 

-

1489 _, input_orig, input_args = unpack_type( 

-

1490 input_type, parsing_typed_dict_attribute 

-

1491 ) 

-

1492 _, target_orig, target_args = unpack_type( 

-

1493 target_type, parsing_typed_dict_attribute 

-

1494 ) 

-

1495 

-

1496 if input_orig != Union or not input_args: 1496 ↛ 1497line 1496 didn't jump to line 1497, because the condition on line 1496 was never true

-

1497 raise ValueError("input_type must be a Union[...] with non-empty args") 

-

1498 

-

1499 # Currently, we only support Union[X, List[X]] -> List[Y] narrowing or Union[X, List[X]] -> Union[Y, List[Y]] 

-

1500 # - Where X = Y or there is a simple standard transformation from X to Y. 

-

1501 

-

1502 if target_orig not in (Union, list) or not target_args: 

-

1503 # Not supported 

-

1504 return None 

-

1505 

-

1506 if target_orig == Union and set(input_args) == set(target_args): 1506 ↛ 1508line 1506 didn't jump to line 1508, because the condition on line 1506 was never true

-

1507 # Not needed (identity mapping) 

-

1508 return None 

-

1509 

-

1510 if target_orig == list and not any(get_origin(a) == list for a in input_args): 1510 ↛ exit, 1510 ↛ 1512 (2 missed branches): 1) line 1510 didn't finish the generator expression on line 1510, 2) line 1510 didn't jump to line 1512, because the condition on line 1510 was never true

-

1511 # Not supported 

-

1512 return None 

-

1513 

-

1514 target_arg = target_args[0] 

-

1515 simplified_type = self._strip_mapped_types( 

-

1516 target_arg, parsing_typed_dict_attribute 

-

1517 ) 

-

1518 acceptable_types = { 

-

1519 target_arg, 

-

1520 List[target_arg], # type: ignore 

-

1521 simplified_type, 

-

1522 List[simplified_type], # type: ignore 

-

1523 } 

-

1524 target_format = ( 

-

1525 target_arg, 

-

1526 List[target_arg], # type: ignore 

-

1527 ) 

-

1528 in_target_format = 0 

-

1529 in_simple_format = 0 

-

1530 for input_arg in input_args: 

-

1531 if input_arg not in acceptable_types: 1531 ↛ 1533line 1531 didn't jump to line 1533, because the condition on line 1531 was never true

-

1532 # Not supported 

-

1533 return None 

-

1534 if input_arg in target_format: 

-

1535 in_target_format += 1 

-

1536 else: 

-

1537 in_simple_format += 1 

-

1538 

-

1539 assert in_simple_format or in_target_format 

-

1540 

-

1541 if in_target_format and not in_simple_format: 

-

1542 # Union[X, List[X]] -> List[X] 

-

1543 return normalize_into_list 

-

1544 mapped = self._registered_types[target_arg] 

-

1545 if not in_target_format and in_simple_format: 1545 ↛ 1560line 1545 didn't jump to line 1560, because the condition on line 1545 was never false

-

1546 # Union[X, List[X]] -> List[Y] 

-

1547 

-

1548 def _mapper_x_list_y( 

-

1549 x: Union[Any, List[Any]], 

-

1550 ap: AttributePath, 

-

1551 pc: Optional["ParserContextData"], 

-

1552 ) -> List[Any]: 

-

1553 in_list_form: List[Any] = normalize_into_list(x, ap, pc) 

-

1554 

-

1555 return [mapped.mapper(x, ap, pc) for x in in_list_form] 

-

1556 

-

1557 return _mapper_x_list_y 

-

1558 

-

1559 # Union[Y, List[X]] -> List[Y] 

-

1560 if not isinstance(target_arg, type): 

-

1561 raise ValueError( 

-

1562 f"Cannot narrow {input_type} -> {target_type}: The automatic conversion does" 

-

1563 f" not support mixed types. Please use either {simplified_type} or {target_arg}" 

-

1564 f" in the source content (but both a mix of both)" 

-

1565 ) 

-

1566 

-

1567 def _mapper_mixed_list_y( 

-

1568 x: Union[Any, List[Any]], 

-

1569 ap: AttributePath, 

-

1570 pc: Optional["ParserContextData"], 

-

1571 ) -> List[Any]: 

-

1572 in_list_form: List[Any] = normalize_into_list(x, ap, pc) 

-

1573 

-

1574 return [ 

-

1575 x if isinstance(x, target_arg) else mapped.mapper(x, ap, pc) 

-

1576 for x in in_list_form 

-

1577 ] 

-

1578 

-

1579 return _mapper_mixed_list_y 

-

1580 

-

1581 def _type_normalize( 

-

1582 self, 

-

1583 attribute: str, 

-

1584 input_type: Any, 

-

1585 target_type: Any, 

-

1586 parsing_typed_dict_attribute: bool, 

-

1587 ) -> Optional[Callable[[Any, AttributePath, Optional["ParserContextData"]], Any]]: 

-

1588 if input_type == target_type: 

-

1589 return None 

-

1590 _, input_orig, input_args = unpack_type( 

-

1591 input_type, parsing_typed_dict_attribute 

-

1592 ) 

-

1593 _, target_orig, target_args = unpack_type( 

-

1594 target_type, 

-

1595 parsing_typed_dict_attribute, 

-

1596 ) 

-

1597 if input_orig == Union: 

-

1598 result = self._union_narrowing( 

-

1599 input_type, target_type, parsing_typed_dict_attribute 

-

1600 ) 

-

1601 if result: 

-

1602 return result 

-

1603 elif target_orig == list and target_args[0] == input_type: 

-

1604 return wrap_into_list 

-

1605 

-

1606 mapped = self._registered_types.get(target_type) 

-

1607 if mapped is not None and input_type == mapped.source_type: 

-

1608 # Source -> Target 

-

1609 return mapped.mapper 

-

1610 if target_orig == list and target_args: 1610 ↛ 1628line 1610 didn't jump to line 1628, because the condition on line 1610 was never false

-

1611 mapped = self._registered_types.get(target_args[0]) 

-

1612 if mapped is not None: 1612 ↛ 1628line 1612 didn't jump to line 1628, because the condition on line 1612 was never false

-

1613 # mypy is dense and forgot `mapped` cannot be optional in the comprehensions. 

-

1614 mapped_type: TypeMapping = mapped 

-

1615 if input_type == mapped.source_type: 1615 ↛ 1617line 1615 didn't jump to line 1617, because the condition on line 1615 was never true

-

1616 # Source -> List[Target] 

-

1617 return lambda x, ap, pc: [mapped_type.mapper(x, ap, pc)] 

-

1618 if ( 1618 ↛ 1628line 1618 didn't jump to line 1628

-

1619 input_orig == list 

-

1620 and input_args 

-

1621 and input_args[0] == mapped_type.source_type 

-

1622 ): 

-

1623 # List[Source] -> List[Target] 

-

1624 return lambda xs, ap, pc: [ 

-

1625 mapped_type.mapper(x, ap, pc) for x in xs 

-

1626 ] 

-

1627 

-

1628 raise ValueError( 

-

1629 f'Unsupported type normalization for "{attribute}": Cannot automatically map/narrow' 

-

1630 f" {input_type} to {target_type}" 

-

1631 ) 
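# Added summary (for illustration; not part of the upstream source) of the conversions
# _type_normalize can produce, given a registered TypeMapping Source -> Target:
#
#   X -> X                          : no mapper (identity)
#   Union[X, List[X]] -> List[...]  : via _union_narrowing
#   X -> List[X]                    : wrap_into_list
#   Source -> Target                : the registered mapper
#   Source -> List[Target]          : wrap the mapped value in a list
#   List[Source] -> List[Target]    : map the registered mapper over each element
#
# Anything else raises ValueError.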

-

1632 

-

1633 def _strip_mapped_types( 

-

1634 self, orig_td: Any, parsing_typed_dict_attribute: bool 

-

1635 ) -> Any: 

-

1636 m = self._registered_types.get(orig_td) 

-

1637 if m is not None: 

-

1638 return m.source_type 

-

1639 _, v, args = unpack_type(orig_td, parsing_typed_dict_attribute) 

-

1640 if v == list: 

-

1641 arg = args[0] 

-

1642 m = self._registered_types.get(arg) 

-

1643 if m: 

-

1644 return List[m.source_type] # type: ignore 

-

1645 if v == Union: 

-

1646 stripped_args = tuple( 

-

1647 self._strip_mapped_types(x, parsing_typed_dict_attribute) for x in args 

-

1648 ) 

-

1649 if stripped_args != args: 

-

1650 return Union[stripped_args] 

-

1651 return orig_td 

-

1652 

-

1653 

-

1654def _verify_inline_reference_documentation( 

-

1655 source_content_attributes: Mapping[str, AttributeDescription], 

-

1656 inline_reference_documentation: Optional[ParserDocumentation], 

-

1657 has_alt_form: bool, 

-

1658) -> None: 

-

1659 if inline_reference_documentation is None: 

-

1660 return 

-

1661 attribute_doc = inline_reference_documentation.attribute_doc 

-

1662 if attribute_doc: 

-

1663 seen = set() 

-

1664 for attr_doc in attribute_doc: 

-

1665 for attr_name in attr_doc.attributes: 

-

1666 attr = source_content_attributes.get(attr_name) 

-

1667 if attr is None: 1667 ↛ 1668line 1667 didn't jump to line 1668, because the condition on line 1667 was never true

-

1668 raise ValueError( 

-

1669 f'The inline_reference_documentation references an attribute "{attr_name}", which does not' 

-

1670 f" exist in the source format." 

-

1671 ) 

-

1672 if attr_name in seen: 1672 ↛ 1673line 1672 didn't jump to line 1673, because the condition on line 1672 was never true

-

1673 raise ValueError( 

-

1674 f'The inline_reference_documentation has documentation for "{attr_name}" twice,' 

-

1675 f" which is not supported. Please document it at most once" 

-

1676 ) 

-

1677 seen.add(attr_name) 

-

1678 

-

1679 undocumented = source_content_attributes.keys() - seen 

-

1680 if undocumented: 1680 ↛ 1681line 1680 didn't jump to line 1681, because the condition on line 1680 was never true

-

1681 undocumented_attrs = ", ".join(undocumented) 

-

1682 raise ValueError( 

-

1683 "The following attributes were not documented. If this is deliberate, then please" 

-

1684 ' declare each of them as undocumented (via undocumented_attr("foo")):' 

-

1685 f" {undocumented_attrs}" 

-

1686 ) 

-

1687 

-

1688 if inline_reference_documentation.alt_parser_description and not has_alt_form: 1688 ↛ 1689line 1688 didn't jump to line 1689, because the condition on line 1688 was never true

-

1689 raise ValueError( 

-

1690 "The inline_reference_documentation had documentation for an non-mapping format," 

-

1691 " but the source format does not have a non-mapping format." 

-

1692 ) 

-

1693 

-

1694 

-

1695def _check_conflicts( 

-

1696 input_content_attributes: Dict[str, AttributeDescription], 

-

1697 required_attributes: FrozenSet[str], 

-

1698 all_attributes: FrozenSet[str], 

-

1699) -> None: 

-

1700 for attr_name, attr in input_content_attributes.items(): 

-

1701 if attr_name in required_attributes and attr.conflicting_attributes: 1701 ↛ 1702line 1701 didn't jump to line 1702, because the condition on line 1701 was never true

-

1702 c = ", ".join(repr(a) for a in attr.conflicting_attributes) 

-

1703 raise ValueError( 

-

1704 f'The attribute "{attr_name}" is required and conflicts with the attributes: {c}.' 

-

1705 " This makes it impossible to use these attributes. Either remove the attributes" 

-

1706 f' (along with the conflicts for them), adjust the conflicts or make "{attr_name}"' 

-

1707 " optional (NotRequired)" 

-

1708 ) 

-

1709 else: 

-

1710 required_conflicts = attr.conflicting_attributes & required_attributes 

-

1711 if required_conflicts: 1711 ↛ 1712line 1711 didn't jump to line 1712, because the condition on line 1711 was never true

-

1712 c = ", ".join(repr(a) for a in required_conflicts) 

-

1713 raise ValueError( 

-

1714 f'The attribute "{attr_name}" conflicts with the following *required* attributes: {c}.' 

-

1715 f' This makes it impossible to use the "{attr_name}" attribute. Either remove it,' 

-

1716 f" adjust the conflicts or make the listed attributes optional (NotRequired)" 

-

1717 ) 

-

1718 unknown_attributes = attr.conflicting_attributes - all_attributes 

-

1719 if unknown_attributes: 1719 ↛ 1720line 1719 didn't jump to line 1720, because the condition on line 1719 was never true

-

1720 c = ", ".join(repr(a) for a in unknown_attributes) 

-

1721 raise ValueError( 

-

1722 f'The attribute "{attr_name}" declares a conflict with the following unknown attributes: {c}.' 

-

1723 f" None of these attributes were declared in the input." 

-

1724 ) 

-

1725 

-

1726 

-

1727def _check_attributes( 

-

1728 content: Type[TypedDict], 

-

1729 input_content: Type[TypedDict], 

-

1730 input_content_attributes: Dict[str, AttributeDescription], 

-

1731 sources: Mapping[str, Collection[str]], 

-

1732) -> None: 

-

1733 target_required_keys = content.__required_keys__ 

-

1734 input_required_keys = input_content.__required_keys__ 

-

1735 all_input_keys = input_required_keys | input_content.__optional_keys__ 

-

1736 

-

1737 for input_name in all_input_keys: 

-

1738 attr = input_content_attributes[input_name] 

-

1739 target_name = attr.target_attribute 

-

1740 source_names = sources[target_name] 

-

1741 input_is_required = input_name in input_required_keys 

-

1742 target_is_required = target_name in target_required_keys 

-

1743 

-

1744 assert source_names 

-

1745 

-

1746 if input_is_required and len(source_names) > 1: 1746 ↛ 1747line 1746 didn't jump to line 1747, because the condition on line 1746 was never true

-

1747 raise ValueError( 

-

1748 f'The source attribute "{input_name}" is required, but it maps to "{target_name}",' 

-

1749 f' which has multiple sources "{source_names}". If "{input_name}" should be required,' 

-

1750 f' then there is no need for additional sources for "{target_name}". Alternatively,' 

-

1751 f' "{input_name}" might be missing a NotRequired type' 

-

1752 f' (example: "{input_name}: NotRequired[<OriginalTypeHere>]")' 

-

1753 ) 

-

1754 if not input_is_required and target_is_required and len(source_names) == 1: 1754 ↛ 1755line 1754 didn't jump to line 1755, because the condition on line 1754 was never true

-

1755 raise ValueError( 

-

1756 f'The source attribute "{input_name}" is not marked as required and maps to' 

-

1757 f' "{target_name}", which is marked as required. As there are no other attributes' 

-

1758 f' mapping to "{target_name}", then "{input_name}" must be required as well' 

-

1759 f' ("{input_name}: Required[<Type>]"). Alternatively, "{target_name}" should be optional' 

-

1760 f' ("{target_name}: NotRequired[<Type>]") or an "MappingHint.aliasOf" might be missing.' 

-

1761 ) 

-

1762 

-

1763 

-

1764def _validation_type_error(path: AttributePath, message: str) -> None: 

-

1765 raise ManifestParseException( 

-

1766 f'The attribute "{path.path}" did not have a valid structure/type: {message}' 

-

1767 ) 

-

1768 

-

1769 

-

1770def _is_two_arg_x_list_x(t_args: Tuple[Any, ...]) -> bool: 

-

1771 if len(t_args) != 2: 1771 ↛ 1772line 1771 didn't jump to line 1772, because the condition on line 1771 was never true

-

1772 return False 

-

1773 lhs, rhs = t_args 

-

1774 if get_origin(lhs) == list: 

-

1775 if get_origin(rhs) == list: 1775 ↛ 1778line 1775 didn't jump to line 1778, because the condition on line 1775 was never true

-

1776 # It could still match X, List[X] - but we do not allow this case for now as the caller 

-

1777 # does not support it. 

-

1778 return False 

-

1779 l_args = get_args(lhs) 

-

1780 return bool(l_args and l_args[0] == rhs) 

-

1781 if get_origin(rhs) == list: 

-

1782 r_args = get_args(rhs) 

-

1783 return bool(r_args and r_args[0] == lhs) 

-

1784 return False 

-

1785 

-

1786 

-

1787def _extract_typed_dict( 

-

1788 base_type, 

-

1789 default_target_attribute: Optional[str], 

-

1790) -> Tuple[Optional[Type[TypedDict]], Any]: 

-

1791 if is_typeddict(base_type): 

-

1792 return base_type, None 

-

1793 _, origin, args = unpack_type(base_type, False) 

-

1794 if origin != Union: 

-

1795 if isinstance(base_type, type) and issubclass(base_type, (dict, Mapping)): 1795 ↛ 1796line 1795 didn't jump to line 1796, because the condition on line 1795 was never true

-

1796 raise ValueError( 

-

1797 "The source_format cannot be nor contain a (non-TypedDict) dict" 

-

1798 ) 

-

1799 return None, base_type 

-

1800 typed_dicts = [x for x in args if is_typeddict(x)] 

-

1801 if len(typed_dicts) > 1: 1801 ↛ 1802line 1801 didn't jump to line 1802, because the condition on line 1801 was never true

-

1802 raise ValueError( 

-

1803 "When source_format is a Union, it must contain at most one TypedDict" 

-

1804 ) 

-

1805 typed_dict = typed_dicts[0] if typed_dicts else None 

-

1806 

-

1807 if any(x is None or x is _NONE_TYPE for x in args): 1807 ↛ 1808line 1807 didn't jump to line 1808, because the condition on line 1807 was never true

-

1808 raise ValueError( 

-

1809 "The source_format cannot be nor contain Optional[X] or Union[X, None]" 

-

1810 ) 

-

1811 

-

1812 if any( 1812 ↛ 1817line 1812 didn't jump to line 1817, because the condition on line 1812 was never true

-

1813 isinstance(x, type) and issubclass(x, (dict, Mapping)) 

-

1814 for x in args 

-

1815 if x is not typed_dict 

-

1816 ): 

-

1817 raise ValueError( 

-

1818 "The source_format cannot be nor contain a (non-TypedDict) dict" 

-

1819 ) 

-

1820 remaining = [x for x in args if x is not typed_dict] 

-

1821 has_target_attribute = False 

-

1822 anno = None 

-

1823 if len(remaining) == 1: 1823 ↛ 1824line 1823 didn't jump to line 1824, because the condition on line 1823 was never true

-

1824 base_type, anno, _ = _parse_type( 

-

1825 "source_format alternative form", 

-

1826 remaining[0], 

-

1827 forbid_optional=True, 

-

1828 parsing_typed_dict_attribute=False, 

-

1829 ) 

-

1830 has_target_attribute = bool(anno) and any( 

-

1831 isinstance(x, TargetAttribute) for x in anno 

-

1832 ) 

-

1833 target_type = base_type 

-

1834 else: 

-

1835 target_type = Union[tuple(remaining)] 

-

1836 

-

1837 if default_target_attribute is None and not has_target_attribute: 1837 ↛ 1838line 1837 didn't jump to line 1838, because the condition on line 1837 was never true

-

1838 raise ValueError( 

-

1839 'The alternative format must be Union[TypedDict,Annotated[X, DebputyParseHint.target_attribute("...")]]' 

-

1840 " OR the parsed_content format must have exactly one attribute that is required." 

-

1841 ) 

-

1842 if anno: 1842 ↛ 1843line 1842 didn't jump to line 1843, because the condition on line 1842 was never true

-

1843 final_anno = [target_type] 

-

1844 final_anno.extend(anno) 

-

1845 return typed_dict, Annotated[tuple(final_anno)] 

-

1846 return typed_dict, target_type 

-

1847 

-

1848 

-

1849def _dispatch_parse_generator( 

-

1850 dispatch_type: Type[DebputyDispatchableType], 

-

1851) -> Callable[[Any, AttributePath, Optional["ParserContextData"]], Any]: 

-

1852 def _dispatch_parse( 

-

1853 value: Any, 

-

1854 attribute_path: AttributePath, 

-

1855 parser_context: Optional["ParserContextData"], 

-

1856 ): 

-

1857 assert parser_context is not None 

-

1858 dispatching_parser = parser_context.dispatch_parser_table_for(dispatch_type) 

-

1859 return dispatching_parser.parse_input( 

-

1860 value, attribute_path, parser_context=parser_context 

-

1861 ) 

-

1862 

-

1863 return _dispatch_parse 

-

1864 

-

1865 

-

1866def _dispatch_parser( 

-

1867 dispatch_type: Type[DebputyDispatchableType], 

-

1868) -> AttributeTypeHandler: 

-

1869 return AttributeTypeHandler( 

-

1870 dispatch_type.__name__, 

-

1871 lambda *a: None, 

-

1872 mapper=_dispatch_parse_generator(dispatch_type), 

-

1873 ) 

-

1874 

-

1875 

-

1876def _parse_type( 

-

1877 attribute: str, 

-

1878 orig_td: Any, 

-

1879 forbid_optional: bool = True, 

-

1880 parsing_typed_dict_attribute: bool = True, 

-

1881) -> Tuple[Any, Tuple[Any, ...], bool]: 

-

1882 td, v, args = unpack_type(orig_td, parsing_typed_dict_attribute) 

-

1883 md: Tuple[Any, ...] = tuple() 

-

1884 optional = False 

-

1885 if v is not None: 

-

1886 if v == Annotated: 

-

1887 anno = get_args(td) 

-

1888 md = anno[1:] 

-

1889 td, v, args = unpack_type(anno[0], parsing_typed_dict_attribute) 

-

1890 

-

1891 if td is _NONE_TYPE: 1891 ↛ 1892line 1891 didn't jump to line 1892, because the condition on line 1891 was never true

-

1892 raise ValueError( 

-

1893 f'The attribute "{attribute}" resolved to type "None". "Nil" / "None" fields are not allowed in the' 

-

1894 " debputy manifest, so this attribute does not make sense in its current form." 

-

1895 ) 

-

1896 if forbid_optional and v == Union and any(a is _NONE_TYPE for a in args): 1896 ↛ 1897line 1896 didn't jump to line 1897, because the condition on line 1896 was never true

-

1897 raise ValueError( 

-

1898 f'Detected use of Optional in "{attribute}", which is not allowed here.' 

-

1899 " Please use NotRequired for optional fields" 

-

1900 ) 

-

1901 

-

1902 return td, md, optional 

-

1903 

-

1904 

-

1905def _normalize_attribute_name(attribute: str) -> str: 

-

1906 if attribute.endswith("_"): 

-

1907 attribute = attribute[:-1] 

-

1908 return attribute.replace("_", "-") 
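# Added illustration (not in the upstream source): manifest names are the Python
# attribute names with any trailing "_" stripped and remaining "_" replaced by "-":
#
#   _normalize_attribute_name("install_docs") -> "install-docs"
#   _normalize_attribute_name("for_")         -> "for"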

-

1909 

-

1910 

-

1911@dataclasses.dataclass 

-

1912class DetectedDebputyParseHint: 

-

1913 target_attribute: str 

-

1914 source_manifest_attribute: Optional[str] 

-

1915 conflict_with_source_attributes: FrozenSet[str] 

-

1916 conditional_required: Optional[ConditionalRequired] 

-

1917 applicable_as_path_hint: bool 

-

1918 

-

1919 @classmethod 

-

1920 def parse_annotations( 

-

1921 cls, 

-

1922 anno: Tuple[Any, ...], 

-

1923 error_context: str, 

-

1924 default_attribute_name: Optional[str], 

-

1925 is_required: bool, 

-

1926 default_target_attribute: Optional[str] = None, 

-

1927 allow_target_attribute_annotation: bool = False, 

-

1928 allow_source_attribute_annotations: bool = False, 

-

1929 ) -> "DetectedDebputyParseHint": 

-

1930 target_attr_anno = find_annotation(anno, TargetAttribute) 

-

1931 if target_attr_anno: 

-

1932 if not allow_target_attribute_annotation: 1932 ↛ 1933line 1932 didn't jump to line 1933, because the condition on line 1932 was never true

-

1933 raise ValueError( 

-

1934 f"The DebputyParseHint.target_attribute annotation is not allowed in this context.{error_context}" 

-

1935 ) 

-

1936 target_attribute = target_attr_anno.attribute 

-

1937 elif default_target_attribute is not None: 

-

1938 target_attribute = default_target_attribute 

-

1939 elif default_attribute_name is not None: 1939 ↛ 1942line 1939 didn't jump to line 1942, because the condition on line 1939 was never false

-

1940 target_attribute = default_attribute_name 

-

1941 else: 

-

1942 if default_attribute_name is None: 

-

1943 raise ValueError( 

-

1944 "allow_target_attribute_annotation must be True OR " 

-

1945 "default_attribute_name/default_target_attribute must be not None" 

-

1946 ) 

-

1947 raise ValueError( 

-

1948 f"Missing DebputyParseHint.target_attribute annotation.{error_context}" 

-

1949 ) 

-

1950 source_attribute_anno = find_annotation(anno, ManifestAttribute) 

-

1951 _source_attribute_allowed( 

-

1952 allow_source_attribute_annotations, error_context, source_attribute_anno 

-

1953 ) 

-

1954 if source_attribute_anno: 

-

1955 source_attribute_name = source_attribute_anno.attribute 

-

1956 elif default_attribute_name is not None: 

-

1957 source_attribute_name = _normalize_attribute_name(default_attribute_name) 

-

1958 else: 

-

1959 source_attribute_name = None 

-

1960 mutual_exclusive_with_anno = find_annotation(anno, ConflictWithSourceAttribute) 

-

1961 if mutual_exclusive_with_anno: 

-

1962 _source_attribute_allowed( 

-

1963 allow_source_attribute_annotations, 

-

1964 error_context, 

-

1965 mutual_exclusive_with_anno, 

-

1966 ) 

-

1967 conflicting_attributes = mutual_exclusive_with_anno.conflicting_attributes 

-

1968 else: 

-

1969 conflicting_attributes = frozenset() 

-

1970 conditional_required = find_annotation(anno, ConditionalRequired) 

-

1971 

-

1972 if conditional_required and is_required: 1972 ↛ 1973line 1972 didn't jump to line 1973, because the condition on line 1972 was never true

-

1973 if default_attribute_name is None: 

-

1974 raise ValueError( 

-

1975 f"is_required cannot be True without default_attribute_name being not None" 

-

1976 ) 

-

1977 raise ValueError( 

-

1978 f'The attribute "{default_attribute_name}" is Required while also being conditionally required.' 

-

1979 ' Please make the attribute "NotRequired" or remove the conditional requirement.' 

-

1980 ) 

-

1981 

-

1982 not_path_hint_anno = find_annotation(anno, NotPathHint) 

-

1983 applicable_as_path_hint = not_path_hint_anno is None 

-

1984 

-

1985 return DetectedDebputyParseHint( 

-

1986 target_attribute=target_attribute, 

-

1987 source_manifest_attribute=source_attribute_name, 

-

1988 conflict_with_source_attributes=conflicting_attributes, 

-

1989 conditional_required=conditional_required, 

-

1990 applicable_as_path_hint=applicable_as_path_hint, 

-

1991 ) 

-

1992 

-

1993 

-

1994def _source_attribute_allowed( 

-

1995 source_attribute_allowed: bool, 

-

1996 error_context: str, 

-

1997 annotation: Optional[DebputyParseHint], 

-

1998) -> None: 

-

1999 if source_attribute_allowed or annotation is None: 1999 ↛ 2001line 1999 didn't jump to line 2001, because the condition on line 1999 was never false

-

2000 return 

-

2001 raise ValueError( 

-

2002 f'The annotation "{annotation}" cannot be used here. {error_context}' 

-

2003 ) 

-
diff --git a/coverage-report/d_4f754ff76d8638bb_exceptions_py.html b/coverage-report/d_4f754ff76d8638bb_exceptions_py.html
deleted file mode
index 038f97c..0000000
--- a/coverage-report/d_4f754ff76d8638bb_exceptions_py.html
+++ /dev/null
@@ -1,108 +0,0 @@

Coverage for src/debputy/manifest_parser/exceptions.py: 100% (5 statements). coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

1from debputy.exceptions import DebputyRuntimeError 

-

2 

-

3 

-

4class ManifestParseException(DebputyRuntimeError): 

-

5 pass 

-

6 

-

7 

-

8class ManifestTypeException(ManifestParseException): 

-

9 pass 

-
diff --git a/coverage-report/d_4f754ff76d8638bb_mapper_code_py.html b/coverage-report/d_4f754ff76d8638bb_mapper_code_py.html
deleted file mode
index 7b7059c..0000000
--- a/coverage-report/d_4f754ff76d8638bb_mapper_code_py.html
+++ /dev/null
@@ -1,176 +0,0 @@

Coverage for src/debputy/manifest_parser/mapper_code.py: 86% (32 statements). coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

1from typing import ( 

-

2 TypeVar, 

-

3 Optional, 

-

4 Union, 

-

5 List, 

-

6 Callable, 

-

7) 

-

8 

-

9from debputy.manifest_parser.exceptions import ManifestTypeException 

-

10from debputy.manifest_parser.parser_data import ParserContextData 

-

11from debputy.manifest_parser.util import AttributePath 

-

12from debputy.packages import BinaryPackage 

-

13from debputy.util import assume_not_none 

-

14 

-

15S = TypeVar("S") 

-

16T = TypeVar("T") 

-

17 

-

18 

-

19def type_mapper_str2package( 

-

20 raw_package_name: str, 

-

21 ap: AttributePath, 

-

22 opc: Optional[ParserContextData], 

-

23) -> BinaryPackage: 

-

24 pc = assume_not_none(opc) 

-

25 if "{{" in raw_package_name: 

-

26 resolved_package_name = pc.substitution.substitute(raw_package_name, ap.path) 

-

27 else: 

-

28 resolved_package_name = raw_package_name 

-

29 

-

30 package_name_in_message = raw_package_name 

-

31 if resolved_package_name != raw_package_name: 

-

32 package_name_in_message = f'"{resolved_package_name}" ["{raw_package_name}"]' 

-

33 

-

34 if not pc.is_known_package(resolved_package_name): 34 ↛ 35line 34 didn't jump to line 35, because the condition on line 34 was never true

-

35 package_names = ", ".join(pc.binary_packages) 

-

36 raise ManifestTypeException( 

-

37 f'The value {package_name_in_message} (from "{ap.path}") does not reference a package declared in' 

-

38 f" debian/control. Valid options are: {package_names}" 

-

39 ) 

-

40 package_data = pc.binary_package_data(resolved_package_name) 

-

41 if package_data.is_auto_generated_package: 41 ↛ 42line 41 didn't jump to line 42, because the condition on line 41 was never true

-

42 package_names = ", ".join(pc.binary_packages) 

-

43 raise ManifestTypeException( 

-

44 f'The package name {package_name_in_message} (from "{ap.path}") references an auto-generated package.' 

-

45 " However, auto-generated packages are now permitted here. Valid options are:" 

-

46 f" {package_names}" 

-

47 ) 

-

48 return package_data.binary_package 

-

49 

-

50 

-

51def wrap_into_list( 

-

52 x: T, 

-

53 _ap: AttributePath, 

-

54 _pc: Optional["ParserContextData"], 

-

55) -> List[T]: 

-

56 return [x] 

-

57 

-

58 

-

59def normalize_into_list( 

-

60 x: Union[T, List[T]], 

-

61 _ap: AttributePath, 

-

62 _pc: Optional["ParserContextData"], 

-

63) -> List[T]: 

-

64 return x if isinstance(x, list) else [x] 

-

65 

-

66 

-

67def map_each_element( 

-

68 mapper: Callable[[S, AttributePath, Optional["ParserContextData"]], T], 

-

69) -> Callable[[List[S], AttributePath, Optional["ParserContextData"]], List[T]]: 

-

70 def _generated_mapper( 

-

71 xs: List[S], 

-

72 ap: AttributePath, 

-

73 pc: Optional["ParserContextData"], 

-

74 ) -> List[T]: 

-

75 return [mapper(s, ap[i], pc) for i, s in enumerate(xs)] 

-

76 

-

77 return _generated_mapper 
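# Usage sketch (added for illustration; not part of the upstream module), assuming `ap`
# is an AttributePath and no parser context is required by the mapper:
#
#   wrap_into_list("foo", ap, None)           -> ["foo"]
#   normalize_into_list("foo", ap, None)      -> ["foo"]
#   normalize_into_list(["a", "b"], ap, None) -> ["a", "b"]
#
# map_each_element(f) lifts an element mapper f to lists, giving each element its own
# indexed AttributePath (ap[i]) for precise error reporting.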

-
diff --git a/coverage-report/d_4f754ff76d8638bb_parser_data_py.html b/coverage-report/d_4f754ff76d8638bb_parser_data_py.html
deleted file mode
index 3f5a3d1..0000000
--- a/coverage-report/d_4f754ff76d8638bb_parser_data_py.html
+++ /dev/null
@@ -1,232 +0,0 @@

Coverage for src/debputy/manifest_parser/parser_data.py: 85% (54 statements). coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

1import contextlib 

-

2from typing import ( 

-

3 Iterator, 

-

4 Optional, 

-

5 Mapping, 

-

6 NoReturn, 

-

7 Union, 

-

8 Any, 

-

9 TYPE_CHECKING, 

-

10 Tuple, 

-

11) 

-

12 

-

13from debian.debian_support import DpkgArchTable 

-

14 

-

15from debputy._deb_options_profiles import DebBuildOptionsAndProfiles 

-

16from debputy.architecture_support import DpkgArchitectureBuildProcessValuesTable 

-

17from debputy.manifest_conditions import ManifestCondition 

-

18from debputy.manifest_parser.exceptions import ManifestParseException 

-

19from debputy.manifest_parser.util import AttributePath 

-

20from debputy.packages import BinaryPackage 

-

21from debputy.plugin.api.impl_types import ( 

-

22 _ALL_PACKAGE_TYPES, 

-

23 resolve_package_type_selectors, 

-

24 TP, 

-

25 DispatchingTableParser, 

-

26 TTP, 

-

27 DispatchingObjectParser, 

-

28) 

-

29from debputy.plugin.api.spec import PackageTypeSelector 

-

30from debputy.substitution import Substitution 

-

31 

-

32 

-

33if TYPE_CHECKING: 

-

34 from debputy.highlevel_manifest import PackageTransformationDefinition 

-

35 

-

36 

-

37class ParserContextData: 

-

38 @property 

-

39 def binary_packages(self) -> Mapping[str, BinaryPackage]: 

-

40 raise NotImplementedError 

-

41 

-

42 @property 

-

43 def _package_states(self) -> Mapping[str, "PackageTransformationDefinition"]: 

-

44 raise NotImplementedError 

-

45 

-

46 @property 

-

47 def is_single_binary_package(self) -> bool: 

-

48 return len(self.binary_packages) == 1 

-

49 

-

50 def single_binary_package( 

-

51 self, 

-

52 attribute_path: AttributePath, 

-

53 *, 

-

54 package_type: PackageTypeSelector = _ALL_PACKAGE_TYPES, 

-

55 package_attribute: Optional[str] = None, 

-

56 ) -> Optional[BinaryPackage]: 

-

57 resolved_package_types = resolve_package_type_selectors(package_type) 

-

58 possible_matches = [ 

-

59 p 

-

60 for p in self.binary_packages.values() 

-

61 if p.package_type in resolved_package_types 

-

62 ] 

-

63 if len(possible_matches) == 1: 63 ↛ 66: line 63 didn't jump to line 66, because the condition on line 63 was never false 

-

64 return possible_matches[0] 

-

65 

-

66 if package_attribute is not None: 

-

67 raise ManifestParseException( 

-

68 f"The {attribute_path.path} rule needs the attribute `{package_attribute}`" 

-

69 " for this source package." 

-

70 ) 

-

71 

-

72 if not possible_matches: 

-

73 _package_types = ", ".join(sorted(resolved_package_types)) 

-

74 raise ManifestParseException( 

-

75 f"The {attribute_path.path} rule is not applicable to this source package" 

-

76 f" (it only applies to source packages that builds exactly one of" 

-

77 f" the following package types: {_package_types})." 

-

78 ) 

-

79 raise ManifestParseException( 

-

80 f"The {attribute_path.path} rule is not applicable to multi-binary packages." 

-

81 ) 

-

82 

-

83 def _error(self, msg: str) -> "NoReturn": 

-

84 raise ManifestParseException(msg) 

-

85 

-

86 def is_known_package(self, package_name: str) -> bool: 

-

87 return package_name in self._package_states 

-

88 

-

89 def binary_package_data( 

-

90 self, 

-

91 package_name: str, 

-

92 ) -> "PackageTransformationDefinition": 

-

93 if package_name not in self._package_states: 93 ↛ 94: line 93 didn't jump to line 94, because the condition on line 93 was never true 

-

94 self._error( 

-

95 f'The package "{package_name}" is not present in the debian/control file (could not find' 

-

96 f' "Package: {package_name}" in a binary stanza) nor is it a -dbgsym package for one' 

-

97 " for a package in debian/control." 

-

98 ) 

-

99 return self._package_states[package_name] 

-

100 

-

101 @property 

-

102 def dpkg_architecture_variables(self) -> DpkgArchitectureBuildProcessValuesTable: 

-

103 raise NotImplementedError 

-

104 

-

105 @property 

-

106 def dpkg_arch_query_table(self) -> DpkgArchTable: 

-

107 raise NotImplementedError 

-

108 

-

109 @property 

-

110 def build_env(self) -> DebBuildOptionsAndProfiles: 

-

111 raise NotImplementedError 

-

112 

-

113 @contextlib.contextmanager 

-

114 def binary_package_context( 

-

115 self, 

-

116 package_name: str, 

-

117 ) -> Iterator["PackageTransformationDefinition"]: 

-

118 raise NotImplementedError 

-

119 

-

120 @property 

-

121 def substitution(self) -> Substitution: 

-

122 raise NotImplementedError 

-

123 

-

124 @property 

-

125 def current_binary_package_state(self) -> "PackageTransformationDefinition": 

-

126 raise NotImplementedError 

-

127 

-

128 @property 

-

129 def is_in_binary_package_state(self) -> bool: 

-

130 raise NotImplementedError 

-

131 

-

132 def dispatch_parser_table_for(self, rule_type: TTP) -> DispatchingTableParser[TP]: 

-

133 raise NotImplementedError 

-
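ParserContextData is an abstract surface; concrete contexts override the properties above. A minimal test-style sketch (the stub is hypothetical, not debputy's real implementation):

    from debputy.manifest_parser.parser_data import ParserContextData

    class _StubContext(ParserContextData):
        def __init__(self, packages):
            self._packages = packages

        @property
        def binary_packages(self):
            return self._packages

        @property
        def _package_states(self):
            # Real implementations map names to PackageTransformationDefinition.
            return self._packages

    ctx = _StubContext({"foo": object()})
    assert ctx.is_single_binary_package
    assert ctx.is_known_package("foo")
    assert not ctx.is_known_package("bar")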
- - - diff --git a/coverage-report/d_4f754ff76d8638bb_parser_doc_py.html b/coverage-report/d_4f754ff76d8638bb_parser_doc_py.html deleted file mode 100644 index 3ce54b7..0000000 --- a/coverage-report/d_4f754ff76d8638bb_parser_doc_py.html +++ /dev/null @@ -1,372 +0,0 @@ - - - - - Coverage for src/debputy/manifest_parser/parser_doc.py: 79% - - - - - -
-
-

- Coverage for src/debputy/manifest_parser/parser_doc.py: 79% (132 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-

1import itertools 

-

2from typing import Optional, Iterable, Any, Tuple, Mapping, Sequence, FrozenSet 

-

3 

-

4from debputy import DEBPUTY_DOC_ROOT_DIR 

-

5from debputy.manifest_parser.declarative_parser import ( 

-

6 DeclarativeMappingInputParser, 

-

7 DeclarativeNonMappingInputParser, 

-

8 AttributeDescription, 

-

9) 

-

10from debputy.plugin.api.impl_types import ( 

-

11 DebputyPluginMetadata, 

-

12 DeclarativeInputParser, 

-

13 DispatchingObjectParser, 

-

14 ListWrappedDeclarativeInputParser, 

-

15 InPackageContextParser, 

-

16) 

-

17from debputy.plugin.api.spec import ( 

-

18 ParserDocumentation, 

-

19 reference_documentation, 

-

20 undocumented_attr, 

-

21) 

-

22from debputy.util import assume_not_none 

-

23 

-

24 

-

25def _provide_placeholder_parser_doc( 

-

26 parser_doc: Optional[ParserDocumentation], 

-

27 attributes: Iterable[str], 

-

28) -> ParserDocumentation: 

-

29 if parser_doc is None: 29 ↛ 30: line 29 didn't jump to line 30, because the condition on line 29 was never true 

-

30 parser_doc = reference_documentation() 

-

31 changes = {} 

-

32 if parser_doc.attribute_doc is None: 

-

33 changes["attribute_doc"] = [undocumented_attr(attr) for attr in attributes] 

-

34 

-

35 if changes: 

-

36 return parser_doc.replace(**changes) 

-

37 return parser_doc 

-

38 

-

39 

-

40def doc_args_for_parser_doc( 

-

41 rule_name: str, 

-

42 declarative_parser: DeclarativeInputParser[Any], 

-

43 plugin_metadata: DebputyPluginMetadata, 

-

44) -> Tuple[Mapping[str, str], ParserDocumentation]: 

-

45 attributes: Iterable[str] 

-

46 if isinstance(declarative_parser, DeclarativeMappingInputParser): 

-

47 attributes = declarative_parser.source_attributes.keys() 

-

48 else: 

-

49 attributes = [] 

-

50 doc_args = { 

-

51 "RULE_NAME": rule_name, 

-

52 "MANIFEST_FORMAT_DOC": f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md", 

-

53 "PLUGIN_NAME": plugin_metadata.plugin_name, 

-

54 } 

-

55 parser_doc = _provide_placeholder_parser_doc( 

-

56 declarative_parser.inline_reference_documentation, 

-

57 attributes, 

-

58 ) 

-

59 return doc_args, parser_doc 

-

60 

-

61 

-

62def render_attribute_doc( 

-

63 parser: Any, 

-

64 attributes: Mapping[str, "AttributeDescription"], 

-

65 required_attributes: FrozenSet[str], 

-

66 conditionally_required_attributes: FrozenSet[FrozenSet[str]], 

-

67 parser_doc: ParserDocumentation, 

-

68 doc_args: Mapping[str, str], 

-

69 *, 

-

70 rule_name: str = "<unset>", 

-

71 is_root_rule: bool = False, 

-

72 is_interactive: bool = False, 

-

73) -> Iterable[Tuple[FrozenSet[str], Sequence[str]]]: 

-

74 provided_attribute_docs = ( 

-

75 parser_doc.attribute_doc if parser_doc.attribute_doc is not None else [] 

-

76 ) 

-

77 

-

78 for attr_doc in assume_not_none(provided_attribute_docs): 

-

79 attr_description = attr_doc.description 

-

80 rendered_doc = [] 

-

81 

-

82 for parameter in sorted(attr_doc.attributes): 

-

83 parameter_details = attributes.get(parameter) 

-

84 if parameter_details is not None: 84 ↛ 88: line 84 didn't jump to line 88, because the condition on line 84 was never false 

-

85 source_name = parameter_details.source_attribute_name 

-

86 describe_type = parameter_details.type_validator.describe_type() 

-

87 else: 

-

88 assert isinstance(parser, DispatchingObjectParser) 

-

89 source_name = parameter 

-

90 subparser = parser.parser_for(source_name).parser 

-

91 if isinstance(subparser, InPackageContextParser): 

-

92 if is_interactive: 

-

93 describe_type = "PackageContext" 

-

94 else: 

-

95 rule_prefix = rule_name if not is_root_rule else "" 

-

96 describe_type = f"PackageContext (chains to `{rule_prefix}::{subparser.manifest_attribute_path_template}`)" 

-

97 

-

98 elif isinstance(subparser, DispatchingObjectParser): 

-

99 if is_interactive: 

-

100 describe_type = "Object" 

-

101 else: 

-

102 rule_prefix = rule_name if not is_root_rule else "" 

-

103 describe_type = f"Object (see `{rule_prefix}::{subparser.manifest_attribute_path_template}`)" 

-

104 elif isinstance(subparser, DeclarativeMappingInputParser): 

-

105 describe_type = "<Type definition not implemented yet>" # TODO: Derive from subparser 

-

106 elif isinstance(subparser, DeclarativeNonMappingInputParser): 

-

107 describe_type = ( 

-

108 subparser.alt_form_parser.type_validator.describe_type() 

-

109 ) 

-

110 else: 

-

111 describe_type = f"<Unknown: Non-introspectable subparser - {subparser.__class__.__name__}>" 

-

112 

-

113 if source_name in required_attributes: 

-

114 req_str = "required" 

-

115 elif any(source_name in s for s in conditionally_required_attributes): 

-

116 req_str = "conditional" 

-

117 else: 

-

118 req_str = "optional" 

-

119 rendered_doc.append(f"`{source_name}` ({req_str}): {describe_type}") 

-

120 

-

121 if attr_description: 121 ↛ 130: line 121 didn't jump to line 130, because the condition on line 121 was never false 

-

122 rendered_doc.append("") 

-

123 rendered_doc.extend( 

-

124 line 

-

125 for line in attr_description.format(**doc_args).splitlines( 

-

126 keepends=False 

-

127 ) 

-

128 ) 

-

129 rendered_doc.append("") 

-

130 yield attr_doc.attributes, rendered_doc 

-

131 

-

132 

-

133def render_rule( 

-

134 rule_name: str, 

-

135 declarative_parser: DeclarativeInputParser[Any], 

-

136 plugin_metadata: DebputyPluginMetadata, 

-

137 *, 

-

138 is_root_rule: bool = False, 

-

139) -> str: 

-

140 doc_args, parser_doc = doc_args_for_parser_doc( 

-

141 "the manifest root" if is_root_rule else rule_name, 

-

142 declarative_parser, 

-

143 plugin_metadata, 

-

144 ) 

-

145 t = assume_not_none(parser_doc.title).format(**doc_args) 

-

146 r = [ 

-

147 t, 

-

148 "=" * len(t), 

-

149 "", 

-

150 assume_not_none(parser_doc.description).format(**doc_args).rstrip(), 

-

151 "", 

-

152 ] 

-

153 

-

154 alt_form_parser = getattr(declarative_parser, "alt_form_parser", None) 

-

155 is_list_wrapped = False 

-

156 unwrapped_parser = declarative_parser 

-

157 if isinstance(declarative_parser, ListWrappedDeclarativeInputParser): 

-

158 is_list_wrapped = True 

-

159 unwrapped_parser = declarative_parser.delegate 

-

160 

-

161 if isinstance( 

-

162 unwrapped_parser, (DeclarativeMappingInputParser, DispatchingObjectParser) 

-

163 ): 

-

164 

-

165 if isinstance(unwrapped_parser, DeclarativeMappingInputParser): 165 ↛ 171: line 165 didn't jump to line 171, because the condition on line 165 was never false 

-

166 attributes = unwrapped_parser.source_attributes 

-

167 required = unwrapped_parser.input_time_required_parameters 

-

168 conditionally_required = unwrapped_parser.at_least_one_of 

-

169 mutually_exclusive = unwrapped_parser.mutually_exclusive_attributes 

-

170 else: 

-

171 attributes = {} 

-

172 required = frozenset() 

-

173 conditionally_required = frozenset() 

-

174 mutually_exclusive = frozenset() 

-

175 if is_list_wrapped: 

-

176 r.append("List where each element has the following attributes:") 

-

177 else: 

-

178 r.append("Attributes:") 

-

179 

-

180 rendered_attr_doc = render_attribute_doc( 

-

181 unwrapped_parser, 

-

182 attributes, 

-

183 required, 

-

184 conditionally_required, 

-

185 parser_doc, 

-

186 doc_args, 

-

187 is_root_rule=is_root_rule, 

-

188 rule_name=rule_name, 

-

189 is_interactive=False, 

-

190 ) 

-

191 for _, rendered_doc in rendered_attr_doc: 

-

192 prefix = " - " 

-

193 for line in rendered_doc: 

-

194 if line: 

-

195 r.append(f"{prefix}{line}") 

-

196 else: 

-

197 r.append("") 

-

198 prefix = " " 

-

199 

-

200 if ( 

-

201 bool(conditionally_required) 

-

202 or bool(mutually_exclusive) 

-

203 or any(pd.conflicting_attributes for pd in attributes.values()) 

-

204 ): 

-

205 r.append("") 

-

206 if is_list_wrapped: 

-

207 r.append( 

-

208 "This rule enforces the following restrictions on each element in the list:" 

-

209 ) 

-

210 else: 

-

211 r.append("This rule enforces the following restrictions:") 

-

212 

-

213 if conditionally_required or mutually_exclusive: 213 ↛ 231: line 213 didn't jump to line 231, because the condition on line 213 was never false 

-

214 all_groups = set( 

-

215 itertools.chain(conditionally_required, mutually_exclusive) 

-

216 ) 

-

217 for g in all_groups: 

-

218 anames = "`, `".join(g) 

-

219 is_mx = g in mutually_exclusive 

-

220 is_cr = g in conditionally_required 

-

221 if is_mx and is_cr: 

-

222 r.append(f" - The rule must use exactly one of: `{anames}`") 

-

223 elif is_cr: 223 ↛ 224: line 223 didn't jump to line 224, because the condition on line 223 was never true 

-

224 r.append(f" - The rule must use at least one of: `{anames}`") 

-

225 else: 

-

226 assert is_mx 

-

227 r.append( 

-

228 f" - The following attributes are mutually exclusive: `{anames}`" 

-

229 ) 

-

230 

-

231 if mutually_exclusive or any( 231 ↛ exit, 231 ↛ 248: 2 missed branches: 1) line 231 didn't run the generator expression on line 231, 2) line 231 didn't jump to line 248, because the condition on line 231 was never false 

-

232 pd.conflicting_attributes for pd in attributes.values() 

-

233 ): 

-

234 for parameter, parameter_details in sorted(attributes.items()): 

-

235 source_name = parameter_details.source_attribute_name 

-

236 conflicts = set(parameter_details.conflicting_attributes) 

-

237 for mx in mutually_exclusive: 

-

238 if parameter in mx and mx not in conditionally_required: 238 ↛ 239: line 238 didn't jump to line 239, because the condition on line 238 was never true 

-

239 conflicts |= mx 

-

240 if conflicts: 

-

241 conflicts.discard(parameter) 

-

242 cnames = "`, `".join( 

-

243 attributes[a].source_attribute_name for a in conflicts 

-

244 ) 

-

245 r.append( 

-

246 f" - The attribute `{source_name}` cannot be used with any of: `{cnames}`" 

-

247 ) 

-

248 r.append("") 

-

249 if alt_form_parser is not None: 

-

250 # FIXME: Mapping[str, Any] ends here, which is ironic given the headline. 

-

251 r.append( 

-

252 f"Non-mapping format: {alt_form_parser.type_validator.describe_type()}" 

-

253 ) 

-

254 alt_parser_desc = parser_doc.alt_parser_description 

-

255 if alt_parser_desc: 

-

256 r.extend( 

-

257 f" {line}" 

-

258 for line in alt_parser_desc.format(**doc_args).splitlines( 

-

259 keepends=False 

-

260 ) 

-

261 ) 

-

262 r.append("") 

-

263 

-

264 if declarative_parser.reference_documentation_url is not None: 

-

265 r.append( 

-

266 f"Reference documentation: {declarative_parser.reference_documentation_url}" 

-

267 ) 

-

268 else: 

-

269 r.append( 

-

270 "Reference documentation: No reference documentation link provided by the plugin" 

-

271 ) 

-

272 

-

273 return "\n".join(r) 

-
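For orientation, the rendered documentation produced by render_rule has roughly this shape (an illustrative sketch assembled from the code above, not captured output):

    Rule Title
    ==========

    Rule description ...

    Attributes:
     - `some-attribute` (required): <type description>
       Attribute documentation ...

    This rule enforces the following restrictions:
     - The rule must use exactly one of: `a`, `b`

    Reference documentation: <URL or the "no link provided" fallback>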
- - - diff --git a/coverage-report/d_4f754ff76d8638bb_util_py.html b/coverage-report/d_4f754ff76d8638bb_util_py.html deleted file mode 100644 index 4bb7608..0000000 --- a/coverage-report/d_4f754ff76d8638bb_util_py.html +++ /dev/null @@ -1,419 +0,0 @@ - - - - - Coverage for src/debputy/manifest_parser/util.py: 89% - - - - - -
-
-

- Coverage for src/debputy/manifest_parser/util.py: 89% (192 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-

1import dataclasses 

-

2from typing import ( 

-

3 Iterator, 

-

4 Union, 

-

5 Self, 

-

6 Optional, 

-

7 List, 

-

8 Tuple, 

-

9 Mapping, 

-

10 get_origin, 

-

11 get_args, 

-

12 Any, 

-

13 Type, 

-

14 TypeVar, 

-

15 TYPE_CHECKING, 

-

16 Iterable, 

-

17) 

-

18 

-

19if TYPE_CHECKING: 

-

20 from debputy.manifest_parser.declarative_parser import DebputyParseHint 

-

21 

-

22 

-

23MP = TypeVar("MP", bound="DebputyParseHint") 

-

24StrOrInt = Union[str, int] 

-

25AttributePathAliasMapping = Mapping[ 

-

26 StrOrInt, Tuple[StrOrInt, Optional["AttributePathAliasMapping"]] 

-

27] 

-

28 

-

29 

-

30class AttributePath(object): 

-

31 __slots__ = ("parent", "name", "alias_mapping", "path_hint") 

-

32 

-

33 def __init__( 

-

34 self, 

-

35 parent: Optional["AttributePath"], 

-

36 key: Optional[Union[str, int]], 

-

37 *, 

-

38 alias_mapping: Optional[AttributePathAliasMapping] = None, 

-

39 ) -> None: 

-

40 self.parent = parent 

-

41 self.name = key 

-

42 self.path_hint: Optional[str] = None 

-

43 self.alias_mapping = alias_mapping 

-

44 

-

45 @classmethod 

-

46 def root_path(cls) -> "AttributePath": 

-

47 return AttributePath(None, None) 

-

48 

-

49 @classmethod 

-

50 def builtin_path(cls) -> "AttributePath": 

-

51 return AttributePath(None, "$builtin$") 

-

52 

-

53 @classmethod 

-

54 def test_path(cls) -> "AttributePath": 

-

55 return AttributePath(None, "$test$") 

-

56 

-

57 def __bool__(self) -> bool: 

-

58 return self.name is not None or self.parent is not None 

-

59 

-

60 def copy_with_path_hint(self, path_hint: str) -> "AttributePath": 

-

61 p = self.__class__(self.parent, self.name, alias_mapping=self.alias_mapping) 

-

62 p.path_hint = path_hint 

-

63 return p 

-

64 

-

65 def path_segments(self) -> Iterable[Union[str, int]]: 

-

66 segments = list(self._iter_path()) 

-

67 segments.reverse() 

-

68 yield from (s.name for s in segments) 

-

69 

-

70 @property 

-

71 def path(self) -> str: 

-

72 segments = list(self._iter_path()) 

-

73 segments.reverse() 

-

74 parts: List[str] = [] 

-

75 path_hint = None 

-

76 

-

77 for s in segments: 

-

78 k = s.name 

-

79 s_path_hint = s.path_hint 

-

80 if s_path_hint is not None: 

-

81 path_hint = s_path_hint 

-

82 if isinstance(k, int): 

-

83 parts.append(f"[{k}]") 

-

84 elif k is not None: 84 ↛ 77: line 84 didn't jump to line 77, because the condition on line 84 was never false 

-

85 if parts: 

-

86 parts.append(".") 

-

87 parts.append(k) 

-

88 if path_hint: 

-

89 parts.append(f" <Search for: {path_hint}>") 

-

90 if not parts: 90 ↛ 91: line 90 didn't jump to line 91, because the condition on line 90 was never true 

-

91 return "document root" 

-

92 return "".join(parts) 

-

93 

-

94 def __str__(self) -> str: 

-

95 return self.path 

-

96 

-

97 def __getitem__(self, item: Union[str, int]) -> "AttributePath": 

-

98 alias_mapping = None 

-

99 if self.alias_mapping: 

-

100 match = self.alias_mapping.get(item) 

-

101 if match: 

-

102 item, alias_mapping = match 

-

103 if item == "": 

-

104 # Support `sources[0]` mapping to `source` by `sources -> source` and `0 -> ""`. 

-

105 return AttributePath( 

-

106 self.parent, self.name, alias_mapping=alias_mapping 

-

107 ) 

-

108 return AttributePath(self, item, alias_mapping=alias_mapping) 

-

109 

-

110 def _iter_path(self) -> Iterator["AttributePath"]: 

-

111 current = self 

-

112 yield current 

-

113 while True: 

-

114 parent = current.parent 

-

115 if not parent: 

-

116 break 

-

117 current = parent 

-

118 yield current 

-

119 

-

120 

-
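End to end, the path rendering looks like this (a short sketch, assuming debputy is importable):

    from debputy.manifest_parser.util import AttributePath

    root = AttributePath.root_path()
    p = root["installations"][0]["install"]
    # Integer keys render as "[N]"; string keys are joined with ".".
    assert p.path == "installations[0].install"
    assert str(AttributePath.root_path()) == "document root"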

121@dataclasses.dataclass(slots=True, frozen=True) 

-

122class _SymbolicModeSegment: 

-

123 base_mode: int 

-

124 base_mask: int 

-

125 cap_x_mode: int 

-

126 cap_x_mask: int 

-

127 

-

128 def apply(self, current_mode: int, is_dir: bool) -> int: 

-

129 if current_mode & 0o111 or is_dir: 

-

130 chosen_mode = self.cap_x_mode 

-

131 mode_mask = self.cap_x_mask 

-

132 else: 

-

133 chosen_mode = self.base_mode 

-

134 mode_mask = self.base_mask 

-

135 # set ("="): mode mask clears relevant segment and current_mode are the desired bits 

-

136 # add ("+"): mode mask keeps everything and current_mode are the desired bits 

-

137 # remove ("-"): mode mask clears relevant bits and current_mode are 0 

-

138 return (current_mode & mode_mask) | chosen_mode 

-

139 

-

140 

-

141def _symbolic_mode_bit_inverse(v: int) -> int: 

-

142 # The & part is necessary because otherwise python narrows the inversion to the minimum number of bits 

-

143 # required, which is not what we want. 

-

144 return ~v & 0o7777 

-

145 

-

146 

-
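The masking matters because Python integers have unbounded width, so a bare ~ yields a negative number. A one-liner to see the effect (plain Python, nothing debputy-specific):

    v = 0o700
    assert ~v == -0o701            # unbounded two's complement
    assert ~v & 0o7777 == 0o7077   # the 12-bit inverse the mode logic needs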

147def parse_symbolic_mode( 

-

148 symbolic_mode: str, 

-

149 attribute_path: Optional[AttributePath], 

-

150) -> Iterator[_SymbolicModeSegment]: 

-

151 sticky_bit = 0o01000 

-

152 setuid_bit = 0o04000 

-

153 setgid_bit = 0o02000 

-

154 mode_group_flag = 0o7 

-

155 subject_mask_and_shift = { 

-

156 "u": (mode_group_flag << 6, 6), 

-

157 "g": (mode_group_flag << 3, 3), 

-

158 "o": (mode_group_flag << 0, 0), 

-

159 } 

-

160 bits = { 

-

161 "r": (0o4, 0o4), 

-

162 "w": (0o2, 0o2), 

-

163 "x": (0o1, 0o1), 

-

164 "X": (0o0, 0o1), 

-

165 "s": (0o0, 0o0), # Special-cased below (it depends on the subject) 

-

166 "t": (0o0, 0o0), # Special-cased below 

-

167 } 

-

168 modifiers = { 

-

169 "+", 

-

170 "-", 

-

171 "=", 

-

172 } 

-

173 in_path = f" in {attribute_path.path}" if attribute_path is not None else "" 

-

174 for orig_part in symbolic_mode.split(","): 

-

175 base_mode = 0 

-

176 cap_x_mode = 0 

-

177 part = orig_part 

-

178 subjects = set() 

-

179 while part and part[0] in ("u", "g", "o", "a"): 

-

180 subject = part[0] 

-

181 if subject == "a": 

-

182 subjects = {"u", "g", "o"} 

-

183 else: 

-

184 subjects.add(subject) 

-

185 part = part[1:] 

-

186 if not subjects: 

-

187 subjects = {"u", "g", "o"} 

-

188 

-

189 if part and part[0] in modifiers: 189 ↛ 191: line 189 didn't jump to line 191, because the condition on line 189 was never false 

-

190 modifier = part[0] 

-

191 elif not part: 

-

192 raise ValueError( 

-

193 f'Invalid symbolic mode{in_path}: expected [+-=] to be present (from "{orig_part}")' 

-

194 ) 

-

195 else: 

-

196 raise ValueError( 

-

197 f'Invalid symbolic mode{in_path}: Expected "{part[0]}" to be one of [+-=]' 

-

198 f' (from "{orig_part}")' 

-

199 ) 

-

200 part = part[1:] 

-

201 s_bit_seen = False 

-

202 t_bit_seen = False 

-

203 while part and part[0] in bits: 

-

204 if part == "s": 

-

205 s_bit_seen = True 

-

206 elif part == "t": 206 ↛ 207: line 206 didn't jump to line 207, because the condition on line 206 was never true 

-

207 t_bit_seen = True 

-

208 elif part in ("u", "g", "o"): 208 ↛ 209: line 208 didn't jump to line 209, because the condition on line 208 was never true 

-

209 raise NotImplementedError( 

-

210 f"Cannot parse symbolic mode{in_path}: Sorry, we do not support referencing an" 

-

211 " existing subject's permissions (a=u) in symbolic modes." 

-

212 ) 

-

213 else: 

-

214 matched_bits = bits.get(part[0]) 

-

215 if matched_bits is None: 215 ↛ 216: line 215 didn't jump to line 216, because the condition on line 215 was never true 

-

216 valid_bits = "".join(bits) 

-

217 raise ValueError( 

-

218 f'Invalid symbolic mode{in_path}: Expected "{part[0]}" to be one of the letters' 

-

219 f' in "{valid_bits}" (from "{orig_part}")' 

-

220 ) 

-

221 base_mode_bits, cap_x_mode_bits = bits[part[0]] 

-

222 base_mode |= base_mode_bits 

-

223 cap_x_mode |= cap_x_mode_bits 

-

224 part = part[1:] 

-

225 

-

226 if part: 226 ↛ 227: line 226 didn't jump to line 227, because the condition on line 226 was never true 

-

227 raise ValueError( 

-

228 f'Invalid symbolic mode{in_path}: Could not parse "{part[0]}" from "{orig_part}"' 

-

229 ) 

-

230 

-

231 final_base_mode = 0 

-

232 final_cap_x_mode = 0 

-

233 segment_mask = 0 

-

234 for subject in subjects: 

-

235 mask, shift = subject_mask_and_shift[subject] 

-

236 segment_mask |= mask 

-

237 final_base_mode |= base_mode << shift 

-

238 final_cap_x_mode |= cap_x_mode << shift 

-

239 if modifier == "=": 

-

240 segment_mask |= setuid_bit if "u" in subjects else 0 

-

241 segment_mask |= setgid_bit if "g" in subjects else 0 

-

242 segment_mask |= sticky_bit if "o" in subjects else 0 

-

243 if s_bit_seen: 

-

244 if "u" in subjects: 244 ↛ 247: line 244 didn't jump to line 247, because the condition on line 244 was never false 

-

245 final_base_mode |= setuid_bit 

-

246 final_cap_x_mode |= setuid_bit 

-

247 if "g" in subjects: 

-

248 final_base_mode |= setgid_bit 

-

249 final_cap_x_mode |= setgid_bit 

-

250 if t_bit_seen: 250 ↛ 251: line 250 didn't jump to line 251, because the condition on line 250 was never true 

-

251 final_base_mode |= sticky_bit 

-

252 final_cap_x_mode |= sticky_bit 

-

253 if modifier == "+": 

-

254 final_base_mask = ~0 

-

255 final_cap_x_mask = ~0 

-

256 elif modifier == "-": 

-

257 final_base_mask = _symbolic_mode_bit_inverse(final_base_mode) 

-

258 final_cap_x_mask = _symbolic_mode_bit_inverse(final_cap_x_mode) 

-

259 final_base_mode = 0 

-

260 final_cap_x_mode = 0 

-

261 elif modifier == "=": 

-

262 # FIXME: Handle "unmentioned directory's setgid/setuid bits" 

-

263 inverted_mask = _symbolic_mode_bit_inverse(segment_mask) 

-

264 final_base_mask = inverted_mask 

-

265 final_cap_x_mask = inverted_mask 

-

266 else: 

-

267 raise AssertionError( 

-

268 f"Unknown modifier in symbolic mode: {modifier} - should not have happened" 

-

269 ) 

-

270 yield _SymbolicModeSegment( 

-

271 base_mode=final_base_mode, 

-

272 base_mask=final_base_mask, 

-

273 cap_x_mode=final_cap_x_mode, 

-

274 cap_x_mask=final_cap_x_mask, 

-

275 ) 

-

276 

-

277 

-
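Putting the segments to work: each yielded segment is applied in order, mirroring chmod semantics. A sketch (assumes debputy is importable; passing None for the attribute path merely drops the path decoration from error messages):

    from debputy.manifest_parser.util import parse_symbolic_mode

    mode = 0o644
    for segment in parse_symbolic_mode("u+x,go-w", None):
        mode = segment.apply(mode, is_dir=False)
    assert mode == 0o744

    # "X" only grants execute where something is already executable or a dir:
    mode = 0o644
    for segment in parse_symbolic_mode("a+X", None):
        mode = segment.apply(mode, is_dir=True)
    assert mode == 0o755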

278def unpack_type( 

-

279 orig_type: Any, 

-

280 parsing_typed_dict_attribute: bool, 

-

281) -> Tuple[Any, Optional[Any], Tuple[Any, ...]]: 

-

282 raw_type = orig_type 

-

283 origin = get_origin(raw_type) 

-

284 args = get_args(raw_type) 

-

285 if not parsing_typed_dict_attribute and repr(origin) in ( 285 ↛ 289: line 285 didn't jump to line 289, because the condition on line 285 was never true 

-

286 "typing.NotRequired", 

-

287 "typing.Required", 

-

288 ): 

-

289 raise ValueError( 

-

290 f"The Required/NotRequired attributes cannot be used outside typed dicts," 

-

291 f" the type that triggered the error: {orig_type}" 

-

292 ) 

-

293 

-

294 while repr(origin) in ("typing.NotRequired", "typing.Required"): 

-

295 if len(args) != 1: 295 ↛ 296: line 295 didn't jump to line 296, because the condition on line 295 was never true 

-

296 raise ValueError( 

-

297 f"The type {raw_type} should have exactly one type parameter" 

-

298 ) 

-

299 raw_type = args[0] 

-

300 origin = get_origin(raw_type) 

-

301 args = get_args(raw_type) 

-

302 

-

303 assert not isinstance(raw_type, tuple) 

-

304 

-

305 return raw_type, origin, args 

-

306 

-

307 

-
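A quick sketch of the unwrapping (requires Python 3.11+ for typing.NotRequired, which the typing.Self import above already implies):

    from typing import List, NotRequired

    from debputy.manifest_parser.util import unpack_type

    raw, origin, args = unpack_type(
        NotRequired[List[str]], parsing_typed_dict_attribute=True
    )
    assert raw == List[str]
    assert origin is list
    assert args == (str,)
    # With parsing_typed_dict_attribute=False, the NotRequired/Required
    # wrappers are rejected with a ValueError instead.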

308def find_annotation( 

-

309 annotations: Tuple[Any, ...], 

-

310 anno_class: Type[MP], 

-

311) -> Optional[MP]: 

-

312 m = None 

-

313 for anno in annotations: 

-

314 if isinstance(anno, anno_class): 

-

315 if m is not None: 315 ↛ 316: line 315 didn't jump to line 316, because the condition on line 315 was never true 

-

316 raise ValueError( 

-

317 f"The annotation {anno_class.__name__} was used more than once" 

-

318 ) 

-

319 m = anno 

-

320 return m 

-
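find_annotation scans an Annotated[...] metadata tuple for at most one instance of a marker class. Sketch (the Marker class is illustrative; real callers pass DebputyParseHint subclasses):

    from debputy.manifest_parser.util import find_annotation

    class Marker:
        pass

    marker = Marker()
    assert find_annotation(("doc", 42, marker), Marker) is marker
    assert find_annotation(("doc", 42), Marker) is None
    # Two Marker instances in the same tuple would raise ValueError.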
- - - diff --git a/coverage-report/d_4faea183f900b252___init___py.html b/coverage-report/d_4faea183f900b252___init___py.html deleted file mode 100644 index 7266b4f..0000000 --- a/coverage-report/d_4faea183f900b252___init___py.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - - Coverage for src/debputy/plugin/__init__.py: 100% - - - - - -
-
-

- Coverage for src/debputy/plugin/__init__.py: 100% (0 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-
- - - diff --git a/coverage-report/d_50e3cc0df0cc5f51___init___py.html b/coverage-report/d_50e3cc0df0cc5f51___init___py.html deleted file mode 100644 index 8eee6c1..0000000 --- a/coverage-report/d_50e3cc0df0cc5f51___init___py.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - - Coverage for src/debputy/lsp/vendoring/__init__.py: 100% - - - - - -
-
-

- Coverage for src/debputy/lsp/vendoring/__init__.py: 100% (0 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-
- - - diff --git a/coverage-report/d_5d0ec0d5422112df___init___py.html b/coverage-report/d_5d0ec0d5422112df___init___py.html deleted file mode 100644 index 15a9aab..0000000 --- a/coverage-report/d_5d0ec0d5422112df___init___py.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - - Coverage for src/debputy/lsp/__init__.py: 100% - - - - - -
-
-

- Coverage for src/debputy/lsp/__init__.py: 100% (0 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-
- - - diff --git a/coverage-report/d_5d0ec0d5422112df_debputy_ls_py.html b/coverage-report/d_5d0ec0d5422112df_debputy_ls_py.html deleted file mode 100644 index 15b1233..0000000 --- a/coverage-report/d_5d0ec0d5422112df_debputy_ls_py.html +++ /dev/null @@ -1,174 +0,0 @@ - - - - - Coverage for src/debputy/lsp/debputy_ls.py: 74% - - - - - -
-
-

- Coverage for src/debputy/lsp/debputy_ls.py: 74% (48 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-

1from typing import Optional, List, Any 

-

2 

-

3from debputy.linting.lint_util import LintState 

-

4from debputy.lsp.text_util import LintCapablePositionCodec 

-

5from debputy.plugin.api.feature_set import PluginProvidedFeatureSet 

-

6 

-

7try: 

-

8 from pygls.server import LanguageServer 

-

9 from pygls.workspace import TextDocument 

-

10except ImportError as e: 

-

11 

-

12 class LanguageServer: 

-

13 def __init__(self, *args, **kwargs) -> None: 

-

14 """Placeholder to work if pygls is not installed""" 

-

15 # Should not be called 

-

16 raise e 

-

17 

-

18 

-

19class LSProvidedLintState(LintState): 

-

20 def __init__(self, ls: "DebputyLanguageServer", doc: "TextDocument") -> None: 

-

21 self._ls = ls 

-

22 self._doc = doc 

-

23 # Cache lines (doc.lines re-splits everytime) 

-

24 self._lines = doc.lines 

-

25 

-

26 @property 

-

27 def plugin_feature_set(self) -> PluginProvidedFeatureSet: 

-

28 return self._ls.plugin_feature_set 

-

29 

-

30 @property 

-

31 def doc_uri(self) -> str: 

-

32 return self._doc.uri 

-

33 

-

34 @property 

-

35 def path(self) -> str: 

-

36 return self._doc.path 

-

37 

-

38 @property 

-

39 def lines(self) -> List[str]: 

-

40 return self._lines 

-

41 

-

42 @property 

-

43 def position_codec(self) -> LintCapablePositionCodec: 

-

44 return self._doc.position_codec 

-

45 

-

46 

-

47class DebputyLanguageServer(LanguageServer): 

-

48 

-

49 def __init__( 

-

50 self, 

-

51 *args: Any, 

-

52 **kwargs: Any, 

-

53 ): 

-

54 super().__init__(*args, **kwargs) 

-

55 self._plugin_feature_set: Optional[PluginProvidedFeatureSet] = None 

-

56 

-

57 @property 

-

58 def plugin_feature_set(self) -> PluginProvidedFeatureSet: 

-

59 res = self._plugin_feature_set 

-

60 if res is None: 60 ↛ 61: line 60 didn't jump to line 61, because the condition on line 60 was never true 

-

61 raise RuntimeError( 

-

62 "Initialization error: The plugin feature set has not been initialized before it was needed." 

-

63 ) 

-

64 return res 

-

65 

-

66 @plugin_feature_set.setter 

-

67 def plugin_feature_set(self, plugin_feature_set: PluginProvidedFeatureSet) -> None: 

-

68 if self._plugin_feature_set is not None: 68 ↛ 69: line 68 didn't jump to line 69, because the condition on line 68 was never true 

-

69 raise RuntimeError( 

-

70 "The plugin_feature_set attribute cannot be changed once set" 

-

71 ) 

-

72 self._plugin_feature_set = plugin_feature_set 

-

73 

-

74 def lint_state(self, doc: "TextDocument") -> LintState: 

-

75 return LSProvidedLintState(self, doc) 

-
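The plugin_feature_set property is a write-once contract: reading it before injection or assigning it twice raises RuntimeError. A sketch (assumes pygls is installed; the constructor arguments follow pygls' LanguageServer(name, version) convention):

    from debputy.lsp.debputy_ls import DebputyLanguageServer

    ls = DebputyLanguageServer("debputy", "v<version>")
    try:
        ls.plugin_feature_set  # read before initialization
    except RuntimeError as error:
        print(error)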
- - - diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_changelog_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_changelog_py.html deleted file mode 100644 index 5d9cb94..0000000 --- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_changelog_py.html +++ /dev/null @@ -1,392 +0,0 @@ - - - - - Coverage for src/debputy/lsp/lsp_debian_changelog.py: 21% - - - - - -
-
-

- Coverage for src/debputy/lsp/lsp_debian_changelog.py: 21% (108 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-

1import sys 

-

2from email.utils import parsedate_to_datetime 

-

3from typing import ( 

-

4 Union, 

-

5 List, 

-

6 Dict, 

-

7 Iterator, 

-

8 Optional, 

-

9 Iterable, 

-

10) 

-

11 

-

12from lsprotocol.types import ( 

-

13 Diagnostic, 

-

14 DidOpenTextDocumentParams, 

-

15 DidChangeTextDocumentParams, 

-

16 TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL, 

-

17 TEXT_DOCUMENT_CODE_ACTION, 

-

18 DidCloseTextDocumentParams, 

-

19 Range, 

-

20 Position, 

-

21 DiagnosticSeverity, 

-

22) 

-

23 

-

24from debputy.linting.lint_util import LintState 

-

25from debputy.lsp.lsp_features import lsp_diagnostics, lsp_standard_handler 

-

26from debputy.lsp.quickfixes import ( 

-

27 propose_correct_text_quick_fix, 

-

28) 

-

29from debputy.lsp.spellchecking import spellcheck_line 

-

30from debputy.lsp.text_util import ( 

-

31 LintCapablePositionCodec, 

-

32) 

-

33 

-

34try: 

-

35 from debian._deb822_repro.locatable import Position as TEPosition, Range as TERange 

-

36 

-

37 from pygls.server import LanguageServer 

-

38 from pygls.workspace import TextDocument 

-

39except ImportError: 

-

40 pass 

-

41 

-

42 

-

43# Same as Lintian 

-

44_MAXIMUM_WIDTH: int = 82 

-

45_LANGUAGE_IDS = [ 

-

46 "debian/changelog", 

-

47 # emacs's name 

-

48 "debian-changelog", 

-

49 # vim's name 

-

50 "debchangelog", 

-

51] 

-

52 

-

53_WEEKDAYS_BY_IDX = [ 

-

54 "Mon", 

-

55 "Tue", 

-

56 "Wed", 

-

57 "Thu", 

-

58 "Fri", 

-

59 "Sat", 

-

60 "Sun", 

-

61] 

-

62_KNOWN_WEEK_DAYS = frozenset(_WEEKDAYS_BY_IDX) 

-

63 

-

64DOCUMENT_VERSION_TABLE: Dict[str, int] = {} 

-

65 

-

66 

-

67def _handle_close( 

-

68 ls: "LanguageServer", 

-

69 params: DidCloseTextDocumentParams, 

-

70) -> None: 

-

71 try: 

-

72 del DOCUMENT_VERSION_TABLE[params.text_document.uri] 

-

73 except KeyError: 

-

74 pass 

-

75 

-

76 

-

77def is_doc_at_version(uri: str, version: int) -> bool: 

-

78 dv = DOCUMENT_VERSION_TABLE.get(uri) 

-

79 return dv == version 

-

80 

-

81 

-

82lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_CODE_ACTION) 

-

83lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL) 

-

84 

-

85 

-

86@lsp_diagnostics(_LANGUAGE_IDS) 

-

87def _diagnostics_debian_changelog( 

-

88 ls: "LanguageServer", 

-

89 params: Union[DidOpenTextDocumentParams, DidChangeTextDocumentParams], 

-

90) -> Iterable[List[Diagnostic]]: 

-

91 doc_uri = params.text_document.uri 

-

92 doc = ls.workspace.get_text_document(doc_uri) 

-

93 lines = doc.lines 

-

94 max_words = 1_000 

-

95 delta_update_size = 10 

-

96 max_lines_between_update = 10 

-

97 scanner = _scan_debian_changelog_for_diagnostics( 

-

98 lines, 

-

99 doc.position_codec, 

-

100 delta_update_size, 

-

101 max_words, 

-

102 max_lines_between_update, 

-

103 ) 

-

104 

-

105 yield from scanner 

-

106 

-

107 

-

108def _check_footer_line( 

-

109 line: str, 

-

110 line_no: int, 

-

111 lines: List[str], 

-

112 position_codec: LintCapablePositionCodec, 

-

113) -> Iterator[Diagnostic]: 

-

114 try: 

-

115 end_email_idx = line.rindex("> ") 

-

116 except ValueError: 

-

117 # Syntax error; flag later 

-

118 return 

-

119 line_len = len(line) 

-

120 start_date_idx = end_email_idx + 3 

-

121 # 3 characters for the day name (Mon), then a comma plus a space followed by the 

-

122 # actual date. The 6 characters limit is a gross underestimation of the real 

-

123 # size. 

-

124 if line_len < start_date_idx + 6: 

-

125 range_server_units = Range( 

-

126 Position( 

-

127 line_no, 

-

128 start_date_idx, 

-

129 ), 

-

130 Position( 

-

131 line_no, 

-

132 line_len, 

-

133 ), 

-

134 ) 

-

135 yield Diagnostic( 

-

136 position_codec.range_to_client_units(lines, range_server_units), 

-

137 "Expected a date in RFC822 format (Tue, 12 Mar 2024 12:34:56 +0000)", 

-

138 severity=DiagnosticSeverity.Error, 

-

139 source="debputy", 

-

140 ) 

-

141 return 

-

142 day_name_range_server_units = Range( 

-

143 Position( 

-

144 line_no, 

-

145 start_date_idx, 

-

146 ), 

-

147 Position( 

-

148 line_no, 

-

149 start_date_idx + 3, 

-

150 ), 

-

151 ) 

-

152 day_name = line[start_date_idx : start_date_idx + 3] 

-

153 if day_name not in _KNOWN_WEEK_DAYS: 

-

154 yield Diagnostic( 

-

155 position_codec.range_to_client_units(lines, day_name_range_server_units), 

-

156 "Expected a three letter date here (Mon, Tue, ..., Sun).", 

-

157 severity=DiagnosticSeverity.Error, 

-

158 source="debputy", 

-

159 ) 

-

160 return 

-

161 

-

162 date_str = line[start_date_idx + 5 :] 

-

163 

-

164 if line[start_date_idx + 3 : start_date_idx + 5] != ", ": 

-

165 sep = line[start_date_idx + 3 : start_date_idx + 5] 

-

166 range_server_units = Range( 

-

167 Position( 

-

168 line_no, 

-

169 start_date_idx + 3, 

-

170 ), 

-

171 Position( 

-

172 line_no, 

-

173 start_date_idx + 4, 

-

174 ), 

-

175 ) 

-

176 yield Diagnostic( 

-

177 position_codec.range_to_client_units(lines, range_server_units), 

-

178 f'Improper formatting of date. Expected ", " here, not "{sep}"', 

-

179 severity=DiagnosticSeverity.Error, 

-

180 source="debputy", 

-

181 ) 

-

182 return 

-

183 

-

184 try: 

-

185 # FIXME: this parser is too forgiving (it ignores trailing garbage) 

-

186 date = parsedate_to_datetime(date_str) 

-

187 except ValueError as e: 

-

188 range_server_units = Range( 

-

189 Position( 

-

190 line_no, 

-

191 start_date_idx + 5, 

-

192 ), 

-

193 Position( 

-

194 line_no, 

-

195 line_len, 

-

196 ), 

-

197 ) 

-

198 yield Diagnostic( 

-

199 position_codec.range_to_client_units(lines, range_server_units), 

-

200 f"Unable to the date as a valid RFC822 date: {e.args[0]}", 

-

201 severity=DiagnosticSeverity.Error, 

-

202 source="debputy", 

-

203 ) 

-

204 return 

-

205 expected_week_day = _WEEKDAYS_BY_IDX[date.weekday()] 

-

206 if expected_week_day != day_name: 

-

207 yield Diagnostic( 

-

208 position_codec.range_to_client_units(lines, day_name_range_server_units), 

-

209 f"The date was a {expected_week_day}day.", 

-

210 severity=DiagnosticSeverity.Warning, 

-

211 source="debputy", 

-

212 data=[propose_correct_text_quick_fix(expected_week_day)], 

-

213 ) 

-

214 

-

215 

-
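The weekday cross-check relies on the standard library's RFC822 date parser. A standalone sketch of the same logic (plain Python):

    from email.utils import parsedate_to_datetime

    weekdays = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
    date = parsedate_to_datetime("Tue, 12 Mar 2024 12:34:56 +0000")
    assert weekdays[date.weekday()] == "Tue"  # 2024-03-12 was a Tuesday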

216def _scan_debian_changelog_for_diagnostics( 

-

217 lines: List[str], 

-

218 position_codec: LintCapablePositionCodec, 

-

219 delta_update_size: int, 

-

220 max_words: int, 

-

221 max_lines_between_update: int, 

-

222 *, 

-

223 max_line_length: int = _MAXIMUM_WIDTH, 

-

224) -> Iterator[List[Diagnostic]]: 

-

225 diagnostics = [] 

-

226 diagnostics_at_last_update = 0 

-

227 lines_since_last_update = 0 

-

228 for line_no, line in enumerate(lines): 

-

229 orig_line = line 

-

230 line = line.rstrip() 

-

231 if not line: 

-

232 continue 

-

233 if line.startswith(" --"): 

-

234 diagnostics.extend(_check_footer_line(line, line_no, lines, position_codec)) 

-

235 continue 

-

236 if not line.startswith(" "): 

-

237 continue 

-

238 # minus 1 for newline 

-

239 orig_line_len = len(orig_line) - 1 

-

240 if orig_line_len > max_line_length: 

-

241 range_server_units = Range( 

-

242 Position( 

-

243 line_no, 

-

244 max_line_length, 

-

245 ), 

-

246 Position( 

-

247 line_no, 

-

248 orig_line_len, 

-

249 ), 

-

250 ) 

-

251 diagnostics.append( 

-

252 Diagnostic( 

-

253 position_codec.range_to_client_units(lines, range_server_units), 

-

254 f"Line exceeds {max_line_length} characters", 

-

255 severity=DiagnosticSeverity.Hint, 

-

256 source="debputy", 

-

257 ) 

-

258 ) 

-

259 if len(line) > 3 and line[2] == "[" and line[-1] == "]": 

-

260 # Do not spell check [ X ] as X is usually a name 

-

261 continue 

-

262 lines_since_last_update += 1 

-

263 if max_words > 0: 

-

264 typos = list(spellcheck_line(lines, position_codec, line_no, line)) 

-

265 new_diagnostics = len(typos) 

-

266 max_words -= new_diagnostics 

-

267 diagnostics.extend(typos) 

-

268 

-

269 current_diagnostics_len = len(diagnostics) 

-

270 if ( 

-

271 lines_since_last_update >= max_lines_between_update 

-

272 or current_diagnostics_len - diagnostics_at_last_update > delta_update_size 

-

273 ): 

-

274 diagnostics_at_last_update = current_diagnostics_len 

-

275 lines_since_last_update = 0 

-

276 

-

277 yield diagnostics 

-

278 if not diagnostics or diagnostics_at_last_update != len(diagnostics): 

-

279 yield diagnostics 

-

280 

-

281 

-

282def _lint_debian_changelog( 

-

283 lint_state: LintState, 

-

284) -> Optional[List[Diagnostic]]: 

-

285 limits = sys.maxsize 

-

286 scanner = _scan_debian_changelog_for_diagnostics( 

-

287 lint_state.lines, 

-

288 lint_state.position_codec, 

-

289 limits, 

-

290 limits, 

-

291 limits, 

-

292 ) 

-

293 return next(iter(scanner), None) 

-
- - - diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_control_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_control_py.html deleted file mode 100644 index d004ff7..0000000 --- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_control_py.html +++ /dev/null @@ -1,729 +0,0 @@ - - - - - Coverage for src/debputy/lsp/lsp_debian_control.py: 72% - - - - - -
-
-

- Coverage for src/debputy/lsp/lsp_debian_control.py: 72% (216 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

- -
-
-
-

1from typing import ( 

-

2 Union, 

-

3 Sequence, 

-

4 Tuple, 

-

5 Iterator, 

-

6 Optional, 

-

7 Iterable, 

-

8 Mapping, 

-

9 List, 

-

10) 

-

11 

-

12from lsprotocol.types import ( 

-

13 DiagnosticSeverity, 

-

14 Range, 

-

15 Diagnostic, 

-

16 Position, 

-

17 DidOpenTextDocumentParams, 

-

18 DidChangeTextDocumentParams, 

-

19 FoldingRange, 

-

20 FoldingRangeParams, 

-

21 CompletionItem, 

-

22 CompletionList, 

-

23 CompletionParams, 

-

24 TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL, 

-

25 DiagnosticRelatedInformation, 

-

26 Location, 

-

27 HoverParams, 

-

28 Hover, 

-

29 TEXT_DOCUMENT_CODE_ACTION, 

-

30 SemanticTokens, 

-

31 SemanticTokensParams, 

-

32) 

-

33 

-

34from debputy.linting.lint_util import LintState 

-

35from debputy.lsp.lsp_debian_control_reference_data import ( 

-

36 DctrlKnownField, 

-

37 BINARY_FIELDS, 

-

38 SOURCE_FIELDS, 

-

39 DctrlFileMetadata, 

-

40) 

-

41from debputy.lsp.lsp_features import ( 

-

42 lint_diagnostics, 

-

43 lsp_completer, 

-

44 lsp_hover, 

-

45 lsp_standard_handler, 

-

46 lsp_folding_ranges, 

-

47 lsp_semantic_tokens_full, 

-

48) 

-

49from debputy.lsp.lsp_generic_deb822 import ( 

-

50 deb822_completer, 

-

51 deb822_hover, 

-

52 deb822_folding_ranges, 

-

53 deb822_semantic_tokens_full, 

-

54) 

-

55from debputy.lsp.quickfixes import ( 

-

56 propose_remove_line_quick_fix, 

-

57 range_compatible_with_remove_line_fix, 

-

58 propose_correct_text_quick_fix, 

-

59) 

-

60from debputy.lsp.spellchecking import default_spellchecker 

-

61from debputy.lsp.text_util import ( 

-

62 normalize_dctrl_field_name, 

-

63 LintCapablePositionCodec, 

-

64 detect_possible_typo, 

-

65 te_range_to_lsp, 

-

66) 

-

67from debputy.lsp.vendoring._deb822_repro import ( 

-

68 parse_deb822_file, 

-

69 Deb822FileElement, 

-

70 Deb822ParagraphElement, 

-

71) 

-

72from debputy.lsp.vendoring._deb822_repro.parsing import ( 

-

73 Deb822KeyValuePairElement, 

-

74 LIST_SPACE_SEPARATED_INTERPRETATION, 

-

75) 

-

76from debputy.lsp.vendoring._deb822_repro.tokens import ( 

-

77 Deb822Token, 

-

78) 

-

79from debputy.util import _info 

-

80 

-

81try: 

-

82 from debputy.lsp.vendoring._deb822_repro.locatable import ( 

-

83 Position as TEPosition, 

-

84 Range as TERange, 

-

85 START_POSITION, 

-

86 ) 

-

87 

-

88 from pygls.server import LanguageServer 

-

89 from pygls.workspace import TextDocument 

-

90except ImportError: 

-

91 pass 

-

92 

-

93 

-

94_LANGUAGE_IDS = [ 

-

95 "debian/control", 

-

96 # emacs's name 

-

97 "debian-control", 

-

98 # vim's name 

-

99 "debcontrol", 

-

100] 

-

101 

-

102 

-

103_DCTRL_FILE_METADATA = DctrlFileMetadata() 

-

104 

-

105 

-

106lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_CODE_ACTION) 

-

107lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL) 

-

108 

-

109 

-

110@lsp_hover(_LANGUAGE_IDS) 

-

111def _debian_control_hover( 

-

112 ls: "LanguageServer", 

-

113 params: HoverParams, 

-

114) -> Optional[Hover]: 

-

115 return deb822_hover(ls, params, _DCTRL_FILE_METADATA) 

-

116 

-

117 

-

118@lsp_completer(_LANGUAGE_IDS) 

-

119def _debian_control_completions( 

-

120 ls: "LanguageServer", 

-

121 params: CompletionParams, 

-

122) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]: 

-

123 return deb822_completer(ls, params, _DCTRL_FILE_METADATA) 

-

124 

-

125 

-

126@lsp_folding_ranges(_LANGUAGE_IDS) 

-

127def _debian_control_folding_ranges( 

-

128 ls: "LanguageServer", 

-

129 params: FoldingRangeParams, 

-

130) -> Optional[Sequence[FoldingRange]]: 

-

131 return deb822_folding_ranges(ls, params, _DCTRL_FILE_METADATA) 

-

132 

-

133 

-

134def _deb822_token_iter( 

-

135 tokens: Iterable[Deb822Token], 

-

136) -> Iterator[Tuple[Deb822Token, int, int, int, int, int]]: 

-

137 line_no = 0 

-

138 line_offset = 0 

-

139 

-

140 for token in tokens: 

-

141 start_line = line_no 

-

142 start_line_offset = line_offset 

-

143 

-

144 newlines = token.text.count("\n") 

-

145 line_no += newlines 

-

146 text_len = len(token.text) 

-

147 if newlines: 

-

148 if token.text.endswith("\n"): 148 ↛ 152: line 148 didn't jump to line 152, because the condition on line 148 was never false 

-

149 line_offset = 0 

-

150 else: 

-

151 # -2, one to remove the "\n" and one to get 0-offset 

-

152 line_offset = text_len - token.text.rindex("\n") - 2 

-

153 else: 

-

154 line_offset += text_len 

-

155 

-

156 yield token, start_line, start_line_offset, line_no, line_offset 

-

157 

-

158 

-

159def _paragraph_representation_field( 

-

160 paragraph: Deb822ParagraphElement, 

-

161) -> Deb822KeyValuePairElement: 

-

162 return next(iter(paragraph.iter_parts_of_type(Deb822KeyValuePairElement))) 

-

163 

-

164 

-
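The "representation field" is simply the stanza's first key-value pair; diagnostics that concern the stanza as a whole anchor to its range. A sketch (assumes the vendored deb822 API, which mirrors python-debian's debian._deb822_repro):

    from debputy.lsp.vendoring._deb822_repro import parse_deb822_file
    from debputy.lsp.vendoring._deb822_repro.parsing import (
        Deb822KeyValuePairElement,
    )

    content = "Package: foo\nArchitecture: any\n"
    deb822_file = parse_deb822_file(content.splitlines(keepends=True))
    stanza = next(iter(deb822_file))
    kvpair = next(iter(stanza.iter_parts_of_type(Deb822KeyValuePairElement)))
    print(kvpair.field_name)  # -> "Package"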

165def _extract_first_value_and_position( 

-

166 kvpair: Deb822KeyValuePairElement, 

-

167 stanza_pos: "TEPosition", 

-

168 position_codec: "LintCapablePositionCodec", 

-

169 lines: List[str], 

-

170) -> Tuple[Optional[str], Optional[Range]]: 

-

171 kvpair_pos = kvpair.position_in_parent().relative_to(stanza_pos) 

-

172 value_element_pos = kvpair.value_element.position_in_parent().relative_to( 

-

173 kvpair_pos 

-

174 ) 

-

175 for value_ref in kvpair.interpret_as( 175 ↛ 188: line 175 didn't jump to line 188, because the loop on line 175 didn't complete 

-

176 LIST_SPACE_SEPARATED_INTERPRETATION 

-

177 ).iter_value_references(): 

-

178 v = value_ref.value 

-

179 section_value_loc = value_ref.locatable 

-

180 value_range_te = section_value_loc.range_in_parent().relative_to( 

-

181 value_element_pos 

-

182 ) 

-

183 section_range_server_units = te_range_to_lsp(value_range_te) 

-

184 section_range = position_codec.range_to_client_units( 

-

185 lines, section_range_server_units 

-

186 ) 

-

187 return v, section_range 

-

188 return None, None 

-

189 

-

190 

-

191def _binary_package_checks( 

-

192 stanza: Deb822ParagraphElement, 

-

193 stanza_position: "TEPosition", 

-

194 source_stanza: Deb822ParagraphElement, 

-

195 representation_field_range: Range, 

-

196 position_codec: "LintCapablePositionCodec", 

-

197 lines: List[str], 

-

198 diagnostics: List[Diagnostic], 

-

199) -> None: 

-

200 package_name = stanza.get("Package", "") 

-

201 source_section = source_stanza.get("Section") 

-

202 section_kvpair = stanza.get_kvpair_element("Section", use_get=True) 

-

203 section: Optional[str] = None 

-

204 if section_kvpair is not None: 

-

205 section, section_range = _extract_first_value_and_position( 

-

206 section_kvpair, 

-

207 stanza_position, 

-

208 position_codec, 

-

209 lines, 

-

210 ) 

-

211 else: 

-

212 section_range = representation_field_range 

-

213 effective_section = section or source_section or "unknown" 

-

214 package_type = stanza.get("Package-Type", "") 

-

215 component_prefix = "" 

-

216 if "/" in effective_section: 216 ↛ 217: line 216 didn't jump to line 217, because the condition on line 216 was never true 

-

217 component_prefix, effective_section = effective_section.split("/", maxsplit=1) 

-

218 component_prefix += "/" 

-

219 

-

220 if package_name.endswith("-udeb") or package_type == "udeb": 220 ↛ 221: line 220 didn't jump to line 221, because the condition on line 220 was never true 

-

221 if package_type != "udeb": 

-

222 package_type_kvpair = stanza.get_kvpair_element( 

-

223 "Package-Type", use_get=True 

-

224 ) 

-

225 package_type_range = None 

-

226 if package_type_kvpair is not None: 

-

227 _, package_type_range = _extract_first_value_and_position( 

-

228 package_type_kvpair, 

-

229 stanza_position, 

-

230 position_codec, 

-

231 lines, 

-

232 ) 

-

233 if package_type_range is None: 

-

234 package_type_range = representation_field_range 

-

235 diagnostics.append( 

-

236 Diagnostic( 

-

237 package_type_range, 

-

238 'The Package-Type should be "udeb" given the package name', 

-

239 severity=DiagnosticSeverity.Warning, 

-

240 source="debputy", 

-

241 ) 

-

242 ) 

-

243 if effective_section != "debian-installer": 

-

244 quickfix_data = None 

-

245 if section is not None: 

-

246 quickfix_data = [ 

-

247 propose_correct_text_quick_fix( 

-

248 f"{component_prefix}debian-installer" 

-

249 ) 

-

250 ] 

-

251 diagnostics.append( 

-

252 Diagnostic( 

-

253 section_range, 

-

254 f'The Section should be "{component_prefix}debian-installer" for udebs', 

-

255 severity=DiagnosticSeverity.Warning, 

-

256 source="debputy", 

-

257 data=quickfix_data, 

-

258 ) 

-

259 ) 

-

260 

-

261 

-

262def _diagnostics_for_paragraph( 

-

263 stanza: Deb822ParagraphElement, 

-

264 stanza_position: "TEPosition", 

-

265 source_stanza: Deb822ParagraphElement, 

-

266 known_fields: Mapping[str, DctrlKnownField], 

-

267 other_known_fields: Mapping[str, DctrlKnownField], 

-

268 is_binary_paragraph: bool, 

-

269 doc_reference: str, 

-

270 position_codec: "LintCapablePositionCodec", 

-

271 lines: List[str], 

-

272 diagnostics: List[Diagnostic], 

-

273) -> None: 

-

274 representation_field = _paragraph_representation_field(stanza) 

-

275 representation_field_pos = representation_field.position_in_parent().relative_to( 

-

276 stanza_position 

-

277 ) 

-

278 representation_field_range_server_units = te_range_to_lsp( 

-

279 TERange.from_position_and_size( 

-

280 representation_field_pos, representation_field.size() 

-

281 ) 

-

282 ) 

-

283 representation_field_range = position_codec.range_to_client_units( 

-

284 lines, 

-

285 representation_field_range_server_units, 

-

286 ) 

-

287 for known_field in known_fields.values(): 

-

288 missing_field_severity = known_field.missing_field_severity 

-

289 if missing_field_severity is None or known_field.name in stanza: 

-

290 continue 

-

291 

-

292 if known_field.inherits_from_source and known_field.name in source_stanza: 

-

293 continue 

-

294 

-

295 diagnostics.append( 

-

296 Diagnostic( 

-

297 representation_field_range, 

-

298 f"Stanza is missing field {known_field.name}", 

-

299 severity=missing_field_severity, 

-

300 source="debputy", 

-

301 ) 

-

302 ) 

-

303 

-

304 if is_binary_paragraph: 

-

305 _binary_package_checks( 

-

306 stanza, 

-

307 stanza_position, 

-

308 source_stanza, 

-

309 representation_field_range, 

-

310 position_codec, 

-

311 lines, 

-

312 diagnostics, 

-

313 ) 

-

314 

-

315 seen_fields = {} 

-

316 

-

317 for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement): 

-

318 field_name_token = kvpair.field_token 

-

319 field_name = field_name_token.text 

-

320 field_name_lc = field_name.lower() 

-

321 normalized_field_name_lc = normalize_dctrl_field_name(field_name_lc) 

-

322 known_field = known_fields.get(normalized_field_name_lc) 

-

323 field_value = stanza[field_name] 

-

324 field_range_te = kvpair.range_in_parent().relative_to(stanza_position) 

-

325 field_position_te = field_range_te.start_pos 

-

326 field_range_server_units = te_range_to_lsp(field_range_te) 

-

327 field_range = position_codec.range_to_client_units( 

-

328 lines, 

-

329 field_range_server_units, 

-

330 ) 

-

331 field_name_typo_detected = False 

-

332 existing_field_range = seen_fields.get(normalized_field_name_lc) 

-

333 if existing_field_range is not None: 333 ↛ 334: line 333 didn't jump to line 334, because the condition on line 333 was never true 

-

334 existing_field_range[3].append(field_range) 

-

335 else: 

-

336 normalized_field_name = normalize_dctrl_field_name(field_name) 

-

337 seen_fields[field_name_lc] = ( 

-

338 field_name, 

-

339 normalized_field_name, 

-

340 field_range, 

-

341 [], 

-

342 ) 

-

343 

-

344 if known_field is None: 

-

345 candidates = detect_possible_typo(normalized_field_name_lc, known_fields) 

-

346 if candidates: 

-

347 known_field = known_fields[candidates[0]] 

-

348 token_range_server_units = te_range_to_lsp( 

-

349 TERange.from_position_and_size( 

-

350 field_position_te, kvpair.field_token.size() 

-

351 ) 

-

352 ) 

-

353 field_range = position_codec.range_to_client_units( 

-

354 lines, 

-

355 token_range_server_units, 

-

356 ) 

-

357 field_name_typo_detected = True 

-

358 diagnostics.append( 

-

359 Diagnostic( 

-

360 field_range, 

-

361 f'The "{field_name}" looks like a typo of "{known_field.name}".', 

-

362 severity=DiagnosticSeverity.Warning, 

-

363 source="debputy", 

-

364 data=[ 

-

365 propose_correct_text_quick_fix(known_fields[m].name) 

-

366 for m in candidates 

-

367 ], 

-

368 ) 

-

369 ) 

-

370 if known_field is None: 

-

371 known_else_where = other_known_fields.get(normalized_field_name_lc) 

-

372 if known_else_where is not None: 372 ↛ 373: line 372 didn't jump to line 373, because the condition on line 372 was never true 

-

373 intended_usage = "Source" if is_binary_paragraph else "Package" 

-

374 diagnostics.append( 

-

375 Diagnostic( 

-

376 field_range, 

-

377 f'The {field_name} is defined for use in the "{intended_usage}" stanza.' 

-

378 f" Please move it to the right place or remove it", 

-

379 severity=DiagnosticSeverity.Error, 

-

380 source="debputy", 

-

381 ) 

-

382 ) 

-

383 continue 

-

384 

-

385 if field_value.strip() == "": 385 ↛ 386: line 385 didn't jump to line 386, because the condition on line 385 was never true 

-

386 diagnostics.append( 

-

387 Diagnostic( 

-

388 field_range, 

-

389 f"The {field_name} has no value. Either provide a value or remove it.", 

-

390 severity=DiagnosticSeverity.Error, 

-

391 source="debputy", 

-

392 ) 

-

393 ) 

-

394 continue 

-

395 diagnostics.extend( 

-

396 known_field.field_diagnostics( 

-

397 kvpair, 

-

398 stanza, 

-

399 stanza_position, 

-

400 position_codec, 

-

401 lines, 

-

402 field_name_typo_reported=field_name_typo_detected, 

-

403 ) 

-

404 ) 

-

405 if known_field.spellcheck_value: 

-

406 words = kvpair.interpret_as(LIST_SPACE_SEPARATED_INTERPRETATION) 

-

407 spell_checker = default_spellchecker() 

-

408 value_position = kvpair.value_element.position_in_parent().relative_to( 

-

409 field_position_te 

-

410 ) 

-

411 for word_ref in words.iter_value_references(): 

-

412 token = word_ref.value 

-

413 for word, pos, endpos in spell_checker.iter_words(token): 

-

414 corrections = spell_checker.provide_corrections_for(word) 

-

415 if not corrections: 415 ↛ 417line 415 didn't jump to line 417, because the condition on line 415 was never false

-

416 continue 

-

417 word_loc = word_ref.locatable 

-

418 word_pos_te = word_loc.position_in_parent().relative_to( 

-

419 value_position 

-

420 ) 

-

421 if pos: 

-

422 word_pos_te = TEPosition(0, pos).relative_to(word_pos_te) 

-

423 word_range = TERange( 

-

424 START_POSITION, 

-

425 TEPosition(0, endpos - pos), 

-

426 ) 

-

427 word_range_server_units = te_range_to_lsp( 

-

428 TERange.from_position_and_size(word_pos_te, word_range) 

-

429 ) 

-

430 word_range = position_codec.range_to_client_units( 

-

431 lines, 

-

432 word_range_server_units, 

-

433 ) 

-

434 diagnostics.append( 

-

435 Diagnostic( 

-

436 word_range, 

-

437 f'Spelling "{word}"', 

-

438 severity=DiagnosticSeverity.Hint, 

-

439 source="debputy", 

-

440 data=[ 

-

441 propose_correct_text_quick_fix(c) for c in corrections 

-

442 ], 

-

443 ) 

-

444 ) 

-

445 source_value = source_stanza.get(field_name) 

-

446 if known_field.warn_if_default and field_value == known_field.default_value: 446 ↛ 447line 446 didn't jump to line 447, because the condition on line 446 was never true

-

447 diagnostics.append( 

-

448 Diagnostic( 

-

449 field_range, 

-

450 f"The {field_name} is redundant as it is set to the default value and the field should only be" 

-

451 " used in exceptional cases.", 

-

452 severity=DiagnosticSeverity.Warning, 

-

453 source="debputy", 

-

454 ) 

-

455 ) 

-

456 

-

457 if known_field.inherits_from_source and field_value == source_value: 457 ↛ 458line 457 didn't jump to line 458, because the condition on line 457 was never true

-

458 if range_compatible_with_remove_line_fix(field_range): 

-

459 fix_data = propose_remove_line_quick_fix() 

-

460 else: 

-

461 fix_data = None 

-

462 diagnostics.append( 

-

463 Diagnostic( 

-

464 field_range, 

-

465 f"The field {field_name} duplicates the value from the Source stanza.", 

-

466 severity=DiagnosticSeverity.Information, 

-

467 source="debputy", 

-

468 data=fix_data, 

-

469 ) 

-

470 ) 

-

471 for ( 

-

472 field_name, 

-

473 normalized_field_name, 

-

474 field_range, 

-

475 duplicates, 

-

476 ) in seen_fields.values(): 

-

477 if not duplicates: 477 ↛ 479line 477 didn't jump to line 479

-

478 continue 

-

479 related_information = [ 

-

480 DiagnosticRelatedInformation( 

-

481 location=Location(doc_reference, field_range), 

-

482 message=f"First definition of {field_name}", 

-

483 ) 

-

484 ] 

-

485 related_information.extend( 

-

486 DiagnosticRelatedInformation( 

-

487 location=Location(doc_reference, r), 

-

488 message=f"Duplicate of {field_name}", 

-

489 ) 

-

490 for r in duplicates 

-

491 ) 

-

492 for dup_range in duplicates: 

-

493 diagnostics.append( 

-

494 Diagnostic( 

-

495 dup_range, 

-

496 f"The {normalized_field_name} field name was used multiple times in this stanza." 

-

497 f" Please ensure the field is only used once per stanza. Note that {normalized_field_name} and" 

-

498 f" X[BCS]-{normalized_field_name} are considered the same field.", 

-

499 severity=DiagnosticSeverity.Error, 

-

500 source="debputy", 

-

501 related_information=related_information, 

-

502 ) 

-

503 ) 

-

504 

-

505 

-
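# Editorial sketch (not part of the upstream file): the shape of the
# `seen_fields` bookkeeping above, isolated. It assumes, as the duplicate
# diagnostic text implies, that normalize_dctrl_field_name() collapses the
# X[BCS]- prefix so that "XB-Depends" and "Depends" land on the same key.
# The range strings below stand in for the LSP Range objects used above.
def _demo_seen_fields_bookkeeping() -> None:
    seen_fields = {}
    for field_name, field_range in [("Depends", "range-1"), ("XB-Depends", "range-2")]:
        normalized_lc = normalize_dctrl_field_name(field_name.lower())
        existing = seen_fields.get(normalized_lc)
        if existing is not None:
            # Later occurrences are recorded as duplicates of the first one,
            # which later produces one "used multiple times" diagnostic per
            # duplicate range.
            existing[3].append(field_range)
        else:
            seen_fields[normalized_lc] = (
                field_name,
                normalize_dctrl_field_name(field_name),
                field_range,
                [],
            )
    # Under the stated assumption this leaves one logical field with one
    # recorded duplicate ("range-2").
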

def _scan_for_syntax_errors_and_token_level_diagnostics(
    deb822_file: Deb822FileElement,
    position_codec: LintCapablePositionCodec,
    lines: List[str],
    diagnostics: List[Diagnostic],
) -> int:
    first_error = len(lines) + 1
    spell_checker = default_spellchecker()
    for (
        token,
        start_line,
        start_offset,
        end_line,
        end_offset,
    ) in _deb822_token_iter(deb822_file.iter_tokens()):
        if token.is_error:
            first_error = min(first_error, start_line)
            start_pos = Position(
                start_line,
                start_offset,
            )
            end_pos = Position(
                end_line,
                end_offset,
            )
            token_range = position_codec.range_to_client_units(
                lines, Range(start_pos, end_pos)
            )
            diagnostics.append(
                Diagnostic(
                    token_range,
                    "Syntax error",
                    severity=DiagnosticSeverity.Error,
                    source="debputy (python-debian parser)",
                )
            )
        elif token.is_comment:
            for word, pos, end_pos in spell_checker.iter_words(token.text):
                corrections = spell_checker.provide_corrections_for(word)
                if not corrections:
                    continue
                start_pos = Position(
                    start_line,
                    pos,
                )
                end_pos = Position(
                    start_line,
                    end_pos,
                )
                word_range = position_codec.range_to_client_units(
                    lines, Range(start_pos, end_pos)
                )
                diagnostics.append(
                    Diagnostic(
                        word_range,
                        f'Spelling "{word}"',
                        severity=DiagnosticSeverity.Hint,
                        source="debputy",
                        data=[propose_correct_text_quick_fix(c) for c in corrections],
                    )
                )
    return first_error

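# Editorial sketch (not part of the upstream file): driving the token-level
# scan on an in-memory buffer. `parse_deb822_file` is invoked exactly as in
# _lint_debian_control below; the position codec is whatever
# LintCapablePositionCodec instance the server normally supplies, so it is
# taken as a parameter here rather than constructed.
def _demo_token_scan(position_codec: LintCapablePositionCodec) -> List[Diagnostic]:
    lines = [
        "Source: mypkg\n",
        "# A commnet with a typo for the spellchecker\n",
        "Standards-Version: 4.6.2\n",
    ]
    deb822_file = parse_deb822_file(
        lines,
        accept_files_with_duplicated_fields=True,
        accept_files_with_error_tokens=True,
    )
    diagnostics: List[Diagnostic] = []
    first_error = _scan_for_syntax_errors_and_token_level_diagnostics(
        deb822_file, position_codec, lines, diagnostics
    )
    # first_error is len(lines) + 1 when the buffer parsed without error tokens.
    assert first_error == len(lines) + 1
    return diagnostics
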

@lint_diagnostics(_LANGUAGE_IDS)
def _lint_debian_control(
    lint_state: LintState,
) -> Optional[List[Diagnostic]]:
    lines = lint_state.lines
    position_codec = lint_state.position_codec
    doc_reference = lint_state.doc_uri
    diagnostics = []
    deb822_file = parse_deb822_file(
        lines,
        accept_files_with_duplicated_fields=True,
        accept_files_with_error_tokens=True,
    )

    first_error = _scan_for_syntax_errors_and_token_level_diagnostics(
        deb822_file,
        position_codec,
        lines,
        diagnostics,
    )

    paragraphs = list(deb822_file)
    source_paragraph = paragraphs[0] if paragraphs else None

    for paragraph_no, paragraph in enumerate(paragraphs, start=1):
        paragraph_pos = paragraph.position_in_file()
        if paragraph_pos.line_position >= first_error:
            break
        is_binary_paragraph = paragraph_no != 1
        if is_binary_paragraph:
            known_fields = BINARY_FIELDS
            other_known_fields = SOURCE_FIELDS
        else:
            known_fields = SOURCE_FIELDS
            other_known_fields = BINARY_FIELDS
        _diagnostics_for_paragraph(
            paragraph,
            paragraph_pos,
            source_paragraph,
            known_fields,
            other_known_fields,
            is_binary_paragraph,
            doc_reference,
            position_codec,
            lines,
            diagnostics,
        )

    return diagnostics

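# Editorial sketch (not part of the upstream file): the stanza classification
# rule used in the loop above, isolated. The first stanza of debian/control
# is the Source stanza; every subsequent stanza is a binary Package stanza.
def _demo_stanza_classification(paragraph_count: int) -> List[str]:
    kinds = []
    for paragraph_no in range(1, paragraph_count + 1):
        is_binary_paragraph = paragraph_no != 1
        kinds.append("Package" if is_binary_paragraph else "Source")
    return kinds

# _demo_stanza_classification(3) -> ["Source", "Package", "Package"]
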

@lsp_semantic_tokens_full(_LANGUAGE_IDS)
def _semantic_tokens_full(
    ls: "LanguageServer",
    request: SemanticTokensParams,
) -> Optional[SemanticTokens]:
    return deb822_semantic_tokens_full(
        ls,
        request,
        _DCTRL_FILE_METADATA,
    )
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_control_reference_data_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_control_reference_data_py.html
deleted file mode 100644
index 70398a4..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_control_reference_data_py.html
+++ /dev/null
@@ -1,2896 +0,0 @@
[Deleted coverage page for src/debputy/lsp/lsp_debian_control_reference_data.py: 59% of 335 statements covered; generated by coverage.py v7.2.7, created at 2024-04-07 12:14 +0200. The page navigation chrome is omitted; the embedded source listing follows.]
import dataclasses
import functools
import itertools
import re
import sys
import textwrap
from abc import ABC
from enum import Enum, auto
from typing import (
    FrozenSet,
    Optional,
    cast,
    Mapping,
    Iterable,
    List,
    Generic,
    TypeVar,
    Union,
    Callable,
    Tuple,
    Any,
)

from debian.debian_support import DpkgArchTable
from lsprotocol.types import DiagnosticSeverity, Diagnostic, DiagnosticTag, Range

from debputy.lsp.quickfixes import (
    propose_correct_text_quick_fix,
    propose_remove_line_quick_fix,
)
from debputy.lsp.text_util import (
    normalize_dctrl_field_name,
    LintCapablePositionCodec,
    detect_possible_typo,
    te_range_to_lsp,
)
from debputy.lsp.vendoring._deb822_repro.parsing import (
    Deb822KeyValuePairElement,
    LIST_SPACE_SEPARATED_INTERPRETATION,
    Deb822ParagraphElement,
    Deb822FileElement,
    Interpretation,
    LIST_COMMA_SEPARATED_INTERPRETATION,
    ListInterpretation,
    _parsed_value_render_factory,
    Deb822ParsedValueElement,
    LIST_UPLOADERS_INTERPRETATION,
    _parse_whitespace_list_value,
)
from debputy.lsp.vendoring._deb822_repro.tokens import (
    Deb822FieldNameToken,
    _value_line_tokenizer,
    Deb822ValueToken,
    Deb822Token,
    _RE_WHITESPACE_SEPARATED_WORD_LIST,
    Deb822SpaceSeparatorToken,
)
from debputy.util import PKGNAME_REGEX

try:
    from debputy.lsp.vendoring._deb822_repro.locatable import (
        Position as TEPosition,
        Range as TERange,
        START_POSITION,
    )
except ImportError:
    pass


F = TypeVar("F", bound="Deb822KnownField")
S = TypeVar("S", bound="StanzaMetadata")


# FIXME: should go into python3-debian
_RE_COMMA = re.compile("([^,]*),([^,]*)")


@_value_line_tokenizer
def comma_or_space_split_tokenizer(v):
    # type: (str) -> Iterable[Deb822Token]
    assert "\n" not in v
    for match in _RE_WHITESPACE_SEPARATED_WORD_LIST.finditer(v):
        space_before, word, space_after = match.groups()
        if space_before:
            yield Deb822SpaceSeparatorToken(sys.intern(space_before))
        if "," in word:
            for m in _RE_COMMA.finditer(word):
                word_before, word_after = m.groups()
                if word_before:
                    yield Deb822ValueToken(word_before)
                # ... not quite a whitespace, but it is too much pain to make it a non-whitespace token.
                yield Deb822SpaceSeparatorToken(",")
                if word_after:
                    yield Deb822ValueToken(word_after)
        else:
            yield Deb822ValueToken(word)
        if space_after:
            yield Deb822SpaceSeparatorToken(sys.intern(space_after))


# FIXME: should go into python3-debian
LIST_COMMA_OR_SPACE_SEPARATED_INTERPRETATION = ListInterpretation(
    comma_or_space_split_tokenizer,
    _parse_whitespace_list_value,
    Deb822ParsedValueElement,
    Deb822SpaceSeparatorToken,
    Deb822SpaceSeparatorToken,
    _parsed_value_render_factory,
)

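# Editorial sketch (not part of the upstream file): what the comma-or-space
# tokenizer is meant to produce. "amd64, arm64 i386" mixes both separators;
# the value tokens come out as the three words, with "," and runs of spaces
# treated as separator tokens. In real use the interpretation is applied via
#   kvpair.interpret_as(LIST_COMMA_OR_SPACE_SEPARATED_INTERPRETATION)
# just like the other LIST_* interpretations in this file. The demo below
# only re-applies the same regex split on a hypothetical sample value.
def _demo_comma_or_space_split(sample: str = "amd64, arm64 i386") -> List[str]:
    words: List[str] = []
    for match in _RE_WHITESPACE_SEPARATED_WORD_LIST.finditer(sample):
        _, word, _ = match.groups()
        words.extend(w for w in word.split(",") if w)
    return words  # -> ["amd64", "arm64", "i386"]
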

CustomFieldCheck = Callable[
    [
        "F",
        Deb822KeyValuePairElement,
        "TERange",
        Deb822ParagraphElement,
        "TEPosition",
        "LintCapablePositionCodec",
        List[str],
    ],
    Iterable[Diagnostic],
]


ALL_SECTIONS_WITHOUT_COMPONENT = frozenset(
    [
        "admin",
        "cli-mono",
        "comm",
        "database",
        "debian-installer",
        "debug",
        "devel",
        "doc",
        "editors",
        "education",
        "electronics",
        "embedded",
        "fonts",
        "games",
        "gnome",
        "gnu-r",
        "gnustep",
        "graphics",
        "hamradio",
        "haskell",
        "interpreters",
        "introspection",
        "java",
        "javascript",
        "kde",
        "kernel",
        "libdevel",
        "libs",
        "lisp",
        "localization",
        "mail",
        "math",
        "metapackages",
        "misc",
        "net",
        "news",
        "ocaml",
        "oldlibs",
        "otherosfs",
        "perl",
        "php",
        "python",
        "ruby",
        "rust",
        "science",
        "shells",
        "sound",
        "tasks",
        "tex",
        "text",
        "utils",
        "vcs",
        "video",
        "virtual",
        "web",
        "x11",
        "xfce",
        "zope",
    ]
)

ALL_COMPONENTS = frozenset(
    [
        "main",
        "restricted",  # Ubuntu
        "non-free",
        "non-free-firmware",
        "contrib",
    ]
)


def _fields(*fields: F) -> Mapping[str, F]:
    return {normalize_dctrl_field_name(f.name.lower()): f for f in fields}


@dataclasses.dataclass(slots=True, frozen=True)
class Keyword:
    value: str
    hover_text: Optional[str] = None
    is_obsolete: bool = False
    replaced_by: Optional[str] = None


def _allowed_values(*values: Union[str, Keyword]) -> Mapping[str, Keyword]:
    as_keywords = [k if isinstance(k, Keyword) else Keyword(k) for k in values]
    as_mapping = {k.value: k for k in as_keywords if k.value}
    # Simple bug check
    assert len(as_keywords) == len(as_mapping)
    return as_mapping


ALL_SECTIONS = _allowed_values(
    *[
        s if c is None else f"{c}/{s}"
        for c, s in itertools.product(
            itertools.chain(cast("Iterable[Optional[str]]", [None]), ALL_COMPONENTS),
            ALL_SECTIONS_WITHOUT_COMPONENT,
        )
    ]
)

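# Editorial sketch (not part of the upstream file): ALL_SECTIONS contains
# every section both bare and prefixed with each component, as the product
# above implies, so the following lookups hold:
def _demo_section_keys() -> None:
    assert "python" in ALL_SECTIONS
    assert "non-free/python" in ALL_SECTIONS
    assert "python/non-free" not in ALL_SECTIONS  # the component prefix comes first
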

ALL_PRIORITIES = _allowed_values(
    Keyword(
        "required",
        hover_text=textwrap.dedent(
            """\
            The package is necessary for the proper functioning of the system (read: dpkg needs it).

            Applicable if dpkg *needs* this package to function and it is not a library.

            No two packages that both have a priority of *standard* or higher may conflict with
            each other.
            """
        ),
    ),
    Keyword(
        "important",
        hover_text=textwrap.dedent(
            """\
            The *important* packages are a bare minimum of commonly-expected and necessary tools.

            Applicable if 99% of all users in the distribution need this package and it is not a library.

            No two packages that both have a priority of *standard* or higher may conflict with
            each other.
            """
        ),
    ),
    Keyword(
        "standard",
        hover_text=textwrap.dedent(
            """\
            These packages provide a reasonably small but not too limited character-mode system. This is
            what will be installed by default (by the debian-installer) if the user does not select anything
            else. This does not include many large applications.

            Applicable if your distribution installer will install this package by default on a new system
            and it is not a library.

            No two packages that both have a priority of *standard* or higher may conflict with
            each other.
            """
        ),
    ),
    Keyword(
        "optional",
        hover_text="This is the default priority and used by the majority of all packages"
        " in the Debian archive",
    ),
    Keyword(
        "extra",
        is_obsolete=True,
        replaced_by="optional",
        hover_text="Obsolete alias of `optional`.",
    ),
)


def all_architectures_and_wildcards(arch2table) -> Iterable[Union[str, Keyword]]:
    wildcards = set()
    yield Keyword(
        "any",
        hover_text=textwrap.dedent(
            """\
            The package is an architecture dependent package and needs to be compiled for each and every
            architecture it is built for.

            The name `any` refers to the fact that this is an architecture *wildcard* matching
            *any machine architecture* supported by dpkg.
            """
        ),
    )
    yield Keyword(
        "all",
        hover_text=textwrap.dedent(
            """\
            The package is an architecture independent package. This is typically fitting for packages containing
            only scripts, data or documentation.

            The name `all` refers to the fact that the package can be used for *all* architectures at the same
            time. Though note that it is still subject to the rules of the `Multi-Arch` field.
            """
        ),
    )
    for arch_name, quad_tuple in arch2table.items():
        yield arch_name
        cpu_wc = "any-" + quad_tuple.cpu_name
        os_wc = quad_tuple.os_name + "-any"
        if cpu_wc not in wildcards:
            yield cpu_wc
            wildcards.add(cpu_wc)
        if os_wc not in wildcards:
            yield os_wc
            wildcards.add(os_wc)
    # Add the remaining wildcards


@functools.lru_cache
def dpkg_arch_and_wildcards() -> FrozenSet[str]:
    dpkg_arch_table = DpkgArchTable.load_arch_table()
    return frozenset(all_architectures_and_wildcards(dpkg_arch_table._arch2table))

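# Editorial sketch (not part of the upstream file): dpkg_arch_and_wildcards()
# yields the "any"/"all" Keyword entries plus concrete architectures and the
# derived "any-<cpu>"/"<os>-any" wildcards. The exact contents depend on the
# local dpkg architecture table, so no assertions are made here.
def _demo_arch_wildcards() -> None:
    arches = dpkg_arch_and_wildcards()
    # On a Debian system this is expected to include e.g. "amd64",
    # "any-amd64" and "linux-any" alongside the Keyword entries.
    print(sorted(a for a in arches if isinstance(a, str))[:5])
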

def _extract_first_value_and_position(
    kvpair: Deb822KeyValuePairElement,
    stanza_pos: "TEPosition",
    position_codec: "LintCapablePositionCodec",
    lines: List[str],
) -> Tuple[Optional[str], Optional[Range]]:
    kvpair_pos = kvpair.position_in_parent().relative_to(stanza_pos)
    value_element_pos = kvpair.value_element.position_in_parent().relative_to(
        kvpair_pos
    )
    for value_ref in kvpair.interpret_as(
        LIST_SPACE_SEPARATED_INTERPRETATION
    ).iter_value_references():
        v = value_ref.value
        section_value_loc = value_ref.locatable
        value_range_te = section_value_loc.range_in_parent().relative_to(
            value_element_pos
        )
        value_range_server_units = te_range_to_lsp(value_range_te)
        value_range = position_codec.range_to_client_units(
            lines, value_range_server_units
        )
        return v, value_range
    return None, None


def _dctrl_ma_field_validation(
    _known_field: "F",
    _kvpair: Deb822KeyValuePairElement,
    _field_range: "TERange",
    stanza: Deb822ParagraphElement,
    stanza_position: "TEPosition",
    position_codec: "LintCapablePositionCodec",
    lines: List[str],
) -> Iterable[Diagnostic]:
    ma_kvpair = stanza.get_kvpair_element("Multi-Arch", use_get=True)
    arch = stanza.get("Architecture", "any")
    if arch == "all" and ma_kvpair is not None:
        ma_value, ma_value_range = _extract_first_value_and_position(
            ma_kvpair,
            stanza_position,
            position_codec,
            lines,
        )
        if ma_value == "same":
            yield Diagnostic(
                ma_value_range,
                "Multi-Arch: same is not valid for Architecture: all packages. Maybe you want foreign?",
                severity=DiagnosticSeverity.Error,
                source="debputy",
            )


def _udeb_only_field_validation(
    known_field: "F",
    _kvpair: Deb822KeyValuePairElement,
    field_range_te: "TERange",
    stanza: Deb822ParagraphElement,
    _stanza_position: "TEPosition",
    position_codec: "LintCapablePositionCodec",
    lines: List[str],
) -> Iterable[Diagnostic]:
    package_type = stanza.get("Package-Type")
    if package_type != "udeb":
        field_range_server_units = te_range_to_lsp(field_range_te)
        field_range = position_codec.range_to_client_units(
            lines,
            field_range_server_units,
        )
        yield Diagnostic(
            field_range,
            f"The {known_field.name} field is only applicable to udeb packages (`Package-Type: udeb`)",
            severity=DiagnosticSeverity.Warning,
            source="debputy",
        )


def _arch_not_all_only_field_validation(
    known_field: "F",
    _kvpair: Deb822KeyValuePairElement,
    field_range_te: "TERange",
    stanza: Deb822ParagraphElement,
    _stanza_position: "TEPosition",
    position_codec: "LintCapablePositionCodec",
    lines: List[str],
) -> Iterable[Diagnostic]:
    architecture = stanza.get("Architecture")
    if architecture == "all":
        field_range_server_units = te_range_to_lsp(field_range_te)
        field_range = position_codec.range_to_client_units(
            lines,
            field_range_server_units,
        )
        yield Diagnostic(
            field_range,
            f"The {known_field.name} field is not applicable to arch:all packages (`Architecture: all`)",
            severity=DiagnosticSeverity.Warning,
            source="debputy",
        )


def _each_value_match_regex_validation(
    regex: re.Pattern,
    *,
    diagnostic_severity: DiagnosticSeverity = DiagnosticSeverity.Error,
) -> CustomFieldCheck:

    def _validator(
        _known_field: "F",
        kvpair: Deb822KeyValuePairElement,
        field_range_te: "TERange",
        _stanza: Deb822ParagraphElement,
        _stanza_position: "TEPosition",
        position_codec: "LintCapablePositionCodec",
        lines: List[str],
    ) -> Iterable[Diagnostic]:

        value_element_pos = kvpair.value_element.position_in_parent().relative_to(
            field_range_te.start_pos
        )
        for value_ref in kvpair.interpret_as(
            LIST_SPACE_SEPARATED_INTERPRETATION
        ).iter_value_references():
            v = value_ref.value
            m = regex.fullmatch(v)
            if m is not None:
                continue

            section_value_loc = value_ref.locatable
            value_range_te = section_value_loc.range_in_parent().relative_to(
                value_element_pos
            )
            value_range_server_units = te_range_to_lsp(value_range_te)
            value_range = position_codec.range_to_client_units(
                lines, value_range_server_units
            )
            yield Diagnostic(
                value_range,
                f'The value "{v}" does not match the regex {regex.pattern}.',
                severity=diagnostic_severity,
                source="debputy",
            )

    return _validator


def _combined_custom_field_check(*checks: CustomFieldCheck) -> CustomFieldCheck:
    def _validator(
        known_field: "F",
        kvpair: Deb822KeyValuePairElement,
        field_range_te: "TERange",
        stanza: Deb822ParagraphElement,
        stanza_position: "TEPosition",
        position_codec: "LintCapablePositionCodec",
        lines: List[str],
    ) -> Iterable[Diagnostic]:
        for check in checks:
            yield from check(
                known_field,
                kvpair,
                field_range_te,
                stanza,
                stanza_position,
                position_codec,
                lines,
            )

    return _validator

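# Editorial sketch (not part of the upstream file): the validators above are
# plain callables and compose. A field that must both parse as package names
# and only appear in udeb stanzas could, for instance, attach the following
# combined check (the combination itself is hypothetical; it mirrors how
# custom_field_check=... is attached to real fields further down):
_DEMO_COMBINED_CHECK = _combined_custom_field_check(
    _each_value_match_regex_validation(PKGNAME_REGEX),
    _udeb_only_field_validation,
)
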

class FieldValueClass(Enum):
    SINGLE_VALUE = auto(), LIST_SPACE_SEPARATED_INTERPRETATION
    SPACE_SEPARATED_LIST = auto(), LIST_SPACE_SEPARATED_INTERPRETATION
    BUILD_PROFILES_LIST = auto(), None  # TODO
    COMMA_SEPARATED_LIST = auto(), LIST_COMMA_SEPARATED_INTERPRETATION
    COMMA_SEPARATED_EMAIL_LIST = auto(), LIST_UPLOADERS_INTERPRETATION
    COMMA_OR_SPACE_SEPARATED_LIST = auto(), LIST_COMMA_OR_SPACE_SEPARATED_INTERPRETATION
    FREE_TEXT_FIELD = auto(), None
    DEP5_FILE_LIST = auto(), None  # TODO

    def interpreter(self) -> Optional[Interpretation[Any]]:
        return self.value[1]

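# Editorial sketch (not part of the upstream file): each FieldValueClass
# carries its python-debian interpretation as the second member of the enum
# value tuple, which interpreter() exposes.
def _demo_field_value_class() -> None:
    assert (
        FieldValueClass.COMMA_SEPARATED_LIST.interpreter()
        is LIST_COMMA_SEPARATED_INTERPRETATION
    )
    assert FieldValueClass.FREE_TEXT_FIELD.interpreter() is None  # no list form
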

@dataclasses.dataclass(slots=True, frozen=True)
class Deb822KnownField:
    name: str
    field_value_class: FieldValueClass
    warn_if_default: bool = True
    replaced_by: Optional[str] = None
    deprecated_with_no_replacement: bool = False
    missing_field_severity: Optional[DiagnosticSeverity] = None
    default_value: Optional[str] = None
    known_values: Optional[Mapping[str, Keyword]] = None
    unknown_value_diagnostic_severity: Optional[DiagnosticSeverity] = (
        DiagnosticSeverity.Error
    )
    hover_text: Optional[str] = None
    spellcheck_value: bool = False
    is_stanza_name: bool = False
    is_single_value_field: bool = True
    custom_field_check: Optional[CustomFieldCheck] = None

    def field_diagnostics(
        self,
        kvpair: Deb822KeyValuePairElement,
        stanza: Deb822ParagraphElement,
        stanza_position: "TEPosition",
        position_codec: "LintCapablePositionCodec",
        lines: List[str],
        *,
        field_name_typo_reported: bool = False,
    ) -> Iterable[Diagnostic]:
        field_name_token = kvpair.field_token
        field_range_te = kvpair.range_in_parent().relative_to(stanza_position)
        field_position_te = field_range_te.start_pos
        yield from self._diagnostics_for_field_name(
            field_name_token,
            field_position_te,
            field_name_typo_reported,
            position_codec,
            lines,
        )
        if self.custom_field_check is not None:
            yield from self.custom_field_check(
                self,
                kvpair,
                field_range_te,
                stanza,
                stanza_position,
                position_codec,
                lines,
            )
        if not self.spellcheck_value:
            yield from self._known_value_diagnostics(
                kvpair, field_position_te, position_codec, lines
            )

    def _diagnostics_for_field_name(
        self,
        token: Deb822FieldNameToken,
        token_position: "TEPosition",
        typo_detected: bool,
        position_codec: "LintCapablePositionCodec",
        lines: List[str],
    ) -> Iterable[Diagnostic]:
        field_name = token.text
        # Defeat the case-insensitivity from python-debian
        field_name_cased = str(field_name)
        token_range_server_units = te_range_to_lsp(
            TERange.from_position_and_size(token_position, token.size())
        )
        token_range = position_codec.range_to_client_units(
            lines,
            token_range_server_units,
        )
        if self.deprecated_with_no_replacement:
            yield Diagnostic(
                token_range,
                f"{field_name_cased} is deprecated and no longer used",
                severity=DiagnosticSeverity.Warning,
                source="debputy",
                tags=[DiagnosticTag.Deprecated],
                data=propose_remove_line_quick_fix(),
            )
        elif self.replaced_by is not None:
            yield Diagnostic(
                token_range,
                f"{field_name_cased} is a deprecated name for {self.replaced_by}",
                severity=DiagnosticSeverity.Warning,
                source="debputy",
                tags=[DiagnosticTag.Deprecated],
                data=propose_correct_text_quick_fix(self.replaced_by),
            )

        if not typo_detected and field_name_cased != self.name:
            yield Diagnostic(
                token_range,
                f"Non-canonical spelling of {self.name}",
                severity=DiagnosticSeverity.Information,
                source="debputy",
                data=propose_correct_text_quick_fix(self.name),
            )

    def _known_value_diagnostics(
        self,
        kvpair: Deb822KeyValuePairElement,
        field_position_te: "TEPosition",
        position_codec: "LintCapablePositionCodec",
        lines: List[str],
    ) -> Iterable[Diagnostic]:
        unknown_value_severity = self.unknown_value_diagnostic_severity
        allowed_values = self.known_values
        interpreter = self.field_value_class.interpreter()
        if not allowed_values or interpreter is None:
            return
        hint_text = None
        values = kvpair.interpret_as(interpreter)
        value_off = kvpair.value_element.position_in_parent().relative_to(
            field_position_te
        )
        first_value = True
        for value_ref in values.iter_value_references():
            value = value_ref.value
            if (
                not first_value
                and self.field_value_class == FieldValueClass.SINGLE_VALUE
            ):
                value_loc = value_ref.locatable
                value_position_te = value_loc.position_in_parent().relative_to(
                    value_off
                )
                value_range_in_server_units = te_range_to_lsp(
                    TERange.from_position_and_size(value_position_te, value_loc.size())
                )
                value_range = position_codec.range_to_client_units(
                    lines,
                    value_range_in_server_units,
                )
                yield Diagnostic(
                    value_range,
                    f"The field {self.name} can only have exactly one value.",
                    severity=DiagnosticSeverity.Error,
                    source="debputy",
                )
                # TODO: Add quickfix if the value is also invalid
                continue
            first_value = False

            known_value = self.known_values.get(value)
            if known_value is None:
                candidates = detect_possible_typo(
                    value,
                    self.known_values,
                )
                if hint_text is None:
                    if len(self.known_values) < 5:
                        values = ", ".join(sorted(self.known_values))
                        hint_text = f" Known values for this field: {values}"
                    else:
                        hint_text = ""
                fix_data = None
                severity = unknown_value_severity
                fix_text = hint_text
                if candidates:
                    match = candidates[0]
                    fix_text = f' It is possible that the value is a typo of "{match}".{fix_text}'
                    fix_data = [propose_correct_text_quick_fix(m) for m in candidates]
                elif severity is None:
                    continue
                if severity is None:
                    severity = DiagnosticSeverity.Warning
                    message = fix_text
                else:
                    message = f'The value "{value}" is not supported in {self.name}.{fix_text}'
            elif known_value.is_obsolete:
                replacement = known_value.replaced_by
                if replacement is not None:
                    message = f'The value "{value}" has been replaced by {replacement}'
                    severity = DiagnosticSeverity.Warning
                    fix_data = [propose_correct_text_quick_fix(replacement)]
                else:
                    message = (
                        f'The value "{value}" is obsolete without a single replacement'
                    )
                    severity = DiagnosticSeverity.Warning
                    fix_data = None
            else:
                # All good
                continue

            value_loc = value_ref.locatable
            value_position_te = value_loc.position_in_parent().relative_to(value_off)
            value_range_in_server_units = te_range_to_lsp(
                TERange.from_position_and_size(value_position_te, value_loc.size())
            )
            value_range = position_codec.range_to_client_units(
                lines,
                value_range_in_server_units,
            )
            yield Diagnostic(
                value_range,
                message,
                severity=severity,
                source="debputy",
                data=fix_data,
            )


@dataclasses.dataclass(slots=True, frozen=True)
class DctrlKnownField(Deb822KnownField):
    inherits_from_source: bool = False

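# Editorial sketch (not part of the upstream file): a minimal field
# declaration in the same style as the real tables below; the field name and
# its semantics are made up purely to illustrate the declaration shape.
_DEMO_FIELD = DctrlKnownField(
    "X-Demo-Field",
    FieldValueClass.SINGLE_VALUE,
    known_values=_allowed_values("yes", "no"),
    default_value="no",
    hover_text="Hypothetical field used only to illustrate the declaration shape.",
)
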

SOURCE_FIELDS = _fields(
    DctrlKnownField(
        "Source",
        FieldValueClass.SINGLE_VALUE,
        custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX),
        missing_field_severity=DiagnosticSeverity.Error,
        is_stanza_name=True,
        hover_text=textwrap.dedent(
            """\
            Declares the name of the source package.

            Note this must match the name in the first entry of the `debian/changelog` file.
            """
        ),
    ),
    DctrlKnownField(
        "Standards-Version",
        FieldValueClass.SINGLE_VALUE,
        missing_field_severity=DiagnosticSeverity.Error,
        hover_text=textwrap.dedent(
            """\
            Declares the last semantic version of the Debian Policy this package was last checked against.

            **Example**:
            ```
            Standards-Version: 4.5.2
            ```

            Note that the last version part of the full Policy version (the **.X** in 4.5.2**.X**) is
            typically omitted as it is used solely for editorial changes to the policy (e.g. typo fixes).
            """
        ),
    ),
    DctrlKnownField(
        "Section",
        FieldValueClass.SINGLE_VALUE,
        known_values=ALL_SECTIONS,
        unknown_value_diagnostic_severity=DiagnosticSeverity.Warning,
        hover_text=textwrap.dedent(
            """\
            Define the default section for packages in this source package.

            **Example**:
            ```
            Section: devel
            ```

            Please see <https://packages.debian.org/unstable> for more details about the sections.
            """
        ),
    ),
    DctrlKnownField(
        "Priority",
        FieldValueClass.SINGLE_VALUE,
        default_value="optional",
        warn_if_default=False,
        known_values=ALL_PRIORITIES,
        hover_text=textwrap.dedent(
            """\
            Define the default priority for packages in this source package.

            The priority field describes how important the package is for the functionality of the system.

            **Example**:
            ```
            Priority: optional
            ```

            Unless you know you need a different value, you should choose **optional** for your packages.
            """
        ),
    ),
    DctrlKnownField(
        "Maintainer",
        FieldValueClass.SINGLE_VALUE,
        missing_field_severity=DiagnosticSeverity.Error,
        hover_text=textwrap.dedent(
            """\
            The maintainer of the package.

            **Example**:
            ```
            Maintainer: Jane Contributor <jane@janes.email-provider.org>
            ```

            Note: If a person is listed in the Maintainer field, they should *not* be listed in the Uploaders field.
            """
        ),
    ),
    DctrlKnownField(
        "Uploaders",
        FieldValueClass.COMMA_SEPARATED_EMAIL_LIST,
        hover_text=textwrap.dedent(
            """\
            Comma separated list of uploaders associated with the package.

            **Example**:
            ```
            Uploaders:
             John Doe <john@doe.org>,
             Lisbeth Worker <lis@worker.org>,
            ```

            Formally, uploaders are considered co-maintainers for the package, with the party listed in the
            **Maintainer** field being the primary maintainer. In practice, each maintainer or maintenance
            team can have their own ruleset about the difference between the **Maintainer** and the
            **Uploaders**. As an example, the Python packaging team has a different rule set for how to
            react to a package depending on whether the packaging team is in the **Maintainer** or in the
            **Uploaders** field.

            Note: If a person is listed in the Maintainer field, they should *not* be listed in the Uploaders field.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Browser",
        FieldValueClass.SINGLE_VALUE,
        hover_text=textwrap.dedent(
            """\
            URL to the version control system repo used for the packaging. The URL should be usable with a
            browser *without* requiring any login.

            This should be used together with one of the other **Vcs-** fields.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Git",
        FieldValueClass.SPACE_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            URL to the git repo used for the packaging. The URL should be usable with `git clone`
            *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.

            Note it is possible to specify a branch via the `-b` option.

            ```
            Vcs-Git: https://salsa.debian.org/some/packaging-repo -b debian/unstable
            ```
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Svn",
        FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
        hover_text=textwrap.dedent(
            """\
            URL to the Subversion repo used for the packaging. The URL should be usable with `svn checkout`
            *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Arch",
        FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
        hover_text=textwrap.dedent(
            """\
            URL to the GNU Arch repo used for the packaging. The URL should be usable for getting a copy of the
            sources *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Cvs",
        FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
        hover_text=textwrap.dedent(
            """\
            URL to the CVS repo used for the packaging. The URL should be usable for getting a copy of the
            sources *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Darcs",
        FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
        hover_text=textwrap.dedent(
            """\
            URL to the Darcs repo used for the packaging. The URL should be usable for getting a copy of the
            sources *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Hg",
        FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
        hover_text=textwrap.dedent(
            """\
            URL to the Mercurial (hg) repo used for the packaging. The URL should be usable for getting a copy of the
            sources *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.
            """
        ),
    ),
    DctrlKnownField(
        "Vcs-Mtn",
        FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
        hover_text=textwrap.dedent(
            """\
            URL to the Monotone repo used for the packaging. The URL should be usable for getting a copy of the
            sources *without* requiring any login.

            This should be used together with the **Vcs-Browser** field provided there is a web UI for the repo.
            """
        ),
    ),
    DctrlKnownField(
        "DM-Upload-Allowed",
        FieldValueClass.SINGLE_VALUE,
        deprecated_with_no_replacement=True,
        default_value="no",
        known_values=_allowed_values("yes", "no"),
        hover_text=textwrap.dedent(
            """\
            Obsolete field

            It was used to enable Debian Maintainers to upload the package without requiring a Debian Developer
            to sign the package. This mechanism has been replaced by a new authorization mechanism.

            Please see <https://lists.debian.org/debian-devel-announce/2012/09/msg00008.html> for details about the
            replacement.
            """
        ),
    ),
    DctrlKnownField(
        "Build-Depends",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            All minimum build-dependencies for this source package. Needed for any target including **clean**.
            """
        ),
    ),
    DctrlKnownField(
        "Build-Depends-Arch",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            Build-dependencies required for building the architecture dependent binary packages of this source
            package.

            These build-dependencies must be satisfied when executing the **build-arch** and **binary-arch**
            targets either directly or indirectly in addition to those listed in **Build-Depends**.

            Note that these dependencies are *not* available during **clean**.
            """
        ),
    ),
    DctrlKnownField(
        "Build-Depends-Indep",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            Build-dependencies required for building the architecture independent binary packages of this source
            package.

            These build-dependencies must be satisfied when executing the **build-indep** and **binary-indep**
            targets either directly or indirectly in addition to those listed in **Build-Depends**.

            Note that these dependencies are *not* available during **clean**.
            """
        ),
    ),
    DctrlKnownField(
        "Build-Conflicts",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            Packages that must **not** be installed during **any** part of the build, including the **clean**
            target.

            Where possible, it is often better to configure the build so that it does not react to the package
            being present in the first place. Usually this is a question of using a `--without-foo` or
            `--disable-foo` or such to the build configuration.
            """
        ),
    ),
    DctrlKnownField(
        "Build-Conflicts-Arch",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            Packages that must **not** be installed during the **build-arch** or **binary-arch** targets.
            This also applies when these targets are run implicitly such as via the **binary** target.

            Where possible, it is often better to configure the build so that it does not react to the package
            being present in the first place. Usually this is a question of using a `--without-foo` or
            `--disable-foo` or such to the build configuration.
            """
        ),
    ),
    DctrlKnownField(
        "Build-Conflicts-Indep",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            Packages that must **not** be installed during the **build-indep** or **binary-indep** targets.
            This also applies when these targets are run implicitly such as via the **binary** target.

            Where possible, it is often better to configure the build so that it does not react to the package
            being present in the first place. Usually this is a question of using a `--without-foo` or
            `--disable-foo` or such to the build configuration.
            """
        ),
    ),
    DctrlKnownField(
        "Testsuite",
        FieldValueClass.SPACE_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            """\
            Declares that this package provides or should run install-time tests via `autopkgtest`.

            This field can be used to request an automatically generated autopkgtest via the **autodep8** package.
            Please refer to the documentation of the **autodep8** package for which values you can put into
            this field and what kind of testsuite the keywords will provide.

            Declaring this field in `debian/control` is only necessary when you want additional tests beyond
            those in `debian/tests/control`, as **dpkg** automatically records the package provided ones from
            `debian/tests/control`.
            """
        ),
    ),
    DctrlKnownField(
        "Homepage",
        FieldValueClass.SINGLE_VALUE,
        hover_text=textwrap.dedent(
            """\
            Link to the upstream homepage for this source package.

            **Example**:
            ```
            Homepage: https://www.janes-tools.org/frob-cleaner
            ```
            """
        ),
    ),
    DctrlKnownField(
        "Rules-Requires-Root",
        FieldValueClass.SPACE_SEPARATED_LIST,
        unknown_value_diagnostic_severity=None,
        known_values=_allowed_values(
            Keyword(
                "no",
                hover_text=textwrap.dedent(
                    """\
                    The build process will not require root or fakeroot during any step. This enables
                    dpkg-buildpackage, debhelper and/or `debputy` to perform several optimizations during the build.

                    This is the default with dpkg-build-api at version 1 or later.
                    """
                ),
            ),
            Keyword(
                "binary-targets",
                hover_text=textwrap.dedent(
                    """\
                    The build process assumes that dpkg-buildpackage will run the relevant binary
                    target with root or fakeroot. This was the historical default behaviour.

                    This is the default with dpkg-build-api at version 0.
                    """
                ),
            ),
        ),
        hover_text=textwrap.dedent(
            """\
            Declare if and when the package build assumes it is run as root or fakeroot.

            Most packages do not need to run as root or fakeroot and the legacy behaviour comes with a
            performance cost. This field can be used to explicitly declare that the legacy behaviour is
            unnecessary.

            **Example**:
            ```
            Rules-Requires-Root: no
            ```

            Setting this field to `no` *can* cause the package to stop building if it requires root.
            Depending on the situation, it might require some trivial or some complicated changes to fix that.
            If it breaks and you cannot figure out how to fix it, then reset the field to `binary-targets`
            and move on until you have time to fix it.

            The default value for this field depends on the `dpkg-build-api` version. If the package
            `Build-Depends` on `dpkg-build-api (>= 1)` or later, the default is `no`. Otherwise,
            the default is `binary-targets`.

            Note it is **not** possible to require running the package build as "true root".
            """
        ),
    ),
    DctrlKnownField(
        "Bugs",
        FieldValueClass.SINGLE_VALUE,
        hover_text=textwrap.dedent(
            """\
            Provide a custom bug tracker URL

            This field is *not* used by packages uploaded to Debian or most derivatives as the distro tooling
            has a default bugtracker built-in. It is primarily useful for third-party provided packages such
            that bug reporting tooling can redirect the user to their bug tracker.
            """
        ),
    ),
    DctrlKnownField(
        "Origin",
        FieldValueClass.SINGLE_VALUE,
        hover_text=textwrap.dedent(
            """\
            Declare the origin of the package.

            This field is *not* used by packages uploaded to Debian or most derivatives as the origin would
            be the distribution. It is primarily useful for third-party provided packages as some tools will
            detect this field.
            """
        ),
    ),
    DctrlKnownField(
        "X-Python-Version",
        FieldValueClass.COMMA_SEPARATED_LIST,
        replaced_by="X-Python3-Version",
        hover_text=textwrap.dedent(
            """\
            Obsolete field for declaring the supported Python2 versions

            Since Python2 is no longer supported, this field is now redundant. For Python3, the field is
            called **X-Python3-Version**.
            """
        ),
    ),
    DctrlKnownField(
        "X-Python3-Version",
        FieldValueClass.COMMA_SEPARATED_LIST,
        hover_text=textwrap.dedent(
            # Too lazy to provide a better description
            """\
            For declaring the supported Python3 versions

            This is used by the tools from the `dh-python` package. Please see the documentation of that package
            for when and how to use it.
            """
        ),
    ),
    DctrlKnownField(
        "XS-Autobuild",
        FieldValueClass.SINGLE_VALUE,
        known_values=_allowed_values("yes"),
        hover_text=textwrap.dedent(
            """\
            Used for non-free packages to denote that they may be auto-built on the Debian build infrastructure

            Note that adding this field **must** be combined with following the instructions at
            <https://www.debian.org/doc/manuals/developers-reference/pkgs.html#non-free-buildd>
            """
        ),
    ),
    DctrlKnownField(
        "Description",
        FieldValueClass.FREE_TEXT_FIELD,
        spellcheck_value=True,
        hover_text=textwrap.dedent(
            """\
            This field contains a human-readable description of the package. However, it is not used directly.

            Binary packages can reference parts of it via the `${source:Synopsis}` and the
            `${source:Extended-Description}` substvars. Without any of these substvars, the `Description` field
            of the `Source` stanza remains unused.

            The first line immediately after the field is called the *Synopsis* and is a short "noun-phrase"
            intended to provide a one-line summary of a package. The lines after the **Synopsis** are known
            as the **Extended Description** and are intended as a longer summary of a package.

            **Example**:
            ```
            Description: documentation generator for Python projects
             Sphinx is a tool for producing documentation for Python projects, using
             reStructuredText as markup language.
             .
             Sphinx features:
              * HTML, CHM, LaTeX output,
              * Cross-referencing source code,
              * Automatic indices,
              * Code highlighting, using Pygments,
              * Extensibility. Existing extensions:
                - automatic testing of code snippets,
                - including docstrings from Python modules.
             .
             Build-depend on sphinx if your package uses /usr/bin/sphinx-*
             executables. Build-depend on python3-sphinx if your package uses
             the Python API (for instance by calling python3 -m sphinx).
            ```

            The **Synopsis** is usually displayed in cases where there is limited space such as when reviewing
            the search results from `apt search foo`. It is often a good idea to imagine that the **Synopsis**
            part is inserted into a sentence like "The package provides {{Synopsis-goes-here}}". The
            **Extended Description** is a standalone description that should describe what the package does and
            how it relates to the rest of the system (in terms of, for example, which subsystem it is part of).
            Please see <https://www.debian.org/doc/debian-policy/ch-controlfields.html#description> for more details
            about the description field and suggestions for how to write it.
            """
        ),
    ),
)

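# Editorial sketch (not part of the upstream file): the tables are keyed by
# the normalized, lower-cased field name (see _fields above), so a lookup
# goes through the same normalization as the linting code in
# lsp_debian_control.py:
def _demo_source_field_lookup() -> None:
    key = normalize_dctrl_field_name("standards-version")
    field = SOURCE_FIELDS[key]
    assert field.name == "Standards-Version"
    assert field.missing_field_severity is DiagnosticSeverity.Error
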

1241BINARY_FIELDS = _fields( 

-

1242 DctrlKnownField( 

-

1243 "Package", 

-

1244 FieldValueClass.SINGLE_VALUE, 

-

1245 custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX), 

-

1246 is_stanza_name=True, 

-

1247 missing_field_severity=DiagnosticSeverity.Error, 

-

1248 hover_text="Declares the name of a binary package", 

-

1249 ), 

-    DctrlKnownField(
-        "Package-Type",
-        FieldValueClass.SINGLE_VALUE,
-        default_value="deb",
-        known_values=_allowed_values(
-            Keyword("deb", hover_text="The package will be built as a regular deb."),
-            Keyword(
-                "udeb",
-                hover_text="The package will be built as a micro-deb (also known as a udeb). These are solely used by the debian-installer.",
-            ),
-        ),
-        hover_text=textwrap.dedent(
-            """\
-            **Special-purpose only**. *This field is a special purpose field and is rarely needed.*
-            *You are recommended to omit it unless you know you need it or someone told you to use it.*
-
-            Determines the type of package. This field can be used to declare that a given package is a different
-            type of package than usual. The primary case where this is known to be useful is for building
-            micro-debs ("udeb") to be consumed by the debian-installer.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Architecture",
-        FieldValueClass.SPACE_SEPARATED_LIST,
-        missing_field_severity=DiagnosticSeverity.Error,
-        unknown_value_diagnostic_severity=None,
-        known_values=_allowed_values(*dpkg_arch_and_wildcards()),
-        hover_text=textwrap.dedent(
-            """\
-            Determines which architectures this package can be compiled for or if it is an architecture-independent
-            package. The value is a space-separated list of dpkg architecture names or wildcards.
-
-            **Example**:
-            ```
-            Package: architecture-specific-package
-            Architecture: any
-            # ...
-
-
-            Package: data-only-package
-            Architecture: all
-            Multi-Arch: foreign
-            # ...
-
-
-            Package: linux-only-package
-            Architecture: linux-any
-            # ...
-            ```
-
-            When in doubt, stick to the values **all** (for scripts, data or documentation, etc.) or **any**
-            (for anything that can be compiled). For official Debian packages, it is often easier to attempt the
-            compilation for unsupported architectures than to maintain the list of machine architectures that work.
-            """
-        ),
-    ),
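The completion values come from `dpkg_arch_and_wildcards()`, defined elsewhere in the module. One plausible way to obtain the same data is to ask dpkg itself: `dpkg-architecture -L` lists every known architecture name, and dpkg can also evaluate wildcards. A rough sketch of that approach (not the module's actual implementation):

```python
import subprocess

def dpkg_known_architectures():
    """Return dpkg's full list of known architecture names (requires dpkg-dev)."""
    output = subprocess.check_output(["dpkg-architecture", "-L"], text=True)
    return output.split()

# Wildcards such as "any" or "linux-any" are matched by dpkg itself, e.g.:
#   dpkg-architecture -i linux-any    (exit status 0 if the host arch matches)
```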

-    DctrlKnownField(
-        "Essential",
-        FieldValueClass.SINGLE_VALUE,
-        default_value="no",
-        known_values=_allowed_values(
-            Keyword(
-                "yes",
-                hover_text="The package is essential and uninstalling it will completely and utterly break the"
-                " system beyond repair.",
-            ),
-            Keyword(
-                "no",
-                hover_text=textwrap.dedent(
-                    """\
-                    The package is a regular package. This is the default and recommended.
-
-                    Note that declaring a package to be "Essential: no" is the same as not having the field,
-                    except that omitting the field wastes fewer bytes on everyone's hard disk.
-                    """
-                ),
-            ),
-        ),
-        hover_text=textwrap.dedent(
-            """\
-            **Special-purpose only**. *This field is a special purpose field and is rarely needed.*
-            *You are recommended to omit it unless you know you need it or someone told you to use it.*
-
-            Whether the package should be considered Essential as defined by Debian Policy.
-
-            Essential packages are subject to several distinct but very important rules:
-
-            * Essential packages are considered essential for the system to work. The packaging system
-              (APT and dpkg) will refuse to uninstall them without some very insistent force options and warnings.
-
-            * Other packages are not required to declare explicit dependencies on essential packages as a
-              side-effect of the above, except to ensure that the given essential package is upgraded
-              to a given minimum version.
-
-            * Once installed, essential packages must function at all times, no matter where dpkg is in its
-              installation or upgrade process. During bootstrapping or installation, this requirement is
-              relaxed.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "XB-Important",
-        FieldValueClass.SINGLE_VALUE,
-        replaced_by="Protected",
-        default_value="no",
-        known_values=_allowed_values(
-            Keyword(
-                "yes",
-                hover_text="The package is protected and attempts to uninstall it will cause strong warnings to the"
-                " user that they might be breaking the system.",
-            ),
-            Keyword(
-                "no",
-                hover_text=textwrap.dedent(
-                    """\
-                    The package is a regular package. This is the default and recommended.
-
-                    Note that declaring a package to be `XB-Important: no` is the same as not having the field,
-                    except that omitting the field wastes fewer bytes on everyone's hard disk.
-                    """
-                ),
-            ),
-        ),
-    ),
-    DctrlKnownField(
-        "Protected",
-        FieldValueClass.SINGLE_VALUE,
-        default_value="no",
-        known_values=_allowed_values(
-            Keyword(
-                "yes",
-                hover_text="The package is protected and attempts to uninstall it will cause strong warnings to the"
-                " user that they might be breaking the system.",
-            ),
-            Keyword(
-                "no",
-                hover_text=textwrap.dedent(
-                    """\
-                    The package is a regular package. This is the default and recommended.
-
-                    Note that declaring a package to be `Protected: no` is the same as not having the field,
-                    except that omitting the field wastes fewer bytes on everyone's hard disk.
-                    """
-                ),
-            ),
-        ),
-    ),
-    DctrlKnownField(
-        "Pre-Depends",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            **Advanced field**. *This field covers an advanced topic. If you are new to packaging, you are*
-            *probably not looking for this field (except to set a **${misc:Pre-Depends}** relation). Incorrect use*
-            *of this field can cause issues - among other things, causing issues during upgrades that users cannot work*
-            *around without passing `--force-*` options to dpkg.*
-
-            This field is like *Depends*, except that it also forces dpkg to complete installation of the packages
-            named before even starting the installation of the package which declares the pre-dependency.
-
-            **Example**:
-            ```
-            Pre-Depends: ${misc:Pre-Depends}
-            ```
-
-            Note this is a very strong dependency and not all packages support being a pre-dependency because it
-            puts additional requirements on the package being depended on. Use of **${misc:Pre-Depends}** is
-            pre-approved and recommended. Essential packages are known to support being in **Pre-Depends**.
-            However, careless use of **Pre-Depends** for essential packages can still cause problems for
-            dependency resolvers.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Depends",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            Lists the packages that must be installed before this package is installed.
-
-            **Example**:
-            ```
-            Package: foo
-            Architecture: any
-            Depends: ${misc:Depends},
-                     ${shlibs:Depends},
-                     libfoo1 (= ${binary:Version}),
-                     foo-data (= ${source:Version}),
-            ```
-
-            This field declares an absolute dependency. Before installing the package, **dpkg** will require
-            all dependencies to be in state `configured` first. Though, if there is a circular dependency between
-            two or more packages, **dpkg** will break that circle at an arbitrary point where necessary based on
-            built-in heuristics.
-
-            This field should be used if the depended-on package is required for the depending package to provide a
-            *significant amount of functionality* or when it is used in the **postinst** or **prerm** maintainer
-            scripts.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Recommends",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            Lists the packages that *should* be installed when this package is installed in all but
-            *unusual installations*.
-
-            **Example**:
-            ```
-            Recommends: foo-optional
-            ```
-
-            By default, APT will attempt to install recommends unless they cannot be installed or the user
-            has configured APT to skip recommends. Notably, during automated package builds for the Debian
-            archive, **Recommends** are **not** installed.
-
-            As implied, the package must have some core functionality that works **without** the
-            **Recommends** being satisfied, as they are not guaranteed to be there. If the package cannot
-            provide any functionality without a given package, that package should be in **Depends**.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Suggests",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            Lists the packages that may make this package more useful but not installing them is perfectly
-            reasonable as well. Suggests can also be useful for add-ons that only make sense in particular
-            corner cases, like supporting a non-standard file format.
-
-            **Example**:
-            ```
-            Suggests: bar
-            ```
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Enhances",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            This field is similar to Suggests but works in the opposite direction. It is used to declare that
-            this package can enhance the functionality of another package.
-
-            **Example**:
-            ```
-            Package: foo
-            Provides: debputy-plugin-foo
-            Enhances: debputy
-            ```
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Provides",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            Declares that this package also provides one or more other packages. This means that this package can
-            substitute for the provided package in some relations.
-
-            **Example**:
-            ```
-            Package: foo
-            ...
-
-            Package: foo-plus
-            Provides: foo (= ${source:Upstream-Version})
-            ```
-
-            If the provides relation is versioned, it must use a "strictly equals" version. If it does not
-            declare a version, then it *cannot* be used to satisfy a dependency with a version restriction.
-            Consider the following example:
-
-            **Archive scenario**: (This is *not* a `debian/control` file, despite the resemblance)
-            ```
-            Package: foo
-            Depends: bar (>= 1.0)
-
-            Package: bar
-            Version: 0.9
-
-            Package: bar-plus
-            Provides: bar (= 1.0)
-
-            Package: bar-clone
-            Provides: bar
-            ```
-
-            In this archive scenario, the `bar-plus` package is the only one that will satisfy the dependency
-            of `foo`. The `bar` package fails because its version is only *0.9*, and `bar-clone` fails because
-            its provides is unversioned while the dependency clause is versioned.
-            """
-        ),
-    ),
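The versioned-Provides rule above can be checked mechanically. A small illustration of the rule using `python-debian`'s version comparison (this is an example of the semantics, not code from this module): a dependency `bar (>= 1.0)` is satisfied by a real package or a *versioned* provides with a sufficient version, and never by an unversioned provides.

```python
from debian.debian_support import Version

def satisfies(dep_version, candidate_version):
    """True if a candidate (real or versioned-provides) satisfies 'bar (>= dep_version)'."""
    if candidate_version is None:
        # An unversioned Provides never satisfies a versioned dependency.
        return False
    return Version(candidate_version) >= Version(dep_version)

print(satisfies("1.0", "0.9"))   # False - the real 'bar' is too old
print(satisfies("1.0", "1.0"))   # True  - bar-plus with 'Provides: bar (= 1.0)'
print(satisfies("1.0", None))    # False - bar-clone with an unversioned 'Provides: bar'
```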

-    DctrlKnownField(
-        "Conflicts",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            **Warning**: *You may be looking for Breaks instead of Conflicts*.
-
-            This package cannot be installed together with the packages listed in the Conflicts field. This
-            is a *bigger hammer* than **Breaks** and is used sparingly. Notably, if you want to do a versioned
-            **Conflicts**, then you *almost certainly* want **Breaks** instead.
-
-            **Example**:
-            ```
-            Conflicts: bar
-            ```
-
-            Please check the description of the **Breaks** field for when you would use **Breaks** vs.
-            **Conflicts**.
-
-            Note if a package conflicts with itself (indirectly or via **Provides**), then it is using a
-            special rule for **Conflicts**. See section
-            7.6.2 "[Replacing whole packages, forcing their removal]" in the Debian Policy Manual.
-
-            [Replacing whole packages, forcing their removal]: https://www.debian.org/doc/debian-policy/ch-relationships.html#replacing-whole-packages-forcing-their-removal
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Breaks",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            This package cannot be installed together with the packages listed in the `Breaks` field.
-
-            This is often used to declare versioned issues such as "This package does not work with foo if
-            it is version 1.0 or less". In comparison, `Conflicts` is generally used to declare that
-            "This package does not work at all as long as foo is installed".
-
-            **Example**:
-            ```
-            Breaks: bar (<= 1.0~)
-            ```
-
-            **Breaks vs. Conflicts**:
-
-            * I moved files from **foo** to **bar** in version X, what should I do?
-
-              Add `Breaks: foo (<< X~)` + `Replaces: foo (<< X~)` to **bar**
-
-            * Upgrading **bar** while **foo** is version X or less causes **foo** or **bar** to break.
-              How do I solve this?
-
-              Add `Breaks: foo (<< X~)` to **bar**
-
-            * The **foo** and **bar** packages provide the same functionality (interface) but different
-              implementations and there can be at most one of them. What should I do?
-
-              See section 7.6.2 [Replacing whole packages, forcing their removal] in the Debian Policy Manual.
-
-            * How to handle when **foo** and **bar** packages are unrelated but happen to provide the same binary?
-
-              Attempt to resolve the name conflict by renaming the clashing files in question on either (or both) sides.
-
-            Note that the *~* in the version numbers in the answers is generally used to ensure this works correctly in
-            case of backports (in the Debian archive), where the package is rebuilt with the "~bpo" suffix in its
-            version.
-
-            [Replacing whole packages, forcing their removal]: https://www.debian.org/doc/debian-policy/ch-relationships.html#replacing-whole-packages-forcing-their-removal
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Replaces",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            This package either replaces another package or overwrites files that used to be provided by
-            another package.
-
-            **Attention**: The `Replaces` field is **always** used with either the `Breaks` or the `Conflicts` field.
-
-            **Example**:
-            ```
-            Package: foo
-            ...
-
-            # The foo package was split to move data files into foo-data in version 1.2-3
-            Package: foo-data
-            Replaces: foo (<< 1.2-3~)
-            Breaks: foo (<< 1.2-3~)
-            ```
-
-            Please check the description of the `Breaks` field for when you would use `Breaks` vs. `Conflicts`.
-            It also covers common uses of `Replaces`.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Build-Profiles",
-        FieldValueClass.BUILD_PROFILES_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            **Advanced field**. *This field covers an advanced topic. If you are new to packaging, you are*
-            *advised to leave it at its default until you have a working basic package or lots of time to understand*
-            *this topic.*
-
-            Declares that the package will only be built when the given build profiles are satisfied.
-
-            This field is primarily used in combination with build profiles inside the build dependency related fields
-            to reduce the number of build dependencies required during bootstrapping of a new architecture.
-
-            **Example**:
-            ```
-            Package: foo
-            ...
-
-            Package: foo-udeb
-            Package-Type: udeb
-            # Skip building foo-udeb when the build profile "noudeb" is set (e.g., via dpkg-buildpackage -Pnoudeb)
-            Build-Profiles: <!noudeb>
-            ```
-
-            Note that there is an official list of "common" build profiles with predefined purposes along with rules
-            for how and when they can be used. This list can be found at
-            <https://wiki.debian.org/BuildProfileSpec#Registered_profile_names>.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Section",
-        FieldValueClass.SINGLE_VALUE,
-        missing_field_severity=DiagnosticSeverity.Error,
-        inherits_from_source=True,
-        known_values=ALL_SECTIONS,
-        unknown_value_diagnostic_severity=DiagnosticSeverity.Warning,
-        hover_text=textwrap.dedent(
-            """\
-            Define the section for this package.
-
-            **Example**:
-            ```
-            Section: devel
-            ```
-
-            Please see <https://packages.debian.org/unstable> for more details about the sections.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Priority",
-        FieldValueClass.SINGLE_VALUE,
-        default_value="optional",
-        warn_if_default=False,
-        missing_field_severity=DiagnosticSeverity.Error,
-        inherits_from_source=True,
-        known_values=ALL_PRIORITIES,
-        hover_text=textwrap.dedent(
-            """\
-            Define the priority of this package.
-
-            The priority field describes how important the package is for the functionality of the system.
-
-            **Example**:
-            ```
-            Priority: optional
-            ```
-
-            Unless you know you need a different value, you should choose **optional** for your packages.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Multi-Arch",
-        FieldValueClass.SINGLE_VALUE,
-        # Explicit "no" tends to be used as "someone reviewed this and concluded no", so we do
-        # not warn about it being explicitly "no".
-        warn_if_default=False,
-        default_value="no",
-        custom_field_check=_dctrl_ma_field_validation,
-        known_values=_allowed_values(
-            Keyword(
-                "no",
-                hover_text=textwrap.dedent(
-                    """\
-                    The default. The package can be installed for at most one architecture at a time. It can
-                    *only* satisfy relations for the same architecture as itself. Note that `Architecture: all`
-                    packages are considered as a part of the system's "primary" architecture (see output of
-                    `dpkg --print-architecture`).
-
-                    Note: Despite the "no", the package *can* be installed for a foreign architecture (as an example,
-                    you can install a 32-bit version of a package on a 64-bit system). However, packages depending
-                    on it must also be installed for the foreign architecture.
-                    """
-                ),
-            ),
-            Keyword(
-                "foreign",
-                hover_text=textwrap.dedent(
-                    """\
-                    The package can be installed for at most one architecture at a time. However, it can
-                    satisfy relations for packages regardless of their architecture. This is often useful for packages
-                    solely providing data or binaries that have "Multi-Arch neutral interfaces".
-
-                    Sadly, describing a "Multi-Arch neutral interface" is hard and often only done by Multi-Arch
-                    experts on a case-by-case basis. Some programs and scripts have "Multi-Arch dependent interfaces"
-                    and are not safe to declare as `Multi-Arch: foreign`.
-
-                    The name "foreign" refers to the fact that the package can satisfy relations for native
-                    *and foreign* architectures at the same time.
-                    """
-                ),
-            ),
-            Keyword(
-                "same",
-                hover_text=textwrap.dedent(
-                    """\
-                    The same version of the package can be co-installed for multiple architectures. However,
-                    for this to work, the package *must* ship all files in architecture unique paths (usually
-                    beneath `/usr/lib/<DEB_HOST_MULTIARCH>`) or have bit-for-bit identical content
-                    in files that are in non-architecture unique paths (such as files beneath `/usr/share/doc`).
-
-                    The name `same` refers to the fact that the package can satisfy relations only for the `same`
-                    architecture as itself. However, in this case, it is co-installable with itself as noted above.
-                    Note: This value **cannot** be used with `Architecture: all`.
-                    """
-                ),
-            ),
-            Keyword(
-                "allowed",
-                hover_text=textwrap.dedent(
-                    """\
-                    **Advanced value**. The package is *not* co-installable with itself but can satisfy Multi-Arch
-                    foreign and Multi-Arch same relations at the same time. This is useful for implementations of
-                    scripting languages (such as Perl or Python). Here the interpreter contextually needs to
-                    satisfy some relations as `Multi-Arch: foreign` and others as `Multi-Arch: same`.
-
-                    Typically, native extensions or plugins will need a `Multi-Arch: same`-relation as they only
-                    work with the interpreter compiled for the same machine architecture as themselves, whereas
-                    scripts are usually less picky and can rely on the `Multi-Arch: foreign` relation. Packages
-                    wanting to rely on the "Multi-Arch: foreign" interface must explicitly declare this by adding a
-                    `:any` suffix to the package name in the dependency relation (e.g. `Depends: python3:any`).
-                    However, the `:any` suffix cannot be used unconditionally and should not be used unless you
-                    know you need it.
-                    """
-                ),
-            ),
-        ),
-        hover_text=textwrap.dedent(
-            """\
-            **Advanced field**. *This field covers an advanced topic. If you are new to packaging, you are*
-            *advised to leave it at its default until you have a working basic package or lots of time to understand*
-            *this topic.*
-
-            This field is used to declare the Multi-Arch interface of the package.
-
-            The `Multi-Arch` field is used to inform the installation system (APT and dpkg) about how it should handle
-            dependency relations involving this package and foreign architectures. This is useful for multiple purposes
-            such as cross-building without emulation and installing 32-bit packages on a 64-bit system. The latter is
-            often done to use legacy apps or old games that were never ported to 64-bit machines.
-
-            **Example**:
-            ```
-            Multi-Arch: foreign
-            ```
-
-            The rules for `Multi-Arch` can be quite complicated, but in many cases the following simple rules of thumb
-            get you a long way:
-
-            * If the [Multi-Arch hinter] comes with a hint, then it is almost certainly correct. You are recommended
-              to check the hint for further details (some changes can be complicated to do). Note that the
-              Multi-Arch hinter is only run for official Debian packages and may not be applicable to your case.
-
-            * If you have an `Architecture: all` data-only package, then it often wants to be `Multi-Arch: foreign`
-
-            * If you have an architecture dependent package, where everything is installed in
-              `/usr/lib/${DEB_HOST_MULTIARCH}` (plus a bit of standard documentation in `/usr/share/doc`), then
-              you *probably* want `Multi-Arch: same`
-
-            * If none of the above applies, then omit the field unless you know what you are doing or you are
-              receiving advice from a Multi-Arch expert.
-
-
-            There are 4 possible values for the Multi-Arch field, though not all values are applicable to all packages:
-
-
-            * `no` - The default. The package can be installed for at most one architecture at a time. It can
-              *only* satisfy relations for the same architecture as itself. Note that `Architecture: all` packages
-              are considered as a part of the system's "primary" architecture (see output of `dpkg --print-architecture`).
-
-              Use of an explicit `no` over omitting the field is commonly done to signal that someone took the
-              effort to understand the situation and concluded `no` was the right answer.
-
-              Note: Despite the `no`, the package *can* be installed for a foreign architecture (e.g. you can
-              install a 32-bit version of a package on a 64-bit system). However, packages depending on it must also
-              be installed for the foreign architecture.
-
-
-            * `foreign` - The package can be installed for at most one architecture at a time. However, it can
-              satisfy relations for packages regardless of their architecture. This is often useful for packages
-              solely providing data or binaries that have "Multi-Arch neutral interfaces". Sadly, describing
-              a "Multi-Arch neutral interface" is hard and often only done by Multi-Arch experts on a case-by-case
-              basis. Among other things, scripts, despite being the same on all architectures, can still have a
-              "non-neutral" Multi-Arch interface if their output is architecture dependent or if their dependencies
-              force them out of the `foreign` role. The dependency issue usually happens when depending indirectly
-              on a `Multi-Arch: allowed` package.
-
-              Some programs have "Multi-Arch dependent interfaces" and are not safe to declare as
-              `Multi-Arch: foreign`. The name `foreign` refers to the fact that the package can satisfy relations
-              for native *and foreign* architectures at the same time.
-
-
-            * `same` - The same version of the package can be co-installed for multiple architectures. However,
-              for this to work, the package **must** ship all files in architecture unique paths (usually
-              beneath `/usr/lib/${DEB_HOST_MULTIARCH}`) **or** have bit-for-bit identical content in files
-              that are in non-architecture unique paths (e.g. `/usr/share/doc`). Note that these packages
-              typically do not contain configuration files or **dpkg** `conffile`s.
-
-              The name `same` refers to the fact that the package can satisfy relations only for the "same"
-              architecture as itself. However, in this case, it is co-installable with itself as noted above.
-
-              Note: This value **cannot** be used with `Architecture: all`.
-
-
-            * `allowed` - **Advanced value**. This value is for a complex use-case that most people do not
-              need. Consider it only if none of the other values seem to do the trick.
-
-              The package is **NOT** co-installable with itself but can satisfy Multi-Arch foreign and Multi-Arch same
-              relations at the same time. This is useful for implementations of scripting languages (e.g. Perl or Python).
-              Here the interpreter contextually needs to satisfy some relations as `Multi-Arch: foreign` and others as
-              `Multi-Arch: same` (or `Multi-Arch: no`).
-
-              Typically, native extensions or plugins will need a `Multi-Arch: same`-relation as they only work with
-              the interpreter compiled for the same machine architecture as themselves, whereas scripts are usually
-              less picky and can rely on the `Multi-Arch: foreign` relation. Packages wanting to rely on the
-              `Multi-Arch: foreign` interface must explicitly declare this by adding a `:any` suffix to the package name
-              in the dependency relation (such as `Depends: python3:any`). However, the `:any` suffix cannot be used
-              unconditionally and should not be used unless you know you need it.
-
-              Note that depending indirectly on a `Multi-Arch: allowed` package can require an `Architecture: all` +
-              `Multi-Arch: foreign` package to be converted to an `Architecture: any` package. This case is named
-              the "Multi-Arch interpreter problem", since it is commonly seen with script interpreters. However,
-              despite the name, it can happen to any kind of package. The bug [Debian#984701] is an example of
-              this happening in practice.
-
-            [Multi-Arch hinter]: https://wiki.debian.org/MultiArch/Hints
-            [Debian#984701]: https://bugs.debian.org/984701
-            """
-        ),
-    ),
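The field is wired to `_dctrl_ma_field_validation`, whose body is outside this hunk. Based solely on the rule stated above (`Multi-Arch: same` cannot be combined with `Architecture: all`), a minimal sketch of one check such a hook could perform; the real validation presumably does more:

```python
def check_multi_arch(architecture: str, multi_arch: str):
    """Hypothetical check mirroring one documented rule; not the module's actual hook."""
    if multi_arch == "same" and architecture.strip() == "all":
        return "Multi-Arch: same cannot be used with Architecture: all"
    return None

print(check_multi_arch("all", "same"))  # flags the invalid combination
print(check_multi_arch("any", "same"))  # None - OK
```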

-    DctrlKnownField(
-        "XB-Installer-Menu-Item",
-        FieldValueClass.SINGLE_VALUE,
-        custom_field_check=_combined_custom_field_check(
-            _udeb_only_field_validation,
-            _each_value_match_regex_validation(re.compile(r"^[1-9]\d{3,4}$")),
-        ),
-        hover_text=textwrap.dedent(
-            """\
-            This field is only relevant for `udeb` packages (debian-installer).
-
-            The field is used to declare where in the installer menu this package's menu item should
-            be placed (assuming it has any menu item). For packages targeting the Debian archive,
-            any new package should have its menu item number aligned with the debian-installer team
-            before upload.
-
-            A menu item is 4-5 digits (in the range `1000 <= X <= 99999`). In rare cases, the menu
-            item can be architecture dependent. For architecture dependent menu item values, use a
-            custom substvar.
-
-            See <https://d-i.debian.org/doc/internals/apa.html> for the full list of menu item ranges
-            and for how to request a number.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "X-DH-Build-For-Type",
-        FieldValueClass.SINGLE_VALUE,
-        custom_field_check=_arch_not_all_only_field_validation,
-        default_value="host",
-        known_values=_allowed_values(
-            Keyword(
-                "host",
-                hover_text="The package should be compiled for `DEB_HOST_ARCH` (the default).",
-            ),
-            Keyword(
-                "target",
-                hover_text="The package should be compiled for `DEB_TARGET_ARCH`.",
-            ),
-        ),
-        hover_text=textwrap.dedent(
-            """\
-            **Special-purpose only**. *This field is a special purpose field and is rarely needed.*
-            *You are recommended to omit it unless you know you need it or someone told you to use it.*
-
-            This field is used when building a cross-compiling C compiler (or in similar cases), where some
-            packages need to be built for the target (DEB_**TARGET**_ARCH) rather than the host
-            (DEB_**HOST**_ARCH) architecture.
-
-            **Example**:
-            ```
-            Package: gcc
-            Architecture: any
-            # ...
-
-            Package: libgcc-s1
-            Architecture: any
-            # When building a cross-compiling gcc, this library needs to be built for the target architecture,
-            # as binaries compiled by gcc will link with this library.
-            X-DH-Build-For-Type: target
-            # ...
-            ```
-
-            If you are in doubt, then you probably do **not** need this field.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "X-Time64-Compat",
-        FieldValueClass.SINGLE_VALUE,
-        custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX),
-        hover_text=textwrap.dedent(
-            """\
-            Special purpose field related to the 64-bit time transition.
-
-            It is used to inform packaging helpers what the original (non-transitioned) package name
-            was when the auto-detection is inadequate. The non-transitioned package name is then
-            conditionally provided in the `${t64:Provides}` substitution variable.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Homepage",
-        FieldValueClass.SINGLE_VALUE,
-        hover_text=textwrap.dedent(
-            """\
-            Link to the upstream homepage for this binary package.
-
-            This field is rarely used in Package stanzas as most binary packages should have the
-            same homepage as the source package. Though, in the exceptional case where a particular
-            binary package should have a more specific homepage than the source package, you can
-            use this field to override the source package field.
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "Description",
-        FieldValueClass.FREE_TEXT_FIELD,
-        spellcheck_value=True,
-        # It will build just fine. But no one will know what it is for, so it probably won't be installed
-        missing_field_severity=DiagnosticSeverity.Warning,
-        hover_text=textwrap.dedent(
-            """\
-            A human-readable description of the package. This field consists of two related but distinct parts.
-
-            The first line immediately after the field is called the *Synopsis* and is a short "noun-phrase"
-            intended to provide a one-line summary of the package. The lines after the **Synopsis** are known
-            as the **Extended Description** and are intended as a longer summary of the package.
-
-            **Example**:
-            ```
-            Description: documentation generator for Python projects
-             Sphinx is a tool for producing documentation for Python projects, using
-             reStructuredText as markup language.
-             .
-             Sphinx features:
-              * HTML, CHM, LaTeX output,
-              * Cross-referencing source code,
-              * Automatic indices,
-              * Code highlighting, using Pygments,
-              * Extensibility. Existing extensions:
-                - automatic testing of code snippets,
-                - including docstrings from Python modules.
-             .
-             Build-depend on sphinx if your package uses /usr/bin/sphinx-*
-             executables. Build-depend on python3-sphinx if your package uses
-             the Python API (for instance by calling python3 -m sphinx).
-            ```
-
-            The **Synopsis** is usually displayed in cases where there is limited space such as when reviewing
-            the search results from `apt search foo`. It is often a good idea to imagine that the **Synopsis**
-            part is inserted into a sentence like "The package provides {{Synopsis-goes-here}}". The
-            **Extended Description** is a standalone description that should describe what the package does and
-            how it relates to the rest of the system (in terms of, for example, which subsystem it is part of).
-            Please see <https://www.debian.org/doc/debian-policy/ch-controlfields.html#description> for more details
-            about the description field and suggestions for how to write it.
-            """
-        ),
-    ),
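As described above, the field value splits into a one-line synopsis and an extended description. In the deb822 encoding, the synopsis is the text on the field's own line, each continuation line starts with a space, and a lone `.` marks a blank line. A small sketch of that decoding (standard deb822 semantics, not this module's parser):

```python
def split_description(value: str):
    """Split a deb822 Description value into (synopsis, extended_description)."""
    lines = value.splitlines()
    synopsis = lines[0].strip()
    # Drop the leading continuation space; a lone "." encodes a blank line.
    extended = [("" if line.strip() == "." else line[1:]) for line in lines[1:]]
    return synopsis, "\n".join(extended)

syn, ext = split_description(
    "documentation generator for Python projects\n"
    " Sphinx is a tool for producing documentation.\n"
    " .\n"
    " Sphinx features: ..."
)
print(syn)  # -> documentation generator for Python projects
```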

-    DctrlKnownField(
-        "XB-Cnf-Visible-Pkgname",
-        FieldValueClass.SINGLE_VALUE,
-        custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX),
-        hover_text=textwrap.dedent(
-            """\
-            **Special-case field**: *This field is only useful in very special circumstances.*
-            *Consider whether you truly need it before adding this field.*
-
-            This field is used by `command-not-found` and can be used to override which package
-            `command-not-found` should propose the user to install.
-
-            Normally, when `command-not-found` detects a missing command, it will suggest that the
-            user install the package named in the `Package` field. In most cases, this
-            is what you want. However, in certain special cases, the binary is provided by a
-            minimal package for technical reasons (like `python3-minimal`) and the user should
-            really install a package that provides more features (such as `python3` to follow
-            the example).
-
-            **Example**:
-            ```
-            Package: python3-minimal
-            XB-Cnf-Visible-Pkgname: python3
-            ```
-
-            Related bug: <https://bugs.launchpad.net/ubuntu/+source/python-defaults/+bug/1867157>
-            """
-        ),
-    ),
-    DctrlKnownField(
-        "X-DhRuby-Root",
-        FieldValueClass.SINGLE_VALUE,
-        hover_text=textwrap.dedent(
-            """\
-            Used by `dh_ruby` to request the "multi-binary" layout and to declare where the root for the
-            given package is.
-
-            Please refer to the documentation of `dh_ruby` for more details.
-
-            <https://manpages.debian.org/dh_ruby>
-            """
-        ),
-    ),
-)
-_DEP5_HEADER_FIELDS = _fields(
-    Deb822KnownField(
-        "Format",
-        FieldValueClass.SINGLE_VALUE,
-        is_stanza_name=True,
-        missing_field_severity=DiagnosticSeverity.Error,
-    ),
-    Deb822KnownField(
-        "Upstream-Name",
-        FieldValueClass.FREE_TEXT_FIELD,
-    ),
-    Deb822KnownField(
-        "Upstream-Contact",
-        FieldValueClass.FREE_TEXT_FIELD,
-    ),
-    Deb822KnownField(
-        "Source",
-        FieldValueClass.FREE_TEXT_FIELD,
-    ),
-    Deb822KnownField(
-        "Disclaimer",
-        FieldValueClass.FREE_TEXT_FIELD,
-        spellcheck_value=True,
-    ),
-    Deb822KnownField(
-        "Comment",
-        FieldValueClass.FREE_TEXT_FIELD,
-        spellcheck_value=True,
-    ),
-    Deb822KnownField(
-        "License",
-        FieldValueClass.FREE_TEXT_FIELD,
-        # Do not tempt people to change legal text because the spellchecker wants to do a typo fix.
-        spellcheck_value=False,
-    ),
-)
-_DEP5_FILES_FIELDS = _fields(
-    Deb822KnownField(
-        "Files",
-        FieldValueClass.DEP5_FILE_LIST,
-        is_stanza_name=True,
-        missing_field_severity=DiagnosticSeverity.Error,
-    ),
-    Deb822KnownField(
-        "Copyright",
-        FieldValueClass.FREE_TEXT_FIELD,
-        # Mostly going to be names with very little free-text; high risk of false positives with low value
-        spellcheck_value=False,
-        missing_field_severity=DiagnosticSeverity.Error,
-    ),
-    Deb822KnownField(
-        "License",
-        FieldValueClass.FREE_TEXT_FIELD,
-        missing_field_severity=DiagnosticSeverity.Error,
-        # Do not tempt people to change legal text because the spellchecker wants to do a typo fix.
-        spellcheck_value=False,
-    ),
-    Deb822KnownField(
-        "Comment",
-        FieldValueClass.FREE_TEXT_FIELD,
-        spellcheck_value=True,
-    ),
-)
-_DEP5_LICENSE_FIELDS = _fields(
-    Deb822KnownField(
-        "License",
-        FieldValueClass.FREE_TEXT_FIELD,
-        is_stanza_name=True,
-        # Do not tempt people to change legal text because the spellchecker wants to do a typo fix.
-        spellcheck_value=False,
-        missing_field_severity=DiagnosticSeverity.Error,
-    ),
-    Deb822KnownField(
-        "Comment",
-        FieldValueClass.FREE_TEXT_FIELD,
-        spellcheck_value=True,
-    ),
-)
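These three stanza types (Header, Files, License) make up the machine-readable `debian/copyright` format (DEP-5). Independently of this module, `python-debian` ships a parser for it; a short usage sketch:

```python
from debian.copyright import Copyright

with open("debian/copyright") as fh:
    cr = Copyright(fh)

print(cr.header.format)  # the Format: URL from the header stanza
for files_stanza in cr.all_files_paragraphs():
    # license may be None if the stanza omits the field
    synopsis = files_stanza.license.synopsis if files_stanza.license else "?"
    print(files_stanza.files, synopsis)
```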

-
-_DTESTSCTRL_FIELDS = _fields(
-    Deb822KnownField(
-        "Architecture",
-        FieldValueClass.SPACE_SEPARATED_LIST,
-        unknown_value_diagnostic_severity=None,
-        known_values=_allowed_values(*dpkg_arch_and_wildcards()),
-        hover_text=textwrap.dedent(
-            """\
-            When package tests are only supported on a limited set of
-            architectures, or are known not to work on a particular (set of)
-            architecture(s), this field can be used to define the supported
-            architectures. The autopkgtest will be skipped when the
-            architecture of the testbed doesn't match the content of this
-            field. The format is the same as in (Build-)Depends, with the
-            understanding that `all` is not allowed, and `any` means that
-            the test will be run on every architecture, which is the default
-            when not specifying this field at all.
-            """
-        ),
-    ),
-    Deb822KnownField(
-        "Classes",
-        FieldValueClass.FREE_TEXT_FIELD,
-        hover_text=textwrap.dedent(
-            """\
-            Most package tests should work in a minimal environment and are
-            usually not hardware specific. However, some packages like the
-            kernel, X.org, or graphics drivers should be tested on particular
-            hardware, and also run on a set of different platforms rather than
-            just a single virtual testbed.
-
-            This field can specify a list of abstract class names such as
-            "desktop" or "graphics-driver". Consumers of autopkgtest can then
-            map these class names to particular machines/platforms/policies.
-            Unknown class names should be ignored.
-
-            This is purely an informational field for autopkgtest itself and
-            will be ignored.
-            """
-        ),
-    ),
-    Deb822KnownField(
-        "Depends",
-        FieldValueClass.COMMA_SEPARATED_LIST,
-        default_value="@",
-        hover_text="""\
-            Declares that the specified packages must be installed for the test
-            to go ahead. This supports all features of dpkg dependencies, including
-            the architecture qualifiers (see
-            <https://www.debian.org/doc/debian-policy/ch-relationships.html>),
-            plus the following extensions:
-
-            `@` stands for the package(s) generated by the source package
-            containing the tests; each dependency (strictly, or-clause, which
-            may contain `|`s but not commas) containing `@` is replicated
-            once for each such binary package, with the binary package name
-            substituted for each `@` (but normally `@` should occur only
-            once and without a version restriction).
-
-            `@builddeps@` will be replaced by the package's
-            `Build-Depends:`, `Build-Depends-Indep:`, `Build-Depends-Arch:`, and
-            `build-essential`. This is useful if you have many build
-            dependencies which are only necessary for running the test suite and
-            you don't want to replicate them in the test `Depends:`. However,
-            please use this sparingly, as this can easily lead to missing binary
-            package dependencies being overlooked if they get pulled in via
-            build dependencies.
-
-            `@recommends@` stands for all the packages listed in the
-            `Recommends:` fields of all the binary packages mentioned in the
-            `debian/control` file. Please note that variables are stripped,
-            so if some required test dependencies aren't explicitly mentioned,
-            they may not be installed.
-
-            If no Depends field is present, `Depends: @` is assumed. Note that
-            the source tree's Build-Dependencies are *not* necessarily
-            installed, and if you specify any Depends, no binary packages from
-            the source are installed unless explicitly requested.
-            """,
-    ),
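The `@` substitution described above replicates each or-clause containing `@` once per binary package built from the source. A hedged sketch of the expansion (the names here are illustrative; autopkgtest's real implementation lives elsewhere, and this ignores the `@builddeps@`/`@recommends@` forms for brevity):

```python
def expand_at(depends: str, binary_packages: list[str]) -> str:
    """Replicate each comma-separated clause containing '@' once per binary package."""
    clauses = [c.strip() for c in depends.split(",") if c.strip()]
    expanded = []
    for clause in clauses:
        if "@" in clause:
            expanded.extend(clause.replace("@", pkg) for pkg in binary_packages)
        else:
            expanded.append(clause)
    return ", ".join(expanded)

print(expand_at("@, python3", ["foo", "foo-data"]))
# -> foo, foo-data, python3
```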

-

2242 Deb822KnownField( 

-

2243 "Features", 

-

2244 FieldValueClass.COMMA_OR_SPACE_SEPARATED_LIST, 

-

2245 hover_text=textwrap.dedent( 

-

2246 """\ 

-

2247 Declares some additional capabilities or good properties of the 

-

2248 tests defined in this stanza. Any unknown features declared will be 

-

2249 completely ignored. See below for the defined features. 

-

2250 

-

2251 Features are separated by commas and/or whitespace. 

-

2252 """ 

-

2253 ), 

-

2254 ), 

-

2255 Deb822KnownField( 

-

2256 "Restrictions", 

-

2257 FieldValueClass.COMMA_OR_SPACE_SEPARATED_LIST, 

-

2258 unknown_value_diagnostic_severity=DiagnosticSeverity.Warning, 

-

2259 known_values=_allowed_values( 

-

2260 Keyword( 

-

2261 "allow-stderr", 

-

2262 hover_text=textwrap.dedent( 

-

2263 """\ 

-

2264 Output to stderr is not considered a failure. This is useful for 

-

2265 tests which write e. g. lots of logging to stderr. 

-

2266 """ 

-

2267 ), 

-

2268 ), 

-

2269 Keyword( 

-

2270 "breaks-testbed", 

-

2271 hover_text=textwrap.dedent( 

-

2272 """\ 

-

2273 The test, when run, is liable to break the testbed system. This 

-

2274 includes causing data loss, causing services that the machine is 

-

2275 running to malfunction, or permanently disabling services; it does 

-

2276 not include causing services on the machine to temporarily fail. 

-

2277 

-

2278 When this restriction is present the test will usually be skipped 

-

2279 unless the testbed's virtualisation arrangements are sufficiently 

-

2280 powerful, or alternatively if the user explicitly requests. 

-

2281 """ 

-

2282 ), 

-

2283 ), 

-

2284 Keyword( 

-

2285 "build-needed", 

-

2286 hover_text=textwrap.dedent( 

-

2287 """\ 

-

2288 The tests need to be run from a built source tree. The test runner 

-

2289 will build the source tree (honouring the source package's build 

-

2290 dependencies), before running the tests. However, the tests are 

-

2291 *not* entitled to assume that the source package's build 

-

2292 dependencies will be installed when the test is run. 

-

2293 

-

2294 Please use this considerately, as for large builds it unnecessarily 

-

2295 builds the entire project when you only need a tiny subset (like the 

-

2296 `tests/` subdirectory). It is often possible to run `make -C tests` 

-

2297 instead, or copy the test code to `$AUTOPKGTEST_TMP` and build it 

-

2298 there with some custom commands. This cuts down the load on the 

-

2299 Continuous Integration servers and also makes tests more robust as 

-

2300 it prevents accidentally running them against the built source tree 

-

2301 instead of the installed packages. 

-

2302 """ 

-

2303 ), 

-

2304 ), 

-

2305 Keyword( 

-

2306 "flaky", 

-

2307 hover_text=textwrap.dedent( 

-

2308 """\ 

-

2309 The test is expected to fail intermittently, and is not suitable for 

-

2310 gating continuous integration. This indicates a bug in either the 

-

2311 package under test, a dependency or the test itself, but such bugs 

-

2312 can be difficult to fix, and it is often difficult to know when the 

-

2313 bug has been fixed without running the test for a while. If a 

-

2314 `flaky` test succeeds, it will be treated like any other 

-

2315 successful test, but if it fails it will be treated as though it 

-

2316 had been skipped. 

-

2317 """ 

-

2318 ), 

-

2319 ), 

-

2320 Keyword( 

-

2321 "hint-testsuite-triggers", 

-

2322 hover_text=textwrap.dedent( 

-

2323 """\ 

-

2324 This test exists purely as a hint to suggest when rerunning the 

-

2325 tests is likely to be useful. Specifically, it exists to 

-

2326 influence the way dpkg-source generates the Testsuite-Triggers 

-

2327 .dsc header from test metadata: the Depends for this test are 

-

2328 to be added to Testsuite-Triggers. (Just as they are for any other 

-

2329 test.) 

-

2330 

-

2331 The test with the hint-testsuite-triggers restriction should not 

-

2332 actually be run. 

-

2333 

-

2334 The packages listed as Depends for this test are usually indirect 

-

2335 dependencies, updates to which are considered to pose a risk of 

-

2336 regressions in other tests defined in this package. 

-

2337 

-

2338 There is currently no way to specify this hint on a per-test 

-

2339 basis; but in any case the debian.org machinery is not able to 

-

2340 think about triggering individual tests. 

-

2341 """ 

-

2342 ), 

-

2343 ), 

-

2344 Keyword( 

-

2345 "isolation-container", 

-

2346 hover_text=textwrap.dedent( 

-

2347 """\ 

-

2348 The test wants to start services or open network TCP ports. This 

-

2349 commonly fails in a simple chroot/schroot, so tests need to be run 

-

2350 in their own container (e. g. autopkgtest-virt-lxc) or their own 

-

2351 machine/VM (e. g. autopkgtest-virt-qemu or autopkgtest-virt-null). 

-

2352 When running the test in a virtualization server which does not 

-

2353 provide this (like autopkgtest-schroot) it will be skipped. 

-

2354 

-

2355 Tests may assume that this restriction implies that process 1 in the 

-

2356 container's process namespace is a system service manager (init system) 

-

2357 such as systemd or sysvinit + sysv-rc, and therefore system services 

-

2358 are available via the `service(8)`, `invoke-rc.d(8)` and 

-

2359 `update-rc.d(8))` interfaces. 

-

2360 

-

2361 Tests must not assume that a specific init system is in use: a 

-

2362 dependency such as `systemd-sysv` or `sysvinit-core` does not work 

-

2363 in practice, because switching the init system often cannot be done 

-

2364 automatically. Tests that require a specific init system should use the 

-

2365 `skippable` restriction, and skip the test if the required init system 

-

2366 was not detected. 

-

2367 

-

2368 Many implementations of the `isolation-container` restriction will 

-

2369 also provide `systemd-logind(8)` or a compatible interface, but this 

-

2370 is not guaranteed. Tests requiring a login session registered with 

-

2371 logind should declare a dependency on `default-logind | logind` 

-

2372 or on a more specific implementation of `logind`, and should use the 

-

2373 `skippable` restriction to exit gracefully if its functionality is 

-

2374 not available at runtime. 

-

2375 

-

2376 """ 

-

2377 ), 

-

2378 ), 

-

2379 Keyword( 

-

2380 "isolation-machine", 

-

2381 hover_text=textwrap.dedent( 

-

2382 """\ 

-

2383 The test wants to interact with the kernel, reboot the machine, or 

-

2384 other things which fail in a simple schroot and even a container. 

-

2385 Those tests need to be run in their own machine/VM (e. g. 

-

2386 autopkgtest-virt-qemu or autopkgtest-virt-null). When running the 

-

2387 test in a virtualization server which does not provide this it will 

-

2388 be skipped. 

-

2389 

-

2390 This restriction also provides the same facilities as 

-

2391 `isolation-container`. 

-

2392 """ 

-

2393 ), 

-

2394 ), 

-

2395 Keyword( 

-

2396 "needs-internet", 

-

2397 hover_text=textwrap.dedent( 

-

2398 """\ 

-

2399 The test needs unrestricted internet access, e.g. to download test data 

-

2400 that's not shipped as a package, or to test a protocol implementation 

-

2401 against a test server. Please also see the note about Network access later 

-

2402 in this document. 

-

2403 """ 

-

2404 ), 

-

2405 ), 

-

2406 Keyword( 

-

2407 "needs-reboot", 

-

2408 hover_text=textwrap.dedent( 

-

2409 """\ 

-

2410 The test wants to reboot the machine using 

-

2411 `/tmp/autopkgtest-reboot`. 

-

2412 """ 

-

2413 ), 

-

2414 ), 

-

2415 Keyword( 

-

2416 "needs-recommends", 

-

2417 is_obsolete=True, 

-

2418 hover_text=textwrap.dedent( 

-

2419 """\ 

-

2420 Please use `@recommends@` in your test `Depends:` instead. 

-

2421 """ 

-

2422 ), 

-

2423 ), 

-

2424 Keyword( 

-

2425 "needs-root", 

-

2426 hover_text=textwrap.dedent( 

-

2427 """\ 

-

2428 The test script must be run as root. 

-

2429 

-

2430 While running tests with this restriction, some test runners will 

-

2431 set the `AUTOPKGTEST_NORMAL_USER` environment variable to the name 

-

2432 of an ordinary user account. If so, the test script may drop 

-

2433 privileges from root to that user, for example via the `runuser` 

-

2434 command. Test scripts must not assume that this environment variable 

-

2435 will always be set. 

-

2436 

-

2437 For tests that declare both the `needs-root` and `isolation-machine` 

-

2438 restrictions, the test may assume that it has "global root" with full 

-

2439 control over the kernel that is running the test, and not just root 

-

2440 in a container (more formally, it has uid 0 and full capabilities in 

-

2441 the initial user namespace as defined in `user_namespaces(7)`). 

-

2442 For example, it can expect that mounting block devices will succeed. 

-

2443 

-

2444 For tests that declare the `needs-root` restriction but not the 

-

2445 `isolation-machine` restriction, the test will be run as uid 0 in 

-

2446 a user namespace with a reasonable range of system and user uids 

-

2447 available, but will not necessarily have full control over the kernel, 

-

2448 and in particular it is not guaranteed to have elevated capabilities 

-

2449 in the initial user namespace as defined by `user_namespaces(7)`. 

-

2450 For example, it might be run in a namespace where uid 0 is mapped to 

-

2451 an ordinary uid in the initial user namespace, or it might run in a 

-

2452 Docker-style container where global uid 0 is used but its ability to 

-

2453 carry out operations that affect the whole system is restricted by 

-

2454 capabilities and system call filtering. Tests requiring particular 

-

2455 privileges should use the `skippable` restriction to check for 

-

2456 required functionality at runtime. 

-

2457 """ 

-

2458 ), 

-

2459 ), 

-

2460 Keyword( 

-

2461 "needs-sudo", 

-

2462 hover_text=textwrap.dedent( 

-

2463 """\ 

-

2464 The test script needs to be run as a non-root user who is a member of 

-

2465 the `sudo` group, and has the ability to elevate privileges to root 

-

2466 on-demand. 

-

2467 

-

2468 This is useful for testing user components which should not normally 

-

2469 be run as root, in test scenarios that require configuring a system 

-

2470 service to support the test. For example, gvfs has a test-case which 

-

2471 uses sudo for privileged configuration of a Samba server, so that 

-

2472 the unprivileged gvfs service under test can communicate with that server. 

-

2473 

-

2474 While running a test with this restriction, `sudo(8)` will be 

-

2475 installed and configured to allow members of the `sudo` group to run 

-

2476 any command without password authentication. 

-

2477 

-

2478 Because the test user is a member of the `sudo` group, they will 

-

2479 also gain the ability to take any other privileged actions that are 

-

2480 controlled by membership in that group. In particular, several packages 

-

2481 install `polkit(8)` policies allowing members of group `sudo` to 

-

2482 take administrative actions with or without authentication. 

-

2483 

-

2484 If the test requires access to additional privileged actions, it may 

-

2485 use its access to `sudo(8)` to install additional configuration 

-

2486 files, for example configuring `polkit(8)` or `doas.conf(5)` 

-

2487 to allow running `pkexec(1)` or `doas(1)` without authentication. 

-

2488 

-

2489 Commands run via `sudo(8)` or another privilege-elevation tool could 

-

2490 be run with either "global root" or root in a container, depending 

-

2491 on the presence or absence of the `isolation-machine` restriction, 

-

2492 in the same way described for `needs-root`. 

-

2493 """ 

-

2494 ), 

-

2495 ), 

-

2496 Keyword( 

-

2497 "rw-build-tree", 

-

2498 hover_text=textwrap.dedent( 

-

2499 """\ 

-

2500 The test(s) needs write access to the built source tree (so it may 

-

2501 need to be copied first). Even with this restriction, the test is 

-

2502 not allowed to make any change to the built source tree which (i) 

-

2503 isn't cleaned up by `debian/rules clean`, (ii) affects the future 

-

2504 results of any test, or (iii) affects binary packages produced by 

-

2505 the build tree in the future. 

-

2506 """ 

-

2507 ), 

-

2508 ), 

-

2509 Keyword( 

-

2510 "skip-not-installable", 

-

2511 hover_text=textwrap.dedent( 

-

2512 """\ 

-

2513 This restrictions may cause a test to miss a regression due to 

-

2514 installability issues, so use with caution. If one only wants to 

-

2515 skip certain architectures, use the `Architecture` field for 

-

2516 that. 

-

2517 

-

2518 This test might have test dependencies that can't be fulfilled in 

-

2519 all suites or in derivatives. Therefore, when apt-get installs the 

-

2520 test dependencies, it will fail. Don't treat this as a test 

-

2521 failure, but instead treat it as if the test was skipped. 

-

2522 """ 

-

2523 ), 

-

2524 ), 

-

2525 Keyword( 

-

2526 "skippable", 

-

2527 hover_text=textwrap.dedent( 

-

2528 """\ 

-

2529 The test might need to be skipped for reasons that cannot be 

-

2530 described by an existing restriction such as isolation-machine or 

-

2531 breaks-testbed, but must instead be detected at runtime. If the 

-

2532 test exits with status 77 (a convention borrowed from Automake), it 

-

2533 will be treated as though it had been skipped. If it exits with any 

-

2534 other status, its success or failure will be derived from the exit 

-

2535 status and stderr as usual. Test authors must be careful to ensure 

-

2536 that `skippable` tests never exit with status 77 for reasons that 

-

2537 should be treated as a failure. 

-

2538 """ 

-

2539 ), 

-

2540 ), 

-

-            Keyword(
-                "superficial",
-                hover_text=textwrap.dedent(
-                    """\
-                    The test does not provide significant test coverage, so if it
-                    passes, that does not necessarily mean that the package under test
-                    is actually functional. If a `superficial` test fails, it will be
-                    treated like any other failing test, but if it succeeds, this is
-                    only a weak indication of success. Continuous integration systems
-                    should treat a package where all non-superficial tests are skipped as
-                    equivalent to a package where all tests are skipped.
-
-                    For example, a C library might have a superficial test that simply
-                    compiles, links and executes a "hello world" program against the
-                    library under test but does not attempt to make use of the library's
-                    functionality, while a Python or Perl library might have a
-                    superficial test that runs `import foo` or `require Foo;` but
-                    does not attempt to use the library beyond that.
-                    """
-                ),
-            ),
-        ),

-

-        hover_text=textwrap.dedent(
-            """\
-            Declares some restrictions or problems with the tests defined in
-            this stanza. Depending on the test environment capabilities, user
-            requests, and so on, restrictions can cause tests to be skipped or
-            can cause the test to be run in a different manner. Tests which
-            declare unknown restrictions will be skipped. See below for the
-            defined restrictions.
-
-            Restrictions are separated by commas and/or whitespace.
-            """
-        ),
-    ),

-

-    Deb822KnownField(
-        "Tests",
-        FieldValueClass.COMMA_OR_SPACE_SEPARATED_LIST,
-        hover_text=textwrap.dedent(
-            """\
-            This field names the tests which are defined by this stanza and which
-            map to executables/scripts in the test directory. All of the other
-            fields in the same stanza apply to all of the named tests. Either
-            this field or `Test-Command:` must be present.
-
-            Test names are separated by comma and/or whitespace and should
-            contain only characters which are legal in package names. It is
-            permitted, but not encouraged, to use upper-case characters as well.
-            """
-        ),
-    ),

-

-    Deb822KnownField(
-        "Test-Command",
-        FieldValueClass.FREE_TEXT_FIELD,
-        hover_text=textwrap.dedent(
-            """\
-            If your test only contains a shell command or two, or you want to
-            reuse an existing upstream test executable and just need to wrap it
-            with some command like `dbus-launch` or `env`, you can use this
-            field to specify the shell command directly. It will be run under
-            `bash -e`. This is mutually exclusive with the `Tests:` field.
-
-            This is also useful for running the same script under different
-            interpreters and/or with different dependencies, such as
-            `Test-Command: python debian/tests/mytest.py` and
-            `Test-Command: python3 debian/tests/mytest.py`.
-            """
-        ),
-    ),
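
Since `Tests:` and `Test-Command:` are mutually exclusive, a stanza must carry exactly one of the two. An illustrative check (editorial example, not part of the deleted file) using python-debian's `Deb822` parser, which is assumed to be installed:

from debian.deb822 import Deb822

EXAMPLE = """\
Tests: smoke regression
Depends: @, python3
Restrictions: superficial
"""

for stanza in Deb822.iter_paragraphs(EXAMPLE):
    has_tests = "Tests" in stanza
    has_command = "Test-Command" in stanza
    # Exactly one of the two fields must be present per stanza.
    if has_tests == has_command:
        print("error: need exactly one of Tests / Test-Command")
    else:
        print("ok:", stanza.get("Tests") or stanza.get("Test-Command"))
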

-

-    Deb822KnownField(
-        "Test-Directory",
-        FieldValueClass.FREE_TEXT_FIELD,  # TODO: Single path
-        hover_text=textwrap.dedent(
-            """\
-            Replaces the path segment `debian/tests` in the filenames of the
-            test programs with `path`. I.e., the tests are run by executing
-            `built/source/tree/path/testname`. `path` must be a relative
-            path and is interpreted starting from the root of the built source
-            tree.
-
-            This allows tests to live outside the `debian/` metadata area, so that
-            they can more palatably be shared with non-Debian distributions.
-            """
-        ),
-    ),
-)

-

-
-
-@dataclasses.dataclass(slots=True, frozen=True)
-class StanzaMetadata(Mapping[str, F], Generic[F], ABC):
-    stanza_type_name: str
-    stanza_fields: Mapping[str, F]
-
-    def stanza_diagnostics(
-        self,
-        stanza: Deb822ParagraphElement,
-        stanza_position_in_file: "TEPosition",
-    ) -> Iterable[Diagnostic]:
-        raise NotImplementedError
-
-    def __getitem__(self, key: str) -> F:
-        key_lc = key.lower()
-        key_norm = normalize_dctrl_field_name(key_lc)
-        return self.stanza_fields[key_norm]
-
-    def __len__(self) -> int:
-        return len(self.stanza_fields)
-
-    def __iter__(self):
-        return iter(self.stanza_fields.keys())
-
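
The `Mapping` protocol above makes field lookup case-insensitive. A small editorial sketch (not part of the deleted file) of the same lookup pattern; the normalizer here is a stand-in, as the real `normalize_dctrl_field_name()` from debputy.lsp.text_util may additionally fold prefixes such as X[BCS]- (compare the duplicate-field diagnostic later in this patch):

from typing import Mapping

def lookup(fields: Mapping[str, str], key: str) -> str:
    # Mirrors StanzaMetadata.__getitem__: lowercase first, then normalize.
    return fields[key.lower()]

fields = {"tests": "<field metadata>", "test-command": "<field metadata>"}
assert lookup(fields, "Tests") == lookup(fields, "TESTS") == "<field metadata>"
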

-

-
-
-@dataclasses.dataclass(slots=True, frozen=True)
-class Dep5StanzaMetadata(StanzaMetadata[Deb822KnownField]):
-    def stanza_diagnostics(
-        self,
-        stanza: Deb822ParagraphElement,
-        stanza_position_in_file: "TEPosition",
-    ) -> Iterable[Diagnostic]:
-        pass
-
-
-@dataclasses.dataclass(slots=True, frozen=True)
-class DctrlStanzaMetadata(StanzaMetadata[DctrlKnownField]):
-
-    def stanza_diagnostics(
-        self,
-        stanza: Deb822ParagraphElement,
-        stanza_position_in_file: "TEPosition",
-    ) -> Iterable[Diagnostic]:
-        pass
-
-
-@dataclasses.dataclass(slots=True, frozen=True)
-class DTestsCtrlStanzaMetadata(StanzaMetadata[Deb822KnownField]):
-
-    def stanza_diagnostics(
-        self,
-        stanza: Deb822ParagraphElement,
-        stanza_position_in_file: "TEPosition",
-    ) -> Iterable[Diagnostic]:
-        pass
-
-

-

-class Deb822FileMetadata(Generic[S]):
-    def classify_stanza(self, stanza: Deb822ParagraphElement, stanza_idx: int) -> S:
-        return self.guess_stanza_classification_by_idx(stanza_idx)
-
-    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> S:
-        raise NotImplementedError
-
-    def stanza_types(self) -> Iterable[S]:
-        raise NotImplementedError
-
-    def __getitem__(self, item: str) -> S:
-        raise NotImplementedError
-
-    def file_diagnostics(
-        self,
-        file: Deb822FileElement,
-    ) -> Iterable[Diagnostic]:
-        raise NotImplementedError
-
-    def get(self, item: str) -> Optional[S]:
-        try:
-            return self[item]
-        except KeyError:
-            return None
-
-

-

-_DCTRL_SOURCE_STANZA = DctrlStanzaMetadata(
-    "Source",
-    SOURCE_FIELDS,
-)
-_DCTRL_PACKAGE_STANZA = DctrlStanzaMetadata("Package", BINARY_FIELDS)
-
-_DEP5_HEADER_STANZA = Dep5StanzaMetadata(
-    "Header",
-    _DEP5_HEADER_FIELDS,
-)
-_DEP5_FILES_STANZA = Dep5StanzaMetadata(
-    "Files",
-    _DEP5_FILES_FIELDS,
-)
-_DEP5_LICENSE_STANZA = Dep5StanzaMetadata(
-    "License",
-    _DEP5_LICENSE_FIELDS,
-)
-
-_DTESTSCTRL_STANZA = DTestsCtrlStanzaMetadata("Tests", _DTESTSCTRL_FIELDS)
-

-

-
-class Dep5FileMetadata(Deb822FileMetadata[Dep5StanzaMetadata]):
-    def classify_stanza(self, stanza: Deb822ParagraphElement, stanza_idx: int) -> S:
-        if stanza_idx == 0:
-            return _DEP5_HEADER_STANZA
-        if stanza_idx > 0:
-            if "Files" in stanza:
-                return _DEP5_FILES_STANZA
-            return _DEP5_LICENSE_STANZA
-        raise ValueError("The stanza_idx must be 0 or greater")
-
-    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> S:
-        if stanza_idx == 0:
-            return _DEP5_HEADER_STANZA
-        if stanza_idx > 0:
-            return _DEP5_FILES_STANZA
-        raise ValueError("The stanza_idx must be 0 or greater")
-
-    def stanza_types(self) -> Iterable[S]:
-        yield _DEP5_HEADER_STANZA
-        yield _DEP5_FILES_STANZA
-        yield _DEP5_LICENSE_STANZA
-
-    def __getitem__(self, item: str) -> S:
-        if item == "Header":
-            return _DEP5_HEADER_STANZA
-        if item == "Files":
-            return _DEP5_FILES_STANZA
-        if item == "License":
-            return _DEP5_LICENSE_STANZA
-        raise KeyError(item)
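
An editorial sketch (not part of the deleted file) of how the classification above falls out for a typical DEP-5 debian/copyright file. Plain dicts stand in for Deb822ParagraphElement here, which works because classify_stanza only uses `"Files" in stanza`:

stanzas = [
    {"Format": "https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/"},
    {"Files": "*", "Copyright": "2024 Jane Doe", "License": "MIT"},
    {"License": "MIT"},
]
meta = Dep5FileMetadata()
for idx, stanza in enumerate(stanzas):
    st = meta.classify_stanza(stanza, idx)
    # -> "Header", then "Files" (has a Files field), then "License"
    print(idx, st.stanza_type_name)
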

-

-
-
-class DctrlFileMetadata(Deb822FileMetadata[DctrlStanzaMetadata]):
-    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> S:
-        if stanza_idx == 0:
-            return _DCTRL_SOURCE_STANZA
-        if stanza_idx > 0:
-            return _DCTRL_PACKAGE_STANZA
-        raise ValueError("The stanza_idx must be 0 or greater")
-
-    def stanza_types(self) -> Iterable[S]:
-        yield _DCTRL_SOURCE_STANZA
-        yield _DCTRL_PACKAGE_STANZA
-
-    def __getitem__(self, item: str) -> S:
-        if item == "Source":
-            return _DCTRL_SOURCE_STANZA
-        if item == "Package":
-            return _DCTRL_PACKAGE_STANZA
-        raise KeyError(item)
-
-
-class DTestsCtrlFileMetadata(Deb822FileMetadata[DTestsCtrlStanzaMetadata]):
-    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> S:
-        if stanza_idx >= 0:
-            return _DTESTSCTRL_STANZA
-        raise ValueError("The stanza_idx must be 0 or greater")
-
-    def stanza_types(self) -> Iterable[S]:
-        yield _DTESTSCTRL_STANZA
-
-    def __getitem__(self, item: str) -> S:
-        if item == "Tests":
-            return _DTESTSCTRL_STANZA
-        raise KeyError(item)

-
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_copyright_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_copyright_py.html
deleted file mode 100644
index 901b958..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_copyright_py.html
+++ /dev/null
@@ -1,604 +0,0 @@
- [coverage.py v7.2.7 report page for src/debputy/lsp/lsp_debian_copyright.py: 18% of 173 statements covered, created at 2024-04-07 12:14 +0200. Page chrome (title, navigation, footer) omitted; the reported source follows.]
-import re
-from typing import (
-    Union,
-    Sequence,
-    Tuple,
-    Iterator,
-    Optional,
-    Iterable,
-    Mapping,
-    List,
-)
-
-from lsprotocol.types import (
-    DiagnosticSeverity,
-    Range,
-    Diagnostic,
-    Position,
-    CompletionItem,
-    CompletionList,
-    CompletionParams,
-    TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL,
-    DiagnosticRelatedInformation,
-    Location,
-    HoverParams,
-    Hover,
-    TEXT_DOCUMENT_CODE_ACTION,
-    SemanticTokens,
-    SemanticTokensParams,
-    FoldingRangeParams,
-    FoldingRange,
-)
-
-from debputy.linting.lint_util import LintState
-from debputy.lsp.lsp_debian_control_reference_data import (
-    _DEP5_HEADER_FIELDS,
-    _DEP5_FILES_FIELDS,
-    Deb822KnownField,
-    _DEP5_LICENSE_FIELDS,
-    Dep5FileMetadata,
-)
-from debputy.lsp.lsp_features import (
-    lint_diagnostics,
-    lsp_completer,
-    lsp_hover,
-    lsp_standard_handler,
-    lsp_folding_ranges,
-    lsp_semantic_tokens_full,
-)
-from debputy.lsp.lsp_generic_deb822 import (
-    deb822_completer,
-    deb822_hover,
-    deb822_folding_ranges,
-    deb822_semantic_tokens_full,
-)
-from debputy.lsp.quickfixes import (
-    propose_correct_text_quick_fix,
-)
-from debputy.lsp.spellchecking import default_spellchecker
-from debputy.lsp.text_util import (
-    normalize_dctrl_field_name,
-    LintCapablePositionCodec,
-    detect_possible_typo,
-    te_range_to_lsp,
-)
-from debputy.lsp.vendoring._deb822_repro import (
-    parse_deb822_file,
-    Deb822FileElement,
-    Deb822ParagraphElement,
-)
-from debputy.lsp.vendoring._deb822_repro.parsing import (
-    Deb822KeyValuePairElement,
-    LIST_SPACE_SEPARATED_INTERPRETATION,
-)
-from debputy.lsp.vendoring._deb822_repro.tokens import (
-    Deb822Token,
-)
-
-try:
-    from debputy.lsp.vendoring._deb822_repro.locatable import (
-        Position as TEPosition,
-        Range as TERange,
-        START_POSITION,
-    )
-
-    from pygls.server import LanguageServer
-    from pygls.workspace import TextDocument
-except ImportError:
-    pass
-
-
-_CONTAINS_SPACE_OR_COLON = re.compile(r"[\s:]")
-_LANGUAGE_IDS = [
-    "debian/copyright",
-    # emacs's name
-    "debian-copyright",
-    # vim's name
-    "debcopyright",
-]
-
-_DEP5_FILE_METADATA = Dep5FileMetadata()
-
-lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_CODE_ACTION)
-lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)
-
-
-@lsp_hover(_LANGUAGE_IDS)
-def _debian_copyright_hover(
-    ls: "LanguageServer",
-    params: HoverParams,
-) -> Optional[Hover]:
-    return deb822_hover(ls, params, _DEP5_FILE_METADATA)
-
-
-@lsp_completer(_LANGUAGE_IDS)
-def _debian_copyright_completions(
-    ls: "LanguageServer",
-    params: CompletionParams,
-) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
-    return deb822_completer(ls, params, _DEP5_FILE_METADATA)
-
-
-@lsp_folding_ranges(_LANGUAGE_IDS)
-def _debian_copyright_folding_ranges(
-    ls: "LanguageServer",
-    params: FoldingRangeParams,
-) -> Optional[Sequence[FoldingRange]]:
-    return deb822_folding_ranges(ls, params, _DEP5_FILE_METADATA)
-
-
-def _deb822_token_iter(
-    tokens: Iterable[Deb822Token],
-) -> Iterator[Tuple[Deb822Token, int, int, int, int]]:
-    line_no = 0
-    line_offset = 0
-
-    for token in tokens:
-        start_line = line_no
-        start_line_offset = line_offset
-
-        newlines = token.text.count("\n")
-        line_no += newlines
-        text_len = len(token.text)
-        if newlines:
-            if token.text.endswith("\n"):
-                line_offset = 0
-            else:
-                # -2, one to remove the "\n" and one to get 0-offset
-                line_offset = text_len - token.text.rindex("\n") - 2
-        else:
-            line_offset += text_len
-
-        yield token, start_line, start_line_offset, line_no, line_offset
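
An editorial sketch (not part of the deleted file) exercising the line/offset bookkeeping above with a stand-in token type; only `.text` is read by the iterator, so duck typing suffices:

from dataclasses import dataclass

@dataclass
class FakeToken:  # stand-in for Deb822Token; only .text is needed here
    text: str

tokens = [FakeToken("Files:"), FakeToken(" *\n"), FakeToken("License: MIT\n")]
for tok, s_line, s_off, e_line, e_off in _deb822_token_iter(tokens):
    # "Files:" starts at (0, 0); the " *\n" token ends its line, so the
    # following token starts at (1, 0).
    print(repr(tok.text), (s_line, s_off), "->", (e_line, e_off))
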

-

-
-
-def _paragraph_representation_field(
-    paragraph: Deb822ParagraphElement,
-) -> Deb822KeyValuePairElement:
-    return next(iter(paragraph.iter_parts_of_type(Deb822KeyValuePairElement)))
-
-
-def _diagnostics_for_paragraph(
-    stanza: Deb822ParagraphElement,
-    stanza_position: "TEPosition",
-    known_fields: Mapping[str, Deb822KnownField],
-    other_known_fields: Mapping[str, Deb822KnownField],
-    is_files_or_license_paragraph: bool,
-    doc_reference: str,
-    position_codec: "LintCapablePositionCodec",
-    lines: List[str],
-    diagnostics: List[Diagnostic],
-) -> None:
-    representation_field = _paragraph_representation_field(stanza)
-    representation_field_pos = representation_field.position_in_parent().relative_to(
-        stanza_position
-    )
-    representation_field_range_server_units = te_range_to_lsp(
-        TERange.from_position_and_size(
-            representation_field_pos, representation_field.size()
-        )
-    )
-    representation_field_range = position_codec.range_to_client_units(
-        lines,
-        representation_field_range_server_units,
-    )
-    for known_field in known_fields.values():
-        missing_field_severity = known_field.missing_field_severity
-        if missing_field_severity is None or known_field.name in stanza:
-            continue
-
-        diagnostics.append(
-            Diagnostic(
-                representation_field_range,
-                f"Stanza is missing field {known_field.name}",
-                severity=missing_field_severity,
-                source="debputy",
-            )
-        )
-
-    seen_fields = {}
-
-    for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement):
-        field_name_token = kvpair.field_token
-        field_name = field_name_token.text
-        field_name_lc = field_name.lower()
-        normalized_field_name_lc = normalize_dctrl_field_name(field_name_lc)
-        known_field = known_fields.get(normalized_field_name_lc)
-        field_value = stanza[field_name]
-        field_range_te = kvpair.range_in_parent().relative_to(stanza_position)
-        field_position_te = field_range_te.start_pos
-        field_range_server_units = te_range_to_lsp(field_range_te)
-        field_range = position_codec.range_to_client_units(
-            lines,
-            field_range_server_units,
-        )
-        field_name_typo_detected = False
-        existing_field_range = seen_fields.get(normalized_field_name_lc)
-        if existing_field_range is not None:
-            existing_field_range[3].append(field_range)
-        else:
-            normalized_field_name = normalize_dctrl_field_name(field_name)
-            seen_fields[normalized_field_name_lc] = (
-                field_name,
-                normalized_field_name,
-                field_range,
-                [],
-            )
-
-        if known_field is None:
-            candidates = detect_possible_typo(normalized_field_name_lc, known_fields)
-            if candidates:
-                known_field = known_fields[candidates[0]]
-                token_range_server_units = te_range_to_lsp(
-                    TERange.from_position_and_size(
-                        field_position_te, kvpair.field_token.size()
-                    )
-                )
-                field_range = position_codec.range_to_client_units(
-                    lines,
-                    token_range_server_units,
-                )
-                field_name_typo_detected = True
-                diagnostics.append(
-                    Diagnostic(
-                        field_range,
-                        f'The "{field_name}" looks like a typo of "{known_field.name}".',
-                        severity=DiagnosticSeverity.Warning,
-                        source="debputy",
-                        data=[
-                            propose_correct_text_quick_fix(known_fields[m].name)
-                            for m in candidates
-                        ],
-                    )
-                )
-        if known_field is None:
-            known_else_where = other_known_fields.get(normalized_field_name_lc)
-            if known_else_where is not None:
-                intended_usage = (
-                    "Header" if is_files_or_license_paragraph else "Files/License"
-                )
-                diagnostics.append(
-                    Diagnostic(
-                        field_range,
-                        f'The {field_name} is defined for use in the "{intended_usage}" stanza.'
-                        f" Please move it to the right place or remove it",
-                        severity=DiagnosticSeverity.Error,
-                        source="debputy",
-                    )
-                )
-            continue
-
-        if field_value.strip() == "":
-            diagnostics.append(
-                Diagnostic(
-                    field_range,
-                    f"The {field_name} has no value. Either provide a value or remove it.",
-                    severity=DiagnosticSeverity.Error,
-                    source="debputy",
-                )
-            )
-            continue
-        diagnostics.extend(
-            known_field.field_diagnostics(
-                kvpair,
-                stanza,
-                stanza_position,
-                position_codec,
-                lines,
-                field_name_typo_reported=field_name_typo_detected,
-            )
-        )
-        if known_field.spellcheck_value:
-            words = kvpair.interpret_as(LIST_SPACE_SEPARATED_INTERPRETATION)
-            spell_checker = default_spellchecker()
-            value_position = kvpair.value_element.position_in_parent().relative_to(
-                field_position_te
-            )
-            for word_ref in words.iter_value_references():
-                token = word_ref.value
-                for word, pos, endpos in spell_checker.iter_words(token):
-                    corrections = spell_checker.provide_corrections_for(word)
-                    if not corrections:
-                        continue
-                    word_loc = word_ref.locatable
-                    word_pos_te = word_loc.position_in_parent().relative_to(
-                        value_position
-                    )
-                    if pos:
-                        word_pos_te = TEPosition(0, pos).relative_to(word_pos_te)
-                    word_range = TERange(
-                        START_POSITION,
-                        TEPosition(0, endpos - pos),
-                    )
-                    word_range_server_units = te_range_to_lsp(
-                        TERange.from_position_and_size(word_pos_te, word_range)
-                    )
-                    word_range = position_codec.range_to_client_units(
-                        lines,
-                        word_range_server_units,
-                    )
-                    diagnostics.append(
-                        Diagnostic(
-                            word_range,
-                            f'Spelling "{word}"',
-                            severity=DiagnosticSeverity.Hint,
-                            source="debputy",
-                            data=[
-                                propose_correct_text_quick_fix(c) for c in corrections
-                            ],
-                        )
-                    )
-        if known_field.warn_if_default and field_value == known_field.default_value:
-            diagnostics.append(
-                Diagnostic(
-                    field_range,
-                    f"The {field_name} is redundant as it is set to the default value and the field should only be"
-                    " used in exceptional cases.",
-                    severity=DiagnosticSeverity.Warning,
-                    source="debputy",
-                )
-            )
-    for (
-        field_name,
-        normalized_field_name,
-        field_range,
-        duplicates,
-    ) in seen_fields.values():
-        if not duplicates:
-            continue
-        related_information = [
-            DiagnosticRelatedInformation(
-                location=Location(doc_reference, field_range),
-                message=f"First definition of {field_name}",
-            )
-        ]
-        related_information.extend(
-            DiagnosticRelatedInformation(
-                location=Location(doc_reference, r),
-                message=f"Duplicate of {field_name}",
-            )
-            for r in duplicates
-        )
-        for dup_range in duplicates:
-            diagnostics.append(
-                Diagnostic(
-                    dup_range,
-                    f"The {normalized_field_name} field name was used multiple times in this stanza."
-                    f" Please ensure the field is only used once per stanza. Note that {normalized_field_name} and"
-                    f" X[BCS]-{normalized_field_name} are considered the same field.",
-                    severity=DiagnosticSeverity.Error,
-                    source="debputy",
-                    related_information=related_information,
-                )
-            )
-

-

-
-def _scan_for_syntax_errors_and_token_level_diagnostics(
-    deb822_file: Deb822FileElement,
-    position_codec: LintCapablePositionCodec,
-    lines: List[str],
-    diagnostics: List[Diagnostic],
-) -> int:
-    first_error = len(lines) + 1
-    spell_checker = default_spellchecker()
-    for (
-        token,
-        start_line,
-        start_offset,
-        end_line,
-        end_offset,
-    ) in _deb822_token_iter(deb822_file.iter_tokens()):
-        if token.is_error:
-            first_error = min(first_error, start_line)
-            start_pos = Position(
-                start_line,
-                start_offset,
-            )
-            end_pos = Position(
-                end_line,
-                end_offset,
-            )
-            token_range = position_codec.range_to_client_units(
-                lines, Range(start_pos, end_pos)
-            )
-            diagnostics.append(
-                Diagnostic(
-                    token_range,
-                    "Syntax error",
-                    severity=DiagnosticSeverity.Error,
-                    source="debputy (python-debian parser)",
-                )
-            )
-        elif token.is_comment:
-            for word, pos, end_pos in spell_checker.iter_words(token.text):
-                corrections = spell_checker.provide_corrections_for(word)
-                if not corrections:
-                    continue
-                start_pos = Position(
-                    start_line,
-                    pos,
-                )
-                end_pos = Position(
-                    start_line,
-                    end_pos,
-                )
-                word_range = position_codec.range_to_client_units(
-                    lines, Range(start_pos, end_pos)
-                )
-                diagnostics.append(
-                    Diagnostic(
-                        word_range,
-                        f'Spelling "{word}"',
-                        severity=DiagnosticSeverity.Hint,
-                        source="debputy",
-                        data=[propose_correct_text_quick_fix(c) for c in corrections],
-                    )
-                )
-    return first_error
-
-
-@lint_diagnostics(_LANGUAGE_IDS)
-def _lint_debian_copyright(
-    lint_state: LintState,
-) -> Optional[List[Diagnostic]]:
-    lines = lint_state.lines
-    position_codec = lint_state.position_codec
-    doc_reference = lint_state.doc_uri
-    diagnostics = []
-    deb822_file = parse_deb822_file(
-        lines,
-        accept_files_with_duplicated_fields=True,
-        accept_files_with_error_tokens=True,
-    )
-
-    first_error = _scan_for_syntax_errors_and_token_level_diagnostics(
-        deb822_file,
-        position_codec,
-        lines,
-        diagnostics,
-    )
-
-    paragraphs = list(deb822_file)
-    is_dep5 = False
-
-    for paragraph_no, paragraph in enumerate(paragraphs, start=1):
-        paragraph_pos = paragraph.position_in_file()
-        if paragraph_pos.line_position >= first_error:
-            break
-        is_files_or_license_paragraph = paragraph_no != 1
-        if is_files_or_license_paragraph:
-            known_fields = (
-                _DEP5_FILES_FIELDS if "Files" in paragraph else _DEP5_LICENSE_FIELDS
-            )
-            other_known_fields = _DEP5_HEADER_FIELDS
-        elif "Format" in paragraph:
-            is_dep5 = True
-            known_fields = _DEP5_HEADER_FIELDS
-            other_known_fields = _DEP5_FILES_FIELDS
-        else:
-            break
-        _diagnostics_for_paragraph(
-            paragraph,
-            paragraph_pos,
-            known_fields,
-            other_known_fields,
-            is_files_or_license_paragraph,
-            doc_reference,
-            position_codec,
-            lines,
-            diagnostics,
-        )
-    if not is_dep5:
-        return None
-    return diagnostics
-
-
-@lsp_semantic_tokens_full(_LANGUAGE_IDS)
-def _semantic_tokens_full(
-    ls: "LanguageServer",
-    request: SemanticTokensParams,
-) -> Optional[SemanticTokens]:
-    return deb822_semantic_tokens_full(
-        ls,
-        request,
-        _DEP5_FILE_METADATA,
-    )
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_debputy_manifest_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_debputy_manifest_py.html
deleted file mode 100644
index d21b364..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_debputy_manifest_py.html
+++ /dev/null
@@ -1,1134 +0,0 @@
- [coverage.py v7.2.7 report page for src/debputy/lsp/lsp_debian_debputy_manifest.py: 77% of 467 statements covered, created at 2024-04-07 12:14 +0200. Page chrome (title, navigation, footer) omitted; the reported source follows.]
-from typing import (
-    Optional,
-    List,
-    Any,
-    Tuple,
-    Union,
-    Iterable,
-    Sequence,
-    Literal,
-    get_args,
-    get_origin,
-)
-
-from lsprotocol.types import (
-    Diagnostic,
-    TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL,
-    Position,
-    Range,
-    DiagnosticSeverity,
-    HoverParams,
-    Hover,
-    MarkupKind,
-    MarkupContent,
-    TEXT_DOCUMENT_CODE_ACTION,
-    CompletionParams,
-    CompletionList,
-    CompletionItem,
-    DiagnosticRelatedInformation,
-    Location,
-)
-
-from debputy.linting.lint_util import LintState
-from debputy.lsp.quickfixes import propose_correct_text_quick_fix
-from debputy.manifest_parser.base_types import DebputyDispatchableType
-from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
-from debputy.yaml.compat import (
-    Node,
-    CommentedMap,
-    LineCol,
-    CommentedSeq,
-    CommentedBase,
-    MarkedYAMLError,
-    YAMLError,
-)
-
-from debputy.highlevel_manifest import MANIFEST_YAML
-from debputy.lsp.lsp_features import (
-    lint_diagnostics,
-    lsp_standard_handler,
-    lsp_hover,
-    lsp_completer,
-)
-from debputy.lsp.text_util import (
-    LintCapablePositionCodec,
-    detect_possible_typo,
-)
-from debputy.manifest_parser.declarative_parser import (
-    AttributeDescription,
-    ParserGenerator,
-    DeclarativeNonMappingInputParser,
-)
-from debputy.manifest_parser.declarative_parser import DeclarativeMappingInputParser
-from debputy.manifest_parser.parser_doc import (
-    render_rule,
-    render_attribute_doc,
-    doc_args_for_parser_doc,
-)
-from debputy.manifest_parser.util import AttributePath
-from debputy.plugin.api.impl import plugin_metadata_for_debputys_own_plugin
-from debputy.plugin.api.impl_types import (
-    OPARSER_MANIFEST_ROOT,
-    DeclarativeInputParser,
-    DispatchingParserBase,
-    DebputyPluginMetadata,
-    ListWrappedDeclarativeInputParser,
-    InPackageContextParser,
-    DeclarativeValuelessKeywordInputParser,
-)
-from debputy.util import _info, _warn
-
-
-try:
-    from pygls.server import LanguageServer
-    from debputy.lsp.debputy_ls import DebputyLanguageServer
-except ImportError:
-    pass
-
-
-_LANGUAGE_IDS = [
-    "debian/debputy.manifest",
-    "debputy.manifest",
-    # LSP's official language ID for YAML files
-    "yaml",
-]
-
-
-lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_CODE_ACTION)
-lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)
-
-
-def is_valid_file(path: str) -> bool:
-    # For debian/debputy.manifest, the language ID is often set to makefile meaning we get random
-    # "non-debian/debputy.manifest" YAML files here. Skip those.
-    return path.endswith("debian/debputy.manifest")
-
-
-def _word_range_at_position(
-    lines: List[str],
-    line_no: int,
-    char_offset: int,
-) -> Range:
-    line = lines[line_no]
-    line_len = len(line)
-    start_idx = char_offset
-    end_idx = char_offset
-    while end_idx + 1 < line_len and not line[end_idx + 1].isspace():
-        end_idx += 1
-
-    while start_idx - 1 >= 0 and not line[start_idx - 1].isspace():
-        start_idx -= 1
-
-    return Range(
-        Position(line_no, start_idx),
-        Position(line_no, end_idx),
-    )
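
A worked editorial example (not part of the deleted file) of the whitespace word expansion above, assuming the function is in scope; lsprotocol positions are zero-based:

lines = ["installations: not-a-key\n"]
rng = _word_range_at_position(lines, 0, 17)  # cursor inside "not-a-key"
# The range expands left and right to the nearest whitespace boundaries:
assert (rng.start.character, rng.end.character) == (15, 23)
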

-

-
-
-@lint_diagnostics(_LANGUAGE_IDS)
-def _lint_debian_debputy_manifest(
-    lint_state: LintState,
-) -> Optional[List[Diagnostic]]:
-    lines = lint_state.lines
-    position_codec = lint_state.position_codec
-    doc_reference = lint_state.doc_uri
-    path = lint_state.path
-    if not is_valid_file(path):
-        return None
-    diagnostics = []
-    try:
-        content = MANIFEST_YAML.load("".join(lines))
-    except MarkedYAMLError as e:
-        if e.context_mark:
-            line = e.context_mark.line
-            column = e.context_mark.column + 1
-        else:
-            line = e.problem_mark.line
-            column = e.problem_mark.column + 1
-        error_range = position_codec.range_to_client_units(
-            lines,
-            _word_range_at_position(
-                lines,
-                line,
-                column,
-            ),
-        )
-        diagnostics.append(
-            Diagnostic(
-                error_range,
-                f"YAML parse error: {e}",
-                DiagnosticSeverity.Error,
-            ),
-        )
-    except YAMLError as e:
-        error_range = position_codec.range_to_client_units(
-            lines,
-            Range(
-                Position(0, 0),
-                Position(0, len(lines[0])),
-            ),
-        )
-        diagnostics.append(
-            Diagnostic(
-                error_range,
-                f"Unknown YAML parse error: {e} [{e!r}]",
-                DiagnosticSeverity.Error,
-            ),
-        )
-    else:
-        feature_set = lint_state.plugin_feature_set
-        pg = feature_set.manifest_parser_generator
-        root_parser = pg.dispatchable_object_parsers[OPARSER_MANIFEST_ROOT]
-        diagnostics.extend(
-            _lint_content(
-                doc_reference,
-                pg,
-                root_parser,
-                content,
-                lines,
-                position_codec,
-            )
-        )
-    return diagnostics
-
-
-def _unknown_key(
-    key: str,
-    expected_keys: Iterable[str],
-    line: int,
-    col: int,
-    lines: List[str],
-    position_codec: LintCapablePositionCodec,
-) -> Tuple["Diagnostic", Optional[str]]:
-    key_range = position_codec.range_to_client_units(
-        lines,
-        Range(
-            Position(
-                line,
-                col,
-            ),
-            Position(
-                line,
-                col + len(key),
-            ),
-        ),
-    )
-
-    candidates = detect_possible_typo(key, expected_keys)
-    extra = ""
-    corrected_key = None
-    if candidates:
-        extra = f' It looks like a typo of "{candidates[0]}".'
-        # TODO: We should be able to tell that `install-doc` and `install-docs` are the same.
-        # That would enable this to work in more cases.
-        corrected_key = candidates[0] if len(candidates) == 1 else None
-
-    diagnostic = Diagnostic(
-        key_range,
-        f'Unknown or unsupported key "{key}".{extra}',
-        DiagnosticSeverity.Error,
-        source="debputy",
-        data=[propose_correct_text_quick_fix(n) for n in candidates],
-    )
-    return diagnostic, corrected_key
-
-
-def _conflicting_key(
-    uri: str,
-    key_a: str,
-    key_b: str,
-    key_a_line: int,
-    key_a_col: int,
-    key_b_line: int,
-    key_b_col: int,
-    lines: List[str],
-    position_codec: LintCapablePositionCodec,
-) -> Iterable["Diagnostic"]:
-    key_a_range = position_codec.range_to_client_units(
-        lines,
-        Range(
-            Position(
-                key_a_line,
-                key_a_col,
-            ),
-            Position(
-                key_a_line,
-                key_a_col + len(key_a),
-            ),
-        ),
-    )
-    key_b_range = position_codec.range_to_client_units(
-        lines,
-        Range(
-            Position(
-                key_b_line,
-                key_b_col,
-            ),
-            Position(
-                key_b_line,
-                key_b_col + len(key_b),
-            ),
-        ),
-    )
-    yield Diagnostic(
-        key_a_range,
-        f'The "{key_a}" cannot be used with "{key_b}".',
-        DiagnosticSeverity.Error,
-        source="debputy",
-        related_information=[
-            DiagnosticRelatedInformation(
-                location=Location(
-                    uri,
-                    key_b_range,
-                ),
-                message=f'The attribute "{key_b}" is used here.',
-            )
-        ],
-    )
-
-    yield Diagnostic(
-        key_b_range,
-        f'The "{key_b}" cannot be used with "{key_a}".',
-        DiagnosticSeverity.Error,
-        source="debputy",
-        related_information=[
-            DiagnosticRelatedInformation(
-                location=Location(
-                    uri,
-                    key_a_range,
-                ),
-                message=f'The attribute "{key_a}" is used here.',
-            )
-        ],
-    )
-
-
-def _lint_attr_value(
-    uri: str,
-    attr: AttributeDescription,
-    pg: ParserGenerator,
-    value: Any,
-    lines: List[str],
-    position_codec: LintCapablePositionCodec,
-) -> Iterable["Diagnostic"]:
-    attr_type = attr.attribute_type
-    orig = get_origin(attr_type)
-    valid_values: Sequence[Any] = tuple()
-    if orig == Literal:
-        valid_values = get_args(attr.attribute_type)
-    elif orig == bool or attr.attribute_type == bool:
-        valid_values = ("true", "false")
-    elif isinstance(attr_type, type) and issubclass(attr_type, DebputyDispatchableType):
-        parser = pg.dispatch_parser_table_for(attr_type)
-        yield from _lint_content(
-            uri,
-            pg,
-            parser,
-            value,
-            lines,
-            position_codec,
-        )
-        return
-
-    if value in valid_values:
-        return
-    # TODO: Emit diagnostic for broken values
-    return
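
An editorial sketch (not part of the deleted file) of the typing introspection used above: get_origin()/get_args() recover the allowed values from a Literal annotation, which is how the linter knows what a valid attribute value looks like:

from typing import Literal, get_args, get_origin

DhCompat = Literal["path", "symlink"]  # hypothetical attribute type
assert get_origin(DhCompat) is Literal
assert get_args(DhCompat) == ("path", "symlink")
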

-

-
-
-def _lint_declarative_mapping_input_parser(
-    uri: str,
-    pg: ParserGenerator,
-    parser: DeclarativeMappingInputParser,
-    content: Any,
-    lines: List[str],
-    position_codec: LintCapablePositionCodec,
-) -> Iterable["Diagnostic"]:
-    if not isinstance(content, CommentedMap):
-        return
-    lc = content.lc
-    for key, value in content.items():
-        attr = parser.manifest_attributes.get(key)
-        line, col = lc.key(key)
-        if attr is None:
-            diag, corrected_key = _unknown_key(
-                key,
-                parser.manifest_attributes,
-                line,
-                col,
-                lines,
-                position_codec,
-            )
-            yield diag
-            if corrected_key:
-                key = corrected_key
-                attr = parser.manifest_attributes.get(corrected_key)
-            if attr is None:
-                continue
-
-        yield from _lint_attr_value(
-            uri,
-            attr,
-            pg,
-            value,
-            lines,
-            position_codec,
-        )
-
-        for forbidden_key in attr.conflicting_attributes:
-            if forbidden_key in content:
-                con_line, con_col = lc.key(forbidden_key)
-                yield from _conflicting_key(
-                    uri,
-                    key,
-                    forbidden_key,
-                    line,
-                    col,
-                    con_line,
-                    con_col,
-                    lines,
-                    position_codec,
-                )
-    for mx in parser.mutually_exclusive_attributes:
-        matches = content.keys() & mx
-        if len(matches) < 2:
-            continue
-        key, *others = list(matches)
-        line, col = lc.key(key)
-        for other in others:
-            con_line, con_col = lc.key(other)
-            yield from _conflicting_key(
-                uri,
-                key,
-                other,
-                line,
-                col,
-                con_line,
-                con_col,
-                lines,
-                position_codec,
-            )
-
-
-def _lint_content(
-    uri: str,
-    pg: ParserGenerator,
-    parser: DeclarativeInputParser[Any],
-    content: Any,
-    lines: List[str],
-    position_codec: LintCapablePositionCodec,
-) -> Iterable["Diagnostic"]:
-    if isinstance(parser, DispatchingParserBase):
-        if not isinstance(content, CommentedMap):
-            return
-        lc = content.lc
-        for key, value in content.items():
-            is_known = parser.is_known_keyword(key)
-            if not is_known:
-                line, col = lc.key(key)
-                diag, corrected_key = _unknown_key(
-                    key,
-                    parser.registered_keywords(),
-                    line,
-                    col,
-                    lines,
-                    position_codec,
-                )
-                yield diag
-                if corrected_key is not None:
-                    key = corrected_key
-                    is_known = True
-
-            if is_known:
-                subparser = parser.parser_for(key)
-                assert subparser is not None
-                yield from _lint_content(
-                    uri,
-                    pg,
-                    subparser.parser,
-                    value,
-                    lines,
-                    position_codec,
-                )
-    elif isinstance(parser, ListWrappedDeclarativeInputParser):
-        if not isinstance(content, CommentedSeq):
-            return
-        subparser = parser.delegate
-        for value in content:
-            yield from _lint_content(uri, pg, subparser, value, lines, position_codec)
-    elif isinstance(parser, InPackageContextParser):
-        if not isinstance(content, CommentedMap):
-            return
-        for v in content.values():
-            yield from _lint_content(uri, pg, parser.delegate, v, lines, position_codec)
-    elif isinstance(parser, DeclarativeMappingInputParser):
-        yield from _lint_declarative_mapping_input_parser(
-            uri,
-            pg,
-            parser,
-            content,
-            lines,
-            position_codec,
-        )
-
-
-def is_at(position: Position, lc_pos: Tuple[int, int]) -> bool:
-    return position.line == lc_pos[0] and position.character == lc_pos[1]
-
-
-def is_before(position: Position, lc_pos: Tuple[int, int]) -> bool:
-    line, column = lc_pos
-    if position.line < line:
-        return True
-    if position.line == line and position.character < column:
-        return True
-    return False
-
-
-def is_after(position: Position, lc_pos: Tuple[int, int]) -> bool:
-    line, column = lc_pos
-    if position.line > line:
-        return True
-    if position.line == line and position.character > column:
-        return True
-    return False
-
-
-def _trace_cursor(
-    content: Any,
-    attribute_path: AttributePath,
-    server_position: Position,
-) -> Optional[Tuple[bool, AttributePath, Any, Any]]:
-    matched_key: Optional[Union[str, int]] = None
-    matched: Optional[Node] = None
-    matched_was_key: bool = False
-
-    if isinstance(content, CommentedMap):
-        dict_lc: LineCol = content.lc
-        for k, v in content.items():
-            k_lc = dict_lc.key(k)
-            if is_before(server_position, k_lc):
-                break
-            v_lc = dict_lc.value(k)
-            if is_before(server_position, v_lc):
-                # TODO: Handle ":" and "whitespace"
-                matched = k
-                matched_key = k
-                matched_was_key = True
-                break
-            matched = v
-            matched_key = k
-    elif isinstance(content, CommentedSeq):
-        list_lc: LineCol = content.lc
-        for idx, value in enumerate(content):
-            i_lc = list_lc.item(idx)
-            if is_before(server_position, i_lc):
-                break
-            matched_key = idx
-            matched = value
-
-    if matched is not None:
-        assert matched_key is not None
-        sub_path = attribute_path[matched_key]
-        if not matched_was_key and isinstance(matched, CommentedBase):
-            return _trace_cursor(matched, sub_path, server_position)
-        return matched_was_key, sub_path, matched, content
-    return None
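
An editorial sketch (not part of the deleted file) of the ruamel.yaml line/column bookkeeping that _trace_cursor() walks; round-trip loading attaches an `lc` attribute with zero-based positions for keys, values and sequence items (assumes ruamel.yaml is installed):

from ruamel.yaml import YAML

doc = YAML().load("installations:\n- install:\n    sources: [usr/bin/tool]\n")
line, col = doc.lc.key("installations")
assert (line, col) == (0, 0)
# Sequence items carry positions too, pointing at the item content
# after the "- " marker:
print(doc["installations"].lc.item(0))  # e.g. line 1, column 2
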

-

-
-
-_COMPLETION_HINT_KEY = "___COMPLETE:"
-_COMPLETION_HINT_VALUE = "___COMPLETE"
-
-
-def resolve_keyword(
-    current_parser: Union[DeclarativeInputParser[Any], DispatchingParserBase],
-    current_plugin: DebputyPluginMetadata,
-    segments: List[Union[str, int]],
-    segment_idx: int,
-    parser_generator: ParserGenerator,
-    *,
-    is_completion_attempt: bool = False,
-) -> Optional[
-    Tuple[
-        Union[DeclarativeInputParser[Any], DispatchingParserBase],
-        DebputyPluginMetadata,
-        int,
-    ]
-]:
-    if segment_idx >= len(segments):
-        return current_parser, current_plugin, segment_idx
-    current_segment = segments[segment_idx]
-    if isinstance(current_parser, ListWrappedDeclarativeInputParser):
-        if isinstance(current_segment, int):
-            current_parser = current_parser.delegate
-            segment_idx += 1
-            if segment_idx >= len(segments):
-                return current_parser, current_plugin, segment_idx
-            current_segment = segments[segment_idx]
-
-    if not isinstance(current_segment, str):
-        return None
-
-    if is_completion_attempt and current_segment.endswith(
-        (_COMPLETION_HINT_KEY, _COMPLETION_HINT_VALUE)
-    ):
-        return current_parser, current_plugin, segment_idx
-
-    if isinstance(current_parser, InPackageContextParser):
-        return resolve_keyword(
-            current_parser.delegate,
-            current_plugin,
-            segments,
-            segment_idx + 1,
-            parser_generator,
-            is_completion_attempt=is_completion_attempt,
-        )
-    elif isinstance(current_parser, DispatchingParserBase):
-        if not current_parser.is_known_keyword(current_segment):
-            if is_completion_attempt:
-                return current_parser, current_plugin, segment_idx
-            return None
-        subparser = current_parser.parser_for(current_segment)
-        segment_idx += 1
-        if segment_idx < len(segments):
-            return resolve_keyword(
-                subparser.parser,
-                subparser.plugin_metadata,
-                segments,
-                segment_idx,
-                parser_generator,
-                is_completion_attempt=is_completion_attempt,
-            )
-        return subparser.parser, subparser.plugin_metadata, segment_idx
-    elif isinstance(current_parser, DeclarativeMappingInputParser):
-        attr = current_parser.manifest_attributes.get(current_segment)
-        attr_type = attr.attribute_type if attr is not None else None
-        if (
-            attr_type is not None
-            and isinstance(attr_type, type)
-            and issubclass(attr_type, DebputyDispatchableType)
-        ):
-            subparser = parser_generator.dispatch_parser_table_for(attr_type)
-            if subparser is not None and (
-                is_completion_attempt or segment_idx + 1 < len(segments)
-            ):
-                return resolve_keyword(
-                    subparser,
-                    current_plugin,
-                    segments,
-                    segment_idx + 1,
-                    parser_generator,
-                    is_completion_attempt=is_completion_attempt,
-                )
-        return current_parser, current_plugin, segment_idx
-    else:
-        _info(f"Unknown parser: {current_parser.__class__}")
-    return None
-
-
-def _render_param_doc(
-    rule_name: str,
-    declarative_parser: DeclarativeMappingInputParser,
-    plugin_metadata: DebputyPluginMetadata,
-    attribute: str,
-) -> Optional[str]:
-    attr = declarative_parser.source_attributes.get(attribute)
-    if attr is None:
-        return None
-
-    doc_args, parser_doc = doc_args_for_parser_doc(
-        rule_name,
-        declarative_parser,
-        plugin_metadata,
-    )
-    rendered_docs = render_attribute_doc(
-        declarative_parser,
-        declarative_parser.source_attributes,
-        declarative_parser.input_time_required_parameters,
-        declarative_parser.at_least_one_of,
-        parser_doc,
-        doc_args,
-        is_interactive=True,
-        rule_name=rule_name,
-    )
-
-    for attributes, rendered_doc in rendered_docs:
-        if attribute in attributes:
-            full_doc = [
-                f"# Attribute `{attribute}`",
-                "",
-            ]
-            full_doc.extend(rendered_doc)
-
-            return "\n".join(full_doc)
-    return None
-
-
-DEBPUTY_PLUGIN_METADATA = plugin_metadata_for_debputys_own_plugin()
-
-
-def _guess_rule_name(segments: List[Union[str, int]], idx: int) -> str:
-    orig_idx = idx
-    idx -= 1
-    while idx >= 0:
-        segment = segments[idx]
-        if isinstance(segment, str):
-            return segment
-        idx -= 1
-    _warn(f"Unable to derive rule name from {segments} [{orig_idx}]")
-    return "<Bug: unknown rule name>"
-
-
-def _escape(v: str) -> str:
-    return '"' + v.replace("\n", "\\n") + '"'
-
-
-def _insert_snippet(lines: List[str], server_position: Position) -> bool:
-    _info(f"Complete at {server_position}")
-    line_no = server_position.line
-    line = lines[line_no]
-    pos_rhs = line[server_position.character :]
-    if pos_rhs and not pos_rhs.isspace():
-        _info(f"No insertion: {_escape(line[server_position.character:])}")
-        return False
-    lhs_ws = line[: server_position.character]
-    lhs = lhs_ws.strip()
-    if lhs.endswith(":"):
-        _info("Insertion of value (key seen)")
-        new_line = line[: server_position.character] + _COMPLETION_HINT_VALUE
-    elif lhs.startswith("-"):
-        _info("Insertion of key or value (list item)")
-        # Respect the provided indentation
-        snippet = _COMPLETION_HINT_KEY if ":" not in lhs else _COMPLETION_HINT_VALUE
-        new_line = line[: server_position.character] + snippet
-    elif not lhs or (lhs_ws and not lhs_ws[0].isspace()):
-        _info(f"Insertion of key or value: {_escape(line[server_position.character:])}")
-        # Respect the provided indentation
-        snippet = _COMPLETION_HINT_KEY if ":" not in lhs else _COMPLETION_HINT_VALUE
-        new_line = line[: server_position.character] + snippet
-    elif lhs.isalpha() and ":" not in lhs:
-        _info(f"Expanding value to a key: {_escape(line[server_position.character:])}")
-        # Respect the provided indentation
-        new_line = line[: server_position.character] + _COMPLETION_HINT_KEY
-    else:
-        c = line[server_position.character]
-        _info(f"Not touching line: {_escape(line)} -- {_escape(c)}")
-        return False
-    _info(f'Evaluating complete on synthetic line: "{new_line}"')
-    lines[line_no] = new_line
-    return True
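
An editorial sketch (not part of the deleted file) of the trick above: a half-typed key makes the manifest unparseable, but appending the ___COMPLETE: hint turns it into a valid mapping key, so the YAML can be loaded and the cursor traced (assumes ruamel.yaml is installed):

from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError

yaml = YAML()
broken = "manifest-version: '0.1'\ninstallat"  # user is mid-word on line 2
try:
    yaml.load(broken)
    print("unexpectedly parsed")
except YAMLError as e:
    # The scanner cannot finish the bare "installat" token into a key.
    print("parse error, as expected:", type(e).__name__)

doc = yaml.load(broken + "___COMPLETE:")
assert "installat___COMPLETE" in doc  # parseable again; completion can proceed
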

-

-
-
-@lsp_completer(_LANGUAGE_IDS)
-def debputy_manifest_completer(
-    ls: "DebputyLanguageServer",
-    params: CompletionParams,
-) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
-    doc = ls.workspace.get_text_document(params.text_document.uri)
-    if not is_valid_file(doc.path):
-        return None
-    lines = doc.lines
-    server_position = doc.position_codec.position_from_client_units(
-        lines, params.position
-    )
-    attribute_root_path = AttributePath.root_path()
-    added_key = _insert_snippet(lines, server_position)
-    attempts = 1 if added_key else 2
-    content = None
-
-    while attempts > 0:
-        attempts -= 1
-        try:
-            content = MANIFEST_YAML.load("".join(lines))
-            break
-        except MarkedYAMLError as e:
-            context_line = (
-                e.context_mark.line if e.context_mark else e.problem_mark.line
-            )
-            if (
-                e.problem_mark.line != server_position.line
-                and context_line != server_position.line
-            ):
-                l_data = (
-                    lines[e.problem_mark.line].rstrip()
-                    if e.problem_mark.line < len(lines)
-                    else "N/A (OOB)"
-                )
-
-                _info(f"Parse error on line: {e.problem_mark.line}: {l_data}")
-                return None
-
-            if attempts > 0:
-                # Try to make it a key and see if that fixes the problem
-                new_line = lines[server_position.line].rstrip() + _COMPLETION_HINT_KEY
-                lines[server_position.line] = new_line
-        except YAMLError:
-            break
-    if content is None:
-        context = lines[server_position.line].replace("\n", "\\n")
-        _info(f"Completion failed: parse error: Line in question: {context}")
-        return None
-    m = _trace_cursor(content, attribute_root_path, server_position)
-
-    if m is None:
-        _info("No match")
-        return None
-    matched_key, attr_path, matched, parent = m
-    _info(f"Matched path: {matched} (path: {attr_path.path}) [{matched_key=}]")
-    feature_set = ls.plugin_feature_set
-    root_parser = feature_set.manifest_parser_generator.dispatchable_object_parsers[
-        OPARSER_MANIFEST_ROOT
-    ]
-    segments = list(attr_path.path_segments())
-    km = resolve_keyword(
-        root_parser,
-        DEBPUTY_PLUGIN_METADATA,
-        segments,
-        0,
-        feature_set.manifest_parser_generator,
-        is_completion_attempt=True,
-    )
-    if km is None:
-        return None
-    parser, _, at_depth_idx = km
-    _info(f"Match leaf parser {at_depth_idx} -- {parser.__class__}")
-    items = []
-    if at_depth_idx + 1 >= len(segments):
-        if isinstance(parser, DispatchingParserBase):
-            if matched_key:
-                items = [
-                    CompletionItem(f"{k}:")
-                    for k in parser.registered_keywords()
-                    if k not in parent
-                    and not isinstance(
-                        parser.parser_for(k).parser,
-                        DeclarativeValuelessKeywordInputParser,
-                    )
-                ]
-            else:
-                items = [
-                    CompletionItem(k)
-                    for k in parser.registered_keywords()
-                    if k not in parent
-                    and isinstance(
-                        parser.parser_for(k).parser,
-                        DeclarativeValuelessKeywordInputParser,
-                    )
-                ]
-        elif isinstance(parser, InPackageContextParser):
-            # doc = ls.workspace.get_text_document(params.text_document.uri)
-            _info(f"TODO: Match package - {parent} -- {matched} -- {matched_key=}")
-        elif isinstance(parser, DeclarativeMappingInputParser):
-            if matched_key:
-                _info("Match attributes")
-                locked = set(parent)
-                for mx in parser.mutually_exclusive_attributes:
-                    if not mx.isdisjoint(parent.keys()):
-                        locked.update(mx)
-                for attr_name, attr in parser.manifest_attributes.items():
-                    if not attr.conflicting_attributes.isdisjoint(parent.keys()):
-                        locked.add(attr_name)
-                        break
-                items = [
-                    CompletionItem(f"{k}:")
-                    for k in parser.manifest_attributes
-                    if k not in locked
-                ]
-            else:
-                # Value
-                key = segments[at_depth_idx] if len(segments) > at_depth_idx else None
-                attr = parser.manifest_attributes.get(key)
-                if attr is not None:
-                    _info(f"Expand value / key: {key} -- {attr.attribute_type}")
-                    items = _completion_from_attr(
-                        attr,
-                        feature_set.manifest_parser_generator,
-                        matched,
-                    )
-                else:
-                    _info(
-                        f"Expand value / key: {key} -- !! {list(parser.manifest_attributes)}"
-                    )
-        elif isinstance(parser, DeclarativeNonMappingInputParser):
-            attr = parser.alt_form_parser
-            items = _completion_from_attr(
-                attr,
-                feature_set.manifest_parser_generator,
-                matched,
-            )
-    return items
-
-
-def _completion_from_attr(
-    attr: AttributeDescription,
-    pg: ParserGenerator,
-    matched: Any,
-) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
-    orig = get_origin(attr.attribute_type)
-    valid_values: Sequence[Any] = tuple()
-    if orig == Literal:
-        valid_values = get_args(attr.attribute_type)
-    elif orig == bool or attr.attribute_type == bool:
-        valid_values = ("true", "false")
-    elif isinstance(orig, type) and issubclass(orig, DebputyDispatchableType):
-        parser = pg.dispatch_parser_table_for(orig)
-        _info(f"M: {parser}")
-
-    if matched in valid_values:
-        _info(f"Already filled: {matched} is one of {valid_values}")
-        return None
-    if valid_values:
-        return [CompletionItem(x) for x in valid_values]
-    return None
-
-
-@lsp_hover(_LANGUAGE_IDS)
-def debputy_manifest_hover(
-    ls: "DebputyLanguageServer",
-    params: HoverParams,
-) -> Optional[Hover]:
-    doc = ls.workspace.get_text_document(params.text_document.uri)
-    if not is_valid_file(doc.path):
-        return None
-    lines = doc.lines
-    position_codec = doc.position_codec
-    attribute_root_path = AttributePath.root_path()
-    server_position = position_codec.position_from_client_units(lines, params.position)
-
-    try:
-        content = MANIFEST_YAML.load("".join(lines))
-    except YAMLError:
-        return None
-    m = _trace_cursor(content, attribute_root_path, server_position)
-    if m is None:
-        _info("No match")
-        return None
-    matched_key, attr_path, matched, _ = m
-    _info(f"Matched path: {matched} (path: {attr_path.path}) [{matched_key=}]")
-
-    feature_set = ls.plugin_feature_set
-    parser_generator = feature_set.manifest_parser_generator
-    root_parser = parser_generator.dispatchable_object_parsers[OPARSER_MANIFEST_ROOT]
-    segments = list(attr_path.path_segments())
-    km = resolve_keyword(
-        root_parser,
-        DEBPUTY_PLUGIN_METADATA,
-        segments,
-        0,
-        parser_generator,
-    )
-    if km is None:

-

921 _info("No keyword match") 

-

922 return 

-

923 parser, plugin_metadata, at_depth_idx = km 

-

924 _info(f"Match leaf parser {at_depth_idx}/{len(segments)} -- {parser.__class__}") 

-

925 hover_doc_text = resolve_hover_text( 

-

926 feature_set, 

-

927 parser, 

-

928 plugin_metadata, 

-

929 segments, 

-

930 at_depth_idx, 

-

931 matched, 

-

932 matched_key, 

-

933 ) 

-

934 return _hover_doc(ls, hover_doc_text) 

-

935 

-

936 

-

937def resolve_hover_text_for_value( 

-

938 feature_set: PluginProvidedFeatureSet, 

-

939 parser: DeclarativeMappingInputParser, 

-

940 plugin_metadata: DebputyPluginMetadata, 

-

941 segment: Union[str, int], 

-

942 matched: Any, 

-

943) -> Optional[str]: 

-

944 

-

945 hover_doc_text: Optional[str] = None 

-

946 attr = parser.manifest_attributes.get(segment) 

-

947 attr_type = attr.attribute_type if attr is not None else None 

-

948 if attr_type is None: 948 ↛ 949line 948 didn't jump to line 949, because the condition on line 948 was never true

-

949 _info(f"Matched value for {segment} -- No attr or type") 

-

950 return None 

-

951 if isinstance(attr_type, type) and issubclass(attr_type, DebputyDispatchableType): 951 ↛ 969line 951 didn't jump to line 969, because the condition on line 951 was never false

-

952 parser_generator = feature_set.manifest_parser_generator 

-

953 parser = parser_generator.dispatch_parser_table_for(attr_type) 

-

954 if parser is None or not isinstance(matched, str): 954 ↛ 955line 954 didn't jump to line 955, because the condition on line 954 was never true

-

955 _info( 

-

956 f"Unknown parser for {segment} or matched is not a str -- {attr_type} {type(matched)=}" 

-

957 ) 

-

958 return None 

-

959 subparser = parser.parser_for(matched) 

-

960 if subparser is None: 960 ↛ 961line 960 didn't jump to line 961, because the condition on line 960 was never true

-

961 _info(f"Unknown parser for {matched} (subparser)") 

-

962 return None 

-

963 hover_doc_text = render_rule( 

-

964 matched, 

-

965 subparser.parser, 

-

966 plugin_metadata, 

-

967 ) 

-

968 else: 

-

969 _info(f"Unknown value: {matched} -- {segment}") 

-

970 return hover_doc_text 

-

971 

-

972 

-

973def resolve_hover_text( 

-

974 feature_set: PluginProvidedFeatureSet, 

-

975 parser: Optional[Union[DeclarativeInputParser[Any], DispatchingParserBase]], 

-

976 plugin_metadata: DebputyPluginMetadata, 

-

977 segments: List[Union[str, int]], 

-

978 at_depth_idx: int, 

-

979 matched: Any, 

-

980 matched_key: bool, 

-

981) -> Optional[str]: 

-

982 hover_doc_text: Optional[str] = None 

-

983 if at_depth_idx == len(segments): 

-

984 segment = segments[at_depth_idx - 1] 

-

985 _info(f"Matched {segment} at ==, {matched_key=} ") 

-

986 hover_doc_text = render_rule( 

-

987 segment, 

-

988 parser, 

-

989 plugin_metadata, 

-

990 is_root_rule=False, 

-

991 ) 

-

992 elif at_depth_idx + 1 == len(segments) and isinstance( 992 ↛ 1015line 992 didn't jump to line 1015, because the condition on line 992 was never false

-

993 parser, DeclarativeMappingInputParser 

-

994 ): 

-

995 segment = segments[at_depth_idx] 

-

996 _info(f"Matched {segment} at -1, {matched_key=} ") 

-

997 if isinstance(segment, str): 997 ↛ 1017line 997 didn't jump to line 1017, because the condition on line 997 was never false

-

998 if not matched_key: 

-

999 hover_doc_text = resolve_hover_text_for_value( 

-

1000 feature_set, 

-

1001 parser, 

-

1002 plugin_metadata, 

-

1003 segment, 

-

1004 matched, 

-

1005 ) 

-

1006 if matched_key or hover_doc_text is None: 

-

1007 rule_name = _guess_rule_name(segments, at_depth_idx) 

-

1008 hover_doc_text = _render_param_doc( 

-

1009 rule_name, 

-

1010 parser, 

-

1011 plugin_metadata, 

-

1012 segment, 

-

1013 ) 

-

1014 else: 

-

1015 _info(f"No doc: {at_depth_idx=} {len(segments)=}") 

-

1016 

-

1017 return hover_doc_text 

-

1018 

-

1019 

-

1020def _hover_doc(ls: "LanguageServer", hover_doc_text: Optional[str]) -> Optional[Hover]: 

-

1021 if hover_doc_text is None: 1021 ↛ 1022line 1021 didn't jump to line 1022, because the condition on line 1021 was never true

-

1022 return None 

-

1023 try: 

-

1024 supported_formats = ls.client_capabilities.text_document.hover.content_format 

-

1025 except AttributeError: 

-

1026 supported_formats = [] 

-

1027 markup_kind = MarkupKind.Markdown 

-

1028 if markup_kind not in supported_formats: 1028 ↛ 1030line 1028 didn't jump to line 1030, because the condition on line 1028 was never false

-

1029 markup_kind = MarkupKind.PlainText 

-

1030 return Hover( 

-

1031 contents=MarkupContent( 

-

1032 kind=markup_kind, 

-

1033 value=hover_doc_text, 

-

1034 ), 

-

1035 ) 

-
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_rules_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_rules_py.html
deleted file mode 100644
index 46ad221..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_rules_py.html
+++ /dev/null
@@ -1,483 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/lsp_debian_rules.py (18% of 188 statements covered); source listing omitted]
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_debian_tests_control_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_debian_tests_control_py.html
deleted file mode 100644
index 14e3902..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_debian_tests_control_py.html
+++ /dev/null
@@ -1,585 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/lsp_debian_tests_control.py (20% of 160 statements covered); source listing omitted]
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_dispatch_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_dispatch_py.html
deleted file mode 100644
index 75cd711..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_dispatch_py.html
+++ /dev/null
@@ -1,330 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/lsp_dispatch.py (43% of 82 statements covered); source listing omitted]
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_features_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_features_py.html
deleted file mode 100644
index 32b1e00..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_features_py.html
+++ /dev/null
@@ -1,316 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/lsp_features.py (57% of 107 statements covered); source listing omitted]
diff --git a/coverage-report/d_5d0ec0d5422112df_lsp_generic_deb822_py.html b/coverage-report/d_5d0ec0d5422112df_lsp_generic_deb822_py.html
deleted file mode 100644
index 44685c8..0000000
--- a/coverage-report/d_5d0ec0d5422112df_lsp_generic_deb822_py.html
+++ /dev/null
@@ -1,481 +0,0 @@
[deleted: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/lsp_generic_deb822.py (46% of 203 statements covered); source listing omitted]

-

182 markup_kind = MarkupKind.Markdown 

-

183 if markup_kind not in supported_formats: 183 ↛ 185line 183 didn't jump to line 185, because the condition on line 183 was never false

-

184 markup_kind = MarkupKind.PlainText 

-

185 return Hover( 

-

186 contents=MarkupContent( 

-

187 kind=markup_kind, 

-

188 value=hover_text, 

-

189 ) 

-

190 ) 

-

191 

-

192 

-

193def _deb822_token_iter( 

-

194 tokens: Iterable[Deb822Token], 

-

195) -> Iterator[Tuple[Deb822Token, int, int, int, int, int]]: 

-

196 line_no = 0 

-

197 line_offset = 0 

-

198 

-

199 for token in tokens: 

-

200 start_line = line_no 

-

201 start_line_offset = line_offset 

-

202 

-

203 newlines = token.text.count("\n") 

-

204 line_no += newlines 

-

205 text_len = len(token.text) 

-

206 if newlines: 

-

207 if token.text.endswith("\n"): 

-

208 line_offset = 0 

-

209 else: 

-

210 # -2, one to remove the "\n" and one to get 0-offset 

-

211 line_offset = text_len - token.text.rindex("\n") - 2 

-

212 else: 

-

213 line_offset += text_len 

-

214 

-

215 yield token, start_line, start_line_offset, line_no, line_offset 

-

216 

-

217 

-

218def deb822_folding_ranges( 

-

219 ls: "LanguageServer", 

-

220 params: FoldingRangeParams, 

-

221 # Unused for now: might be relevant for supporting folding for some fields 

-

222 _file_metadata: Deb822FileMetadata[Any], 

-

223) -> Optional[Sequence[FoldingRange]]: 

-

224 doc = ls.workspace.get_text_document(params.text_document.uri) 

-

225 comment_start = -1 

-

226 folding_ranges = [] 

-

227 for ( 

-

228 token, 

-

229 start_line, 

-

230 start_offset, 

-

231 end_line, 

-

232 end_offset, 

-

233 ) in _deb822_token_iter(tokenize_deb822_file(doc.lines)): 

-

234 if token.is_comment: 

-

235 if comment_start < 0: 

-

236 comment_start = start_line 

-

237 elif comment_start > -1: 

-

238 comment_start = -1 

-

239 folding_range = FoldingRange( 

-

240 comment_start, 

-

241 end_line, 

-

242 kind=FoldingRangeKind.Comment, 

-

243 ) 

-

244 

-

245 folding_ranges.append(folding_range) 

-

246 

-

247 return folding_ranges 

-

248 

-

249 

-

250def deb822_semantic_tokens_full( 

-

251 ls: "LanguageServer", 

-

252 request: SemanticTokensParams, 

-

253 file_metadata: Deb822FileMetadata[Any], 

-

254) -> Optional[SemanticTokens]: 

-

255 doc = ls.workspace.get_text_document(request.text_document.uri) 

-

256 lines = doc.lines 

-

257 deb822_file = parse_deb822_file( 

-

258 lines, 

-

259 accept_files_with_duplicated_fields=True, 

-

260 accept_files_with_error_tokens=True, 

-

261 ) 

-

262 tokens = [] 

-

263 previous_line = 0 

-

264 keyword_token_code = SEMANTIC_TOKEN_TYPES_IDS["keyword"] 

-

265 known_value_token_code = SEMANTIC_TOKEN_TYPES_IDS["enumMember"] 

-

266 no_modifiers = 0 

-

267 

-

268 # TODO: Add comment support; slightly complicated by how we parse the file. 

-

269 

-

270 for stanza_idx, stanza in enumerate(deb822_file): 

-

271 stanza_position = stanza.position_in_file() 

-

272 stanza_metadata = file_metadata.classify_stanza(stanza, stanza_idx=stanza_idx) 

-

273 for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement): 

-

274 kvpair_pos = kvpair.position_in_parent().relative_to(stanza_position) 

-

275 # These two happen to be the same; the indirection is to make it explicit that the two 

-

276 # positions for different tokens are the same. 

-

277 field_position_without_comments = kvpair_pos 

-

278 field_size = doc.position_codec.client_num_units(kvpair.field_name) 

-

279 current_line = field_position_without_comments.line_position 

-

280 line_delta = current_line - previous_line 

-

281 previous_line = current_line 

-

282 tokens.append(line_delta) # Line delta 

-

283 tokens.append(0) # Token column delta 

-

284 tokens.append(field_size) # Token length 

-

285 tokens.append(keyword_token_code) 

-

286 tokens.append(no_modifiers) 

-

287 

-

288 known_field: Optional[Deb822KnownField] = stanza_metadata.get( 

-

289 kvpair.field_name 

-

290 ) 

-

291 if ( 

-

292 known_field is None 

-

293 or not known_field.known_values 

-

294 or known_field.spellcheck_value 

-

295 ): 

-

296 continue 

-

297 

-

298 if known_field.field_value_class not in ( 

-

299 FieldValueClass.SINGLE_VALUE, 

-

300 FieldValueClass.SPACE_SEPARATED_LIST, 

-

301 ): 

-

302 continue 

-

303 value_element_pos = kvpair.value_element.position_in_parent().relative_to( 

-

304 kvpair_pos 

-

305 ) 

-

306 

-

307 last_token_start_column = 0 

-

308 

-

309 for value_ref in kvpair.interpret_as( 

-

310 LIST_SPACE_SEPARATED_INTERPRETATION 

-

311 ).iter_value_references(): 

-

312 if value_ref.value not in known_field.known_values: 

-

313 continue 

-

314 value_loc = value_ref.locatable 

-

315 value_range_te = value_loc.range_in_parent().relative_to( 

-

316 value_element_pos 

-

317 ) 

-

318 start_line = value_range_te.start_pos.line_position 

-

319 line_delta = start_line - current_line 

-

320 current_line = start_line 

-

321 if line_delta: 

-

322 last_token_start_column = 0 

-

323 

-

324 value_start_column = value_range_te.start_pos.cursor_position 

-

325 column_delta = value_start_column - last_token_start_column 

-

326 last_token_start_column = value_start_column 

-

327 

-

328 tokens.append(line_delta) # Line delta 

-

329 tokens.append(column_delta) # Token column delta 

-

330 tokens.append(field_size) # Token length 

-

331 tokens.append(known_value_token_code) 

-

332 tokens.append(no_modifiers) 

-

333 

-

334 if not tokens: 

-

335 return None 

-

336 return SemanticTokens(tokens) 

-

337 

-

338 

-

339def _should_complete_field_with_value(cand: Deb822KnownField) -> bool: 

-

340 return cand.known_values is not None and ( 

-

341 len(cand.known_values) == 1 

-

342 or ( 

-

343 len(cand.known_values) == 2 

-

344 and cand.warn_if_default 

-

345 and cand.default_value is not None 

-

346 ) 

-

347 ) 

-

348 

-

349 

-

350def _complete_field_name( 

-

351 fields: StanzaMetadata[Any], 

-

352 seen_fields: Container[str], 

-

353) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]: 

-

354 items = [] 

-

355 for cand_key, cand in fields.items(): 

-

356 if cand_key.lower() in seen_fields: 

-

357 continue 

-

358 name = cand.name 

-

359 complete_as = name + ": " 

-

360 if _should_complete_field_with_value(cand): 

-

361 value = next(iter(v for v in cand.known_values if v != cand.default_value)) 361 ↛ exitline 361 didn't finish the generator expression on line 361

-

362 complete_as += value 

-

363 tags = [] 

-

364 if cand.replaced_by or cand.deprecated_with_no_replacement: 

-

365 tags.append(CompletionItemTag.Deprecated) 

-

366 

-

367 items.append( 

-

368 CompletionItem( 

-

369 name, 

-

370 insert_text=complete_as, 

-

371 tags=tags, 

-

372 ) 

-

373 ) 

-

374 return items 

-

375 

-

376 

-

377def _complete_field_value( 

-

378 field: Deb822KnownField, 

-

379) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]: 

-

380 if field.known_values is None: 

-

381 return None 

-

382 return [CompletionItem(v) for v in field.known_values] 

-
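The semantic-token handler in the report above emits the flat integer stream that LSP expects: for each token, a line delta, a column delta (relative within the same line), a token length, a token-type id, and a modifier bitmask. A minimal, self-contained sketch of that delta encoding follows; the absolute-token tuple and the type-id values are illustrative stand-ins, not debputy's real types.

from typing import List, Tuple

# (line, start_column, length, token_type_id) with absolute positions.
Token = Tuple[int, int, int, int]

def encode_semantic_tokens(tokens: List[Token]) -> List[int]:
    """Flatten absolute tokens into the LSP delta-encoded integer stream."""
    data: List[int] = []
    prev_line = 0
    prev_col = 0
    for line, col, length, token_type in sorted(tokens):
        line_delta = line - prev_line
        # Column deltas restart from zero whenever the line changes.
        col_delta = col - prev_col if line_delta == 0 else col
        data.extend([line_delta, col_delta, length, token_type, 0])  # 0 = no modifiers
        prev_line, prev_col = line, col
    return data

# Example: two tokens on line 0 and one on line 2.
assert encode_semantic_tokens([(0, 0, 6, 1), (0, 8, 3, 2), (2, 4, 5, 1)]) == [
    0, 0, 6, 1, 0,
    0, 8, 3, 2, 0,
    2, 4, 5, 1, 0,
]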
diff --git a/coverage-report/d_5d0ec0d5422112df_quickfixes_py.html b/coverage-report/d_5d0ec0d5422112df_quickfixes_py.html
deleted file mode
index b62166d..0000000
--- a/coverage-report/d_5d0ec0d5422112df_quickfixes_py.html
+++ /dev/null
@@ -1,301 +0,0 @@
[301 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/quickfixes.py, created at 2024-04-07 12:14 +0200; 42% of 63 statements covered. The reported module implements the LSP quick-fix machinery: "correct-text" and "remove-line" code-action handlers registered in CODE_ACTION_HANDLERS, and provide_standard_quickfixes_from_diagnostics, which dispatches on the "code_action" key carried in a diagnostic's data.]
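The quick-fix module summarized above keys its handlers by the "code_action" name embedded in each diagnostic's data. A stripped-down sketch of that registry pattern follows, with plain dicts and strings as simplified stand-ins for the lsprotocol Diagnostic/CodeAction types.

from typing import Callable, Dict, List, Mapping

Handler = Callable[[Mapping[str, str]], List[str]]
HANDLERS: Dict[str, Handler] = {}

def handler_for(name: str) -> Callable[[Handler], Handler]:
    # Register a handler under its code-action name; duplicates are a bug.
    def register(func: Handler) -> Handler:
        assert name not in HANDLERS, f"duplicate handler for {name}"
        HANDLERS[name] = func
        return func
    return register

@handler_for("correct-text")
def _correct_text(data: Mapping[str, str]) -> List[str]:
    # A real handler would yield a CodeAction carrying a WorkspaceEdit.
    return [f'Replace with "{data["correct_value"]}"']

def fixes_for(diagnostic_data: Mapping[str, str]) -> List[str]:
    handler = HANDLERS.get(diagnostic_data.get("code_action", ""))
    return handler(diagnostic_data) if handler is not None else []

print(fixes_for({"code_action": "correct-text", "correct_value": "Depends"}))
# -> ['Replace with "Depends"']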
diff --git a/coverage-report/d_5d0ec0d5422112df_spellchecking_py.html b/coverage-report/d_5d0ec0d5422112df_spellchecking_py.html
deleted file mode
index 2474783..0000000
--- a/coverage-report/d_5d0ec0d5422112df_spellchecking_py.html
+++ /dev/null
@@ -1,403 +0,0 @@
[403 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/spellchecking.py, created at 2024-04-07 12:14 +0200; 71% of 152 statements covered. The reported module implements hunspell-backed spellchecking of text lines: regexes skip filenames, code-like identifiers, and email addresses before lookup, and the dictionary is extended with built-in Debian word lists, release names, dpkg architectures, and any personal hunspell dictionaries.]
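A minimal sketch of the hunspell-backed checking summarized above, assuming the pyhunspell binding and the Debian en_US dictionary paths named in the report; the extra words passed in are illustrative jargon, not debputy's real word lists.

import os
from typing import Iterable, Optional, Sequence

try:
    from hunspell import HunSpell  # pyhunspell binding
except ImportError:
    HunSpell = None  # checking silently degrades to "everything is fine"

_DIC = "/usr/share/hunspell/en_US.dic"
_AFF = "/usr/share/hunspell/en_US.aff"

def make_checker(extra_words: Iterable[str]) -> Optional["HunSpell"]:
    if HunSpell is None or not os.path.exists(_DIC) or not os.path.exists(_AFF):
        return None
    checker = HunSpell(_DIC, _AFF)
    for word in extra_words:
        checker.add(word)  # treat project jargon as correctly spelled
    return checker

def corrections(checker: Optional["HunSpell"], word: str) -> Sequence[str]:
    # No corrections means the word is accepted (or checking is disabled).
    if checker is None or checker.spell(word):
        return ()
    return checker.suggest(word)

checker = make_checker(["debputy", "deb822"])
print(corrections(checker, "packaage"))  # e.g. ['package', ...]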
diff --git a/coverage-report/d_5d0ec0d5422112df_text_edit_py.html b/coverage-report/d_5d0ec0d5422112df_text_edit_py.html
deleted file mode
index f78ee63..0000000
--- a/coverage-report/d_5d0ec0d5422112df_text_edit_py.html
+++ /dev/null
@@ -1,209 +0,0 @@
[209 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/text_edit.py, created at 2024-04-07 12:14 +0200; 10% of 66 statements covered. The reported module contains text-edit helpers copied and adapted from python-lsp-server (Expat/MIT license): range normalization, a stable merge sort of edits, and apply_text_edits, which rejects overlapping edits.]
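The apply_text_edits helper summarized above splices sorted, non-overlapping edits into the document text. The core idea reduces to offset-based splicing; here is a sketch using plain character offsets instead of lsprotocol Ranges.

from typing import List, Tuple

Edit = Tuple[int, int, str]  # (start_offset, end_offset, replacement)

def apply_edits(text: str, edits: List[Edit]) -> str:
    pieces: List[str] = []
    last = 0
    for start, end, new_text in sorted(edits):
        if start < last:
            # Mirrors the OverLappingTextEditException behaviour above.
            raise ValueError("overlapping edit")
        pieces.append(text[last:start])  # untouched text before the edit
        pieces.append(new_text)
        last = end
    pieces.append(text[last:])  # untouched tail
    return "".join(pieces)

assert apply_edits("Architecture: al\n", [(14, 16, "all")]) == "Architecture: all\n"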
diff --git a/coverage-report/d_5d0ec0d5422112df_text_util_py.html b/coverage-report/d_5d0ec0d5422112df_text_util_py.html
deleted file mode
index a8877ef..0000000
--- a/coverage-report/d_5d0ec0d5422112df_text_util_py.html
+++ /dev/null
@@ -1,221 +0,0 @@
[221 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/lsp/text_util.py, created at 2024-04-07 12:14 +0200; 67% of 59 statements covered. The reported module holds small text utilities: Levenshtein-based typo candidates (when python-Levenshtein is available), deb822 field-name normalization, trimming of trailing whitespace on save, and conversion of parser positions and ranges to their LSP equivalents.]
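detect_possible_typo, summarized above, pre-filters candidates by length and keeps those within Levenshtein distance 2. A self-contained sketch follows; the report's code uses the python-Levenshtein extension, while a small dynamic-programming edit distance is inlined here so the example runs without that dependency.

from typing import Iterable, List

def distance(a: str, b: str) -> int:
    # Classic Wagner-Fischer edit distance, one row at a time.
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        cur = [i]
        for j, cb in enumerate(b, 1):
            cur.append(min(prev[j] + 1, cur[j - 1] + 1, prev[j - 1] + (ca != cb)))
        prev = cur
    return prev[-1]

def detect_possible_typo(value: str, known_values: Iterable[str]) -> List[str]:
    # Cheap length filter first; then keep candidates within edit distance 2.
    return [
        k for k in known_values
        if abs(len(value) - len(k)) <= 2 and distance(value, k) <= 2
    ]

print(detect_possible_typo("Architecure", ["Architecture", "Description"]))
# -> ['Architecture']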
diff --git a/coverage-report/d_64287305fe0c6642___init___py.html b/coverage-report/d_64287305fe0c6642___init___py.html
deleted file mode
index 6ff82c5..0000000
--- a/coverage-report/d_64287305fe0c6642___init___py.html
+++ /dev/null
@@ -1,136 +0,0 @@
[136 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/__init__.py, created at 2024-04-07 12:14 +0200; 100% of 3 statements covered. The reported module only re-exports the public plugin API symbols (DebputyPluginInitializer, VirtualPath, reference_documentation, and friends).]
diff --git a/coverage-report/d_64287305fe0c6642_example_processing_py.html b/coverage-report/d_64287305fe0c6642_example_processing_py.html
deleted file mode
index 8f26489..0000000
--- a/coverage-report/d_64287305fe0c6642_example_processing_py.html
+++ /dev/null
@@ -1,198 +0,0 @@
[198 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/example_processing.py, created at 2024-04-07 12:14 +0200; 96% of 62 statements covered. The reported module replays automatic-discard-rule examples against a virtual filesystem and classifies each path as kept, discarded (directly or via its parent directory), or inconsistent with the example's expectation.]
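The example processor summarized above walks a virtual file system, lets a discarded directory discard everything beneath it, and flags paths whose actual verdict disagrees with the example. A reduced sketch of that verdict propagation, using plain path strings (parents listed before children) instead of debputy's VirtualPath objects:

import posixpath
from typing import Callable, Dict, List, Tuple

def check_example(
    should_discard: Callable[[str], bool],
    expected: List[Tuple[str, bool]],  # (path, expected_discard), parents first
) -> List[str]:
    actual: Dict[str, bool] = {}
    inconsistent: List[str] = []
    for path, expected_verdict in expected:
        parent = posixpath.dirname(path)
        # A discarded directory drags all of its contents with it.
        verdict = actual.get(parent, False) or should_discard(path)
        actual[path] = verdict
        if verdict != expected_verdict:
            inconsistent.append(path)
    return inconsistent

def rule(path: str) -> bool:
    # Toy rule: discard libtool .la files.
    return path.endswith(".la")

print(check_example(rule, [
    ("usr/lib/libfoo.la", True),
    ("usr/lib/libfoo.so", False),
]))  # -> [] (the example is consistent with the rule)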
diff --git a/coverage-report/d_64287305fe0c6642_feature_set_py.html b/coverage-report/d_64287305fe0c6642_feature_set_py.html
deleted file mode
index 0c259e6..0000000
--- a/coverage-report/d_64287305fe0c6642_feature_set_py.html
+++ /dev/null
@@ -1,191 +0,0 @@
[191 deleted lines: generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/feature_set.py, created at 2024-04-07 12:14 +0200; 73% of 35 statements covered. The reported module defines PluginProvidedFeatureSet, the aggregate registry of plugin-provided files, detectors, manifest variables, processors, discard rules, service managers, and parsers; its package_processors_in_order runs processors without declared dependencies before those that declare any, so hidden dependency bugs surface early.]
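package_processors_in_order, summarized above, runs processors with no declared dependencies first and keeps registration order within each group. A small sketch of that ordering rule; the Processor type here is an illustrative stand-in for debputy's processor objects, and the processor names are hypothetical.

from typing import List, NamedTuple, Sequence

class Processor(NamedTuple):
    name: str
    dependencies: Sequence[str]

def processors_in_order(processors: List[Processor]) -> List[Processor]:
    independent = [p for p in processors if not p.dependencies]
    dependent = [p for p in processors if p.dependencies]
    # Insertion order is preserved within each group, so a processor can only
    # rely on dependencies that were registered before it.
    return independent + dependent

ordered = processors_in_order([
    Processor("strip-nondeterminism", ()),
    Processor("gen-symbols", ("strip-nondeterminism",)),
    Processor("compress-man-pages", ()),
])
print([p.name for p in ordered])
# -> ['strip-nondeterminism', 'compress-man-pages', 'gen-symbols']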
diff --git a/coverage-report/d_64287305fe0c6642_impl_py.html b/coverage-report/d_64287305fe0c6642_impl_py.html
deleted file mode
index c2c961d..0000000
--- a/coverage-report/d_64287305fe0c6642_impl_py.html
+++ /dev/null
@@ -1,2060 +0,0 @@
[Generated coverage.py v7.2.7 HTML report for src/debputy/plugin/api/impl.py, created at 2024-04-07 12:14 +0200; 55% of 753 statements covered. The reported source, with the HTML markup stripped:]
1 import contextlib
2 import dataclasses
3 import functools
4 import importlib
5 import importlib.util
6 import itertools
7 import json
8 import os
9 import re
10 import subprocess
11 import sys
12 from abc import ABC
13 from json import JSONDecodeError
14 from typing import (
15     Optional,
16     Callable,
17     Dict,
18     Tuple,
19     Iterable,
20     Sequence,
21     Type,
22     List,
23     Union,
24     Set,
25     Iterator,
26     IO,
27     Mapping,
28     AbstractSet,
29     cast,
30     FrozenSet,
31     Any,
32     Literal,
33 )
34
35 from debputy import DEBPUTY_DOC_ROOT_DIR
36 from debputy.exceptions import (
37     DebputySubstitutionError,
38     PluginConflictError,
39     PluginMetadataError,
40     PluginBaseError,
41     PluginInitializationError,
42     PluginAPIViolationError,
43     PluginNotFoundError,
44 )
45 from debputy.maintscript_snippet import (
46     STD_CONTROL_SCRIPTS,
47     MaintscriptSnippetContainer,
48     MaintscriptSnippet,
49 )
50 from debputy.manifest_parser.base_types import TypeMapping
51 from debputy.manifest_parser.exceptions import ManifestParseException
52 from debputy.manifest_parser.parser_data import ParserContextData
53 from debputy.manifest_parser.util import AttributePath
54 from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
55 from debputy.plugin.api.impl_types import (
56     DebputyPluginMetadata,
57     PackagerProvidedFileClassSpec,
58     MetadataOrMaintscriptDetector,
59     PluginProvidedTrigger,
60     TTP,
61     DIPHandler,
62     PF,
63     SF,
64     DIPKWHandler,
65     PluginProvidedManifestVariable,
66     PluginProvidedPackageProcessor,
67     PluginProvidedDiscardRule,
68     AutomaticDiscardRuleExample,
69     PPFFormatParam,
70     ServiceManagerDetails,
71     resolve_package_type_selectors,
72     KnownPackagingFileInfo,
73     PluginProvidedKnownPackagingFile,
74     InstallPatternDHCompatRule,
75     PluginProvidedTypeMapping,
76 )
77 from debputy.plugin.api.plugin_parser import (
78     PLUGIN_METADATA_PARSER,
79     PluginJsonMetadata,
80     PLUGIN_PPF_PARSER,
81     PackagerProvidedFileJsonDescription,
82     PLUGIN_MANIFEST_VARS_PARSER,
83     PLUGIN_KNOWN_PACKAGING_FILES_PARSER,
84 )
85 from debputy.plugin.api.spec import (
86     MaintscriptAccessor,
87     Maintscript,
88     DpkgTriggerType,
89     BinaryCtrlAccessor,
90     PackageProcessingContext,
91     MetadataAutoDetector,
92     PluginInitializationEntryPoint,
93     DebputyPluginInitializer,
94     PackageTypeSelector,
95     FlushableSubstvars,
96     ParserDocumentation,
97     PackageProcessor,
98     VirtualPath,
99     ServiceIntegrator,
100     ServiceDetector,
101     ServiceRegistry,
102     ServiceDefinition,
103     DSD,
104     ServiceUpgradeRule,
105     PackagerProvidedFileReferenceDocumentation,
106     packager_provided_file_reference_documentation,
107     TypeMappingDocumentation,
108 )
109 from debputy.substitution import (
110     Substitution,
111     VariableNameState,
112     SUBST_VAR_RE,
113     VariableContext,
114 )
115 from debputy.util import (
116     _normalize_path,
117     POSTINST_DEFAULT_CONDITION,
118     _error,
119     print_command,
120     _warn,
121 )
122
123 PLUGIN_TEST_SUFFIX = re.compile(r"_(?:t|test|check)(?:_([a-z0-9_]+))?[.]py$")
124
125
126 def _validate_known_packaging_file_dh_compat_rules(
127     dh_compat_rules: Optional[List[InstallPatternDHCompatRule]],
128 ) -> None:
129     max_compat = None
130     if not dh_compat_rules:
131         return
132     dh_compat_rule: InstallPatternDHCompatRule
133     for idx, dh_compat_rule in enumerate(dh_compat_rules):
134         dh_version = dh_compat_rule.get("starting_with_debhelper_version")
135         compat = dh_compat_rule.get("starting_with_compat_level")
136
137         remaining = dh_compat_rule.keys() - {
138             "after_debhelper_version",
139             "starting_with_compat_level",
140         }
141         if not remaining:
142             raise ValueError(

143                 f"The dh compat-rule at index {idx} does not affect anything nor have any rules!? So why have it?"

-

144 ) 

-

145 if dh_version is None and compat is None and idx < len(dh_compat_rules) - 1: 

-

146 raise ValueError( 

-

147 f"The dh compat-rule at index {idx} is not the last and is missing either" 

-

148 " before-debhelper-version or before-compat-level" 

-

149 ) 

-

150 if compat is not None and compat < 0: 

-

151 raise ValueError( 

-

152 f"There is no compat below 1 but dh compat-rule at {idx} wants to declare some rule" 

-

153 f" for something that appeared when migrating from {compat} to {compat + 1}." 

-

154 ) 

-

155 

-

156 if max_compat is None: 

-

157 max_compat = compat 

-

158 elif compat is not None: 

-

159 if compat >= max_compat: 

-

160 raise ValueError( 

-

161 f"The dh compat-rule at {idx} should be moved earlier than the entry for compat {max_compat}." 

-

162 ) 

-

163 max_compat = compat 

-

164 

-

165 install_pattern = dh_compat_rule.get("install_pattern") 

-

166 if ( 

-

167 install_pattern is not None 

-

168 and _normalize_path(install_pattern, with_prefix=False) != install_pattern 

-

169 ): 

-

170 raise ValueError( 

-

171 f"The install-pattern in dh compat-rule at {idx} must be normalized as" 

-

172 f' "{_normalize_path(install_pattern, with_prefix=False)}".' 

-

173 ) 

-

174 

-

175 

-

176class DebputyPluginInitializerProvider(DebputyPluginInitializer): 

-

177 __slots__ = ( 

-

178 "_plugin_metadata", 

-

179 "_feature_set", 

-

180 "_plugin_detector_ids", 

-

181 "_substitution", 

-

182 "_unloaders", 

-

183 "_load_started", 

-

184 ) 

-

185 

-

186 def __init__( 

-

187 self, 

-

188 plugin_metadata: DebputyPluginMetadata, 

-

189 feature_set: PluginProvidedFeatureSet, 

-

190 substitution: Substitution, 

-

191 ) -> None: 

-

192 self._plugin_metadata: DebputyPluginMetadata = plugin_metadata 

-

193 self._feature_set = feature_set 

-

194 self._plugin_detector_ids: Set[str] = set() 

-

195 self._substitution = substitution 

-

196 self._unloaders: List[Callable[[], None]] = [] 

-

197 self._load_started = False 

-

198 

-

199 def unload_plugin(self) -> None: 

-

200 if self._load_started: 

-

201 for unloader in self._unloaders: 

-

202 unloader() 

-

203 del self._feature_set.plugin_data[self._plugin_name] 

-

204 

-

205 def load_plugin(self) -> None: 

-

206 metadata = self._plugin_metadata 

-

207 if metadata.plugin_name in self._feature_set.plugin_data: 207 ↛ 208line 207 didn't jump to line 208, because the condition on line 207 was never true

-

208 raise PluginConflictError( 

-

209 f'The plugin "{metadata.plugin_name}" has already been loaded!?' 

-

210 ) 

-

211 assert ( 

-

212 metadata.api_compat_version == 1 

-

213 ), f"Unsupported plugin API compat version {metadata.api_compat_version}" 

-

214 self._feature_set.plugin_data[metadata.plugin_name] = metadata 

-

215 self._load_started = True 

-

216 assert not metadata.is_initialized 

-

217 try: 

-

218 metadata.initialize_plugin(self) 

-

219 except Exception as e: 

-

220 initializer = metadata.plugin_initializer 

-

221 if ( 221 ↛ 226line 221 didn't jump to line 226

-

222 isinstance(e, TypeError) 

-

223 and initializer is not None 

-

224 and not callable(initializer) 

-

225 ): 

-

226 raise PluginMetadataError( 

-

227 f"The specified entry point for plugin {metadata.plugin_name} does not appear to be a" 

-

228 f" callable (callable returns False). The specified entry point identifies" 

-

229 f' itself as "{initializer.__qualname__}".' 

-

230 ) from e 

-

231 elif isinstance(e, PluginBaseError): 231 ↛ 233line 231 didn't jump to line 233, because the condition on line 231 was never false

-

232 raise 

-

233 raise PluginInitializationError( 

-

234 f"Exception while attempting to load plugin {metadata.plugin_name}" 

-

235 ) from e 

-

236 

-

    def packager_provided_file(
        self,
        stem: str,
        installed_path: str,
        *,
        default_mode: int = 0o0644,
        default_priority: Optional[int] = None,
        allow_name_segment: bool = True,
        allow_architecture_segment: bool = False,
        post_formatting_rewrite: Optional[Callable[[str], str]] = None,
        packageless_is_fallback_for_all_packages: bool = False,
        reservation_only: bool = False,
        format_callback: Optional[
            Callable[[str, PPFFormatParam, VirtualPath], str]
        ] = None,
        reference_documentation: Optional[
            PackagerProvidedFileReferenceDocumentation
        ] = None,
    ) -> None:
        packager_provided_files = self._feature_set.packager_provided_files
        existing = packager_provided_files.get(stem)

        if format_callback is not None and self._plugin_name != "debputy":
            raise ValueError(
                "Sorry; Using format_callback is a debputy-internal"
                f" API. Triggered by plugin {self._plugin_name}"
            )

        if installed_path.endswith("/"):
            raise ValueError(
                'The installed_path ends with "/" indicating it is a directory, but it must be a file.'
                f" Triggered by plugin {self._plugin_name}."
            )

        installed_path = _normalize_path(installed_path)

        has_name_var = "{name}" in installed_path

        if installed_path.startswith("./DEBIAN") or reservation_only:
            # Special-case, used for control files.
            if self._plugin_name != "debputy":
                raise ValueError(
                    "Sorry; Using DEBIAN as install path or/and reservation_only is a debputy-internal"
                    f" API. Triggered by plugin {self._plugin_name}"
                )
        elif not has_name_var and "{owning_package}" not in installed_path:
            raise ValueError(
                'The installed_path must contain a "{name}" (preferred) or a "{owning_package}"'
                " substitution (or have installed_path end with a slash). Otherwise, the installed"
                f" path would cause file-conflicts. Triggered by plugin {self._plugin_name}"
            )

        if allow_name_segment and not has_name_var:
            raise ValueError(
                'When allow_name_segment is True, the installed_path must have a "{name}" substitution'
                " variable. Otherwise, the name segment will not work properly. Triggered by"
                f" plugin {self._plugin_name}"
            )

        if (
            default_priority is not None
            and "{priority}" not in installed_path
            and "{priority:02}" not in installed_path
        ):
            raise ValueError(
                'When default_priority is not None, the installed_path should have a "{priority}"'
                ' or a "{priority:02}" substitution variable. Otherwise, the priority would be lost.'
                f" Triggered by plugin {self._plugin_name}"
            )

        if existing is not None:
            if existing.debputy_plugin_metadata.plugin_name != self._plugin_name:
                message = (
                    f'The stem "{stem}" is registered twice for packager provided files.'
                    f" Once by {existing.debputy_plugin_metadata.plugin_name} and once"
                    f" by {self._plugin_name}"
                )
            else:
                message = (
                    f"Bug in the plugin {self._plugin_name}: It tried to register the"
                    f' stem "{stem}" twice for packager provided files.'
                )
            raise PluginConflictError(
                message, existing.debputy_plugin_metadata, self._plugin_metadata
            )
        packager_provided_files[stem] = PackagerProvidedFileClassSpec(
            self._plugin_metadata,
            stem,
            installed_path,
            default_mode=default_mode,
            default_priority=default_priority,
            allow_name_segment=allow_name_segment,
            allow_architecture_segment=allow_architecture_segment,
            post_formatting_rewrite=post_formatting_rewrite,
            packageless_is_fallback_for_all_packages=packageless_is_fallback_for_all_packages,
            reservation_only=reservation_only,
            formatting_callback=format_callback,
            reference_documentation=reference_documentation,
        )

        def _unload() -> None:
            del packager_provided_files[stem]

        self._unloaders.append(_unload)

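    # Illustrative sketch (hypothetical, not part of this module): a plugin
    # initializer registering a packager provided file. The plugin name and the
    # values are made up; only keyword arguments accepted above are used.
    #
    #     def initialize_my_plugin(api: DebputyPluginInitializer) -> None:
    #         # debian/foo (or debian/<package>.foo) will be installed as
    #         # /usr/share/foo/<package>.conf with mode 0644.
    #         api.packager_provided_file(
    #             "foo",
    #             "/usr/share/foo/{name}.conf",
    #             default_mode=0o0644,
    #         )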
    def metadata_or_maintscript_detector(
        self,
        auto_detector_id: str,
        auto_detector: MetadataAutoDetector,
        *,
        package_type: PackageTypeSelector = "deb",
    ) -> None:
        if auto_detector_id in self._plugin_detector_ids:
            raise ValueError(
                f"The plugin {self._plugin_name} tried to register"
                f' "{auto_detector_id}" twice'
            )
        self._plugin_detector_ids.add(auto_detector_id)
        all_detectors = self._feature_set.metadata_maintscript_detectors
        if self._plugin_name not in all_detectors:
            all_detectors[self._plugin_name] = []
        package_types = resolve_package_type_selectors(package_type)
        all_detectors[self._plugin_name].append(
            MetadataOrMaintscriptDetector(
                detector_id=auto_detector_id,
                detector=auto_detector,
                plugin_metadata=self._plugin_metadata,
                applies_to_package_types=package_types,
                enabled=True,
            )
        )

        def _unload() -> None:
            if self._plugin_name in all_detectors:
                del all_detectors[self._plugin_name]

        self._unloaders.append(_unload)

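    # Illustrative sketch (hypothetical, not part of this module): registering
    # a metadata detector. The three-parameter shape of the callable is an
    # assumption about the MetadataAutoDetector type (package file-system root,
    # binary control accessor, package processing context); check that type for
    # the authoritative signature.
    #
    #     def initialize_my_plugin(api: DebputyPluginInitializer) -> None:
    #         def _detect_foo(fs_root, ctrl, context) -> None:
    #             if fs_root.lookup("usr/share/foo") is not None:
    #                 ctrl.substvars["misc:Recommends"] = "foo-data"
    #
    #         api.metadata_or_maintscript_detector("detect-foo", _detect_foo)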
    def document_builtin_variable(
        self,
        variable_name: str,
        variable_reference_documentation: str,
        *,
        is_context_specific: bool = False,
        is_for_special_case: bool = False,
    ) -> None:
        manifest_variables = self._feature_set.manifest_variables
        self._restricted_api()
        state = self._substitution.variable_state(variable_name)
        if state == VariableNameState.UNDEFINED:
            raise ValueError(
                f"The plugin {self._plugin_name} attempted to document built-in {variable_name},"
                f" but it is not known to be a variable"
            )

        assert variable_name not in manifest_variables

        manifest_variables[variable_name] = PluginProvidedManifestVariable(
            self._plugin_metadata,
            variable_name,
            None,
            is_context_specific_variable=is_context_specific,
            variable_reference_documentation=variable_reference_documentation,
            is_documentation_placeholder=True,
            is_for_special_case=is_for_special_case,
        )

        def _unload() -> None:
            del manifest_variables[variable_name]

        self._unloaders.append(_unload)

    def manifest_variable_provider(
        self,
        provider: Callable[[VariableContext], Mapping[str, str]],
        variables: Union[Sequence[str], Mapping[str, Optional[str]]],
    ) -> None:
        self._restricted_api()
        cached_provider = functools.lru_cache(None)(provider)
        permitted_variables = frozenset(variables)
        variables_iter: Iterable[Tuple[str, Optional[str]]]
        if not isinstance(variables, Mapping):
            variables_iter = zip(variables, itertools.repeat(None))
        else:
            variables_iter = variables.items()

        checked_vars = False
        manifest_variables = self._feature_set.manifest_variables
        plugin_name = self._plugin_name

        def _value_resolver_generator(
            variable_name: str,
        ) -> Callable[[VariableContext], str]:
            def _value_resolver(variable_context: VariableContext) -> str:
                res = cached_provider(variable_context)
                nonlocal checked_vars
                if not checked_vars:
                    if permitted_variables != res.keys():
                        expected = ", ".join(sorted(permitted_variables))
                        actual = ", ".join(sorted(res))
                        raise PluginAPIViolationError(
                            f"The plugin {plugin_name} claimed to provide"
                            f" the following variables {expected},"
                            f" but when resolving the variables, the plugin provided"
                            f" {actual}. These two lists should have been the same."
                        )
                    # Validation passed; do not repeat it on later resolutions.
                    checked_vars = True
                return res[variable_name]

            return _value_resolver

        for varname, vardoc in variables_iter:
            self._check_variable_name(varname)
            manifest_variables[varname] = PluginProvidedManifestVariable(
                self._plugin_metadata,
                varname,
                _value_resolver_generator(varname),
                is_context_specific_variable=False,
                variable_reference_documentation=vardoc,
            )

        def _unload() -> None:
            raise PluginInitializationError(
                "Cannot unload manifest_variable_provider (not implemented)"
            )

        self._unloaders.append(_unload)

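    # Illustrative sketch (hypothetical, not part of this module): a provider
    # computing several related variables in one call. Note this API is
    # debputy-internal (it calls _restricted_api above), and the mapping
    # returned by the provider must contain exactly the declared keys.
    #
    #     def _provide(context: VariableContext) -> Mapping[str, str]:
    #         return {
    #             "UPSTREAM_VERSION": "1.2.3",
    #             "UPSTREAM_VERSION_MAJOR": "1",
    #         }
    #
    #     api.manifest_variable_provider(
    #         _provide,
    #         {
    #             "UPSTREAM_VERSION": "The full upstream version",
    #             "UPSTREAM_VERSION_MAJOR": "The major component of the version",
    #         },
    #     )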
    def _check_variable_name(self, variable_name: str) -> None:
        manifest_variables = self._feature_set.manifest_variables
        existing = manifest_variables.get(variable_name)

        if existing is not None:
            if existing.plugin_metadata.plugin_name == self._plugin_name:
                message = (
                    f"Bug in the plugin {self._plugin_name}: It tried to register the"
                    f' manifest variable "{variable_name}" twice.'
                )
            else:
                message = (
                    f"The plugins {existing.plugin_metadata.plugin_name} and {self._plugin_name}"
                    f" both tried to provide the manifest variable {variable_name}"
                )
            raise PluginConflictError(
                message, existing.plugin_metadata, self._plugin_metadata
            )
        if not SUBST_VAR_RE.match("{{" + variable_name + "}}"):
            raise ValueError(
                f"The plugin {self._plugin_name} attempted to declare {variable_name},"
                f" which is not a valid variable name"
            )

        namespace = ""
        variable_basename = variable_name
        if ":" in variable_name:
            namespace, variable_basename = variable_name.rsplit(":", 1)
            assert namespace != ""
            assert variable_name != ""

        if namespace != "" and namespace not in ("token", "path"):
            raise ValueError(
                f"The plugin {self._plugin_name} attempted to declare {variable_name},"
                f" which is in the reserved namespace {namespace}"
            )

        variable_name_upper = variable_name.upper()
        if (
            variable_name_upper.startswith(("DEB_", "DPKG_", "DEBPUTY"))
            or variable_basename.startswith("_")
            or variable_basename.upper().startswith("DEBPUTY")
        ) and self._plugin_name != "debputy":
            raise ValueError(
                f"The plugin {self._plugin_name} attempted to declare {variable_name},"
                f" which is a variable name reserved by debputy"
            )

        state = self._substitution.variable_state(variable_name)
        if state != VariableNameState.UNDEFINED and self._plugin_name != "debputy":
            raise ValueError(
                f"The plugin {self._plugin_name} attempted to declare {variable_name},"
                f" which would shadow a built-in variable"
            )

    def package_processor(
        self,
        processor_id: str,
        processor: PackageProcessor,
        *,
        depends_on_processor: Iterable[str] = tuple(),
        package_type: PackageTypeSelector = "deb",
    ) -> None:
        self._restricted_api(allowed_plugins={"lua"})
        package_processors = self._feature_set.all_package_processors
        dependencies = set()
        processor_key = (self._plugin_name, processor_id)

        if processor_key in package_processors:
            raise PluginConflictError(
                f"The plugin {self._plugin_name} already registered a processor with id {processor_id}",
                self._plugin_metadata,
                self._plugin_metadata,
            )

        for depends_ref in depends_on_processor:
            if isinstance(depends_ref, str):
                if (self._plugin_name, depends_ref) in package_processors:
                    depends_key = (self._plugin_name, depends_ref)
                elif ("debputy", depends_ref) in package_processors:
                    depends_key = ("debputy", depends_ref)
                else:
                    raise ValueError(
                        f'Could not resolve dependency "{depends_ref}" for'
                        f' "{processor_id}". It was not provided by the plugin itself'
                        f" ({self._plugin_name}) nor debputy."
                    )
            else:
                # TODO: Add proper dependencies first, at which point we should probably resolve "name"
                # via the direct dependencies.
                assert False

            existing_processor = package_processors.get(depends_key)
            if existing_processor is None:
                # We currently require the processor to be declared already. If this ever changes,
                # PluginProvidedFeatureSet.package_processors_in_order will need an update
                dplugin_name, dprocessor_name = depends_key
                available_processors = ", ".join(
                    n for p, n in package_processors.keys() if p == dplugin_name
                )
                raise ValueError(
                    f"The plugin {dplugin_name} does not provide a processor called"
                    f" {dprocessor_name}. Available processors for that plugin are:"
                    f" {available_processors}"
                )
            dependencies.add(depends_key)

        package_processors[processor_key] = PluginProvidedPackageProcessor(
            processor_id,
            resolve_package_type_selectors(package_type),
            processor,
            frozenset(dependencies),
            self._plugin_metadata,
        )

        def _unload() -> None:
            del package_processors[processor_key]

        self._unloaders.append(_unload)

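    # Illustrative sketch (hypothetical, not part of this module): registering
    # a package processor that runs after another processor. This API is
    # limited to debputy itself plus the "lua" plugin (see _restricted_api);
    # the ids and the three-parameter shape of the callable are assumptions
    # about the PackageProcessor type.
    #
    #     def _compress_foo(fs_root, unused, context) -> None:
    #         ...  # transform the package file-system tree here
    #
    #     api.package_processor(
    #         "compress-foo",
    #         _compress_foo,
    #         depends_on_processor=["strip"],  # hypothetical pre-existing processor
    #     )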
    def automatic_discard_rule(
        self,
        name: str,
        should_discard: Callable[[VirtualPath], bool],
        *,
        rule_reference_documentation: Optional[str] = None,
        examples: Union[
            AutomaticDiscardRuleExample, Sequence[AutomaticDiscardRuleExample]
        ] = tuple(),
    ) -> None:
        """Register an automatic discard rule

        An automatic discard rule is applied to *every* path about to be installed into any package.
        If any discard rule concludes that a path should not be installed, then the path is not installed.
        In the case where the discarded path is a:

         * directory: Then the entire directory is excluded along with anything beneath it.
         * symlink: Then the symlink itself (but not its target) is excluded.
         * hardlink: Then the current hardlink will not be installed, but other instances of it will be.

        Note: Discarded files are *never* deleted by `debputy`. They just make `debputy` skip the file.

        Automatic discard rules should be written with the assumption that directories will be tested
        before their content *when it is relevant* for the discard rule to examine whether the directory
        can be excluded.

        The packager can, via the manifest, overrule automatic discard rules by explicitly listing the
        path without any globs. As an example:

            installations:
              - install:
                  sources:
                    - usr/lib/libfoo.la  # <-- This path is always installed
                                         #     (Discard rules are never asked in this case)
                    #
                    - usr/lib/*.so*      # <-- Discard rules apply to any path beneath usr/lib and can exclude matches
                                         #     Though, they will not examine `libfoo.la` as it has already been installed
                    #
                    # Note: usr/lib itself is never tested in this case (it is assumed to be
                    # explicitly requested). But any subdir of usr/lib will be examined.

        When an automatic discard rule is evaluated, it can see the source path currently being considered
        for installation. While it can look at "surrounding" context (like the parent directory), it will not
        know whether those paths are to be installed or will be installed.

        :param name: A user-visible name for the discard rule. It can be used on the command line, so avoid shell
          metacharacters and spaces.
        :param should_discard: A callable that is the implementation of the automatic discard rule. It will receive
          a VirtualPath representing the *source* path about to be installed. If the callable returns `True`, then the
          path is discarded. If it returns `False`, the path is not discarded (by this rule at least).
          A source path will either be from the root of the source tree or the root of a search directory such as
          `debian/tmp`. Where the path will be installed is not available at the time the discard rule is
          evaluated.
        :param rule_reference_documentation: Optionally, the reference documentation to be shown when a user
          looks up this automatic discard rule.
        :param examples: Provide examples for the rule. Use the automatic_discard_rule_example function to
          generate the examples.
        """
        self._restricted_api()
        auto_discard_rules = self._feature_set.auto_discard_rules
        existing = auto_discard_rules.get(name)
        if existing is not None:
            if existing.plugin_metadata.plugin_name == self._plugin_name:
                message = (
                    f"Bug in the plugin {self._plugin_name}: It tried to register the"
                    f' automatic discard rule "{name}" twice.'
                )
            else:
                message = (
                    f"The plugins {existing.plugin_metadata.plugin_name} and {self._plugin_name}"
                    f" both tried to provide the automatic discard rule {name}"
                )
            raise PluginConflictError(
                message, existing.plugin_metadata, self._plugin_metadata
            )
        examples = (
            (examples,)
            if isinstance(examples, AutomaticDiscardRuleExample)
            else tuple(examples)
        )
        auto_discard_rules[name] = PluginProvidedDiscardRule(
            name,
            self._plugin_metadata,
            should_discard,
            rule_reference_documentation,
            examples,
        )

        def _unload() -> None:
            del auto_discard_rules[name]

        self._unloaders.append(_unload)

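    # Illustrative sketch (hypothetical, not part of this module): a discard
    # rule matching the docstring above. automatic_discard_rule is
    # debputy-internal (see _restricted_api); the rule name is made up.
    #
    #     def _is_la_file(path: VirtualPath) -> bool:
    #         # Match by name only (assumes VirtualPath exposes a `name` attribute).
    #         return path.name.endswith(".la")
    #
    #     api.automatic_discard_rule(
    #         "la-files",
    #         _is_la_file,
    #         rule_reference_documentation="Discards libtool .la files",
    #     )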
    def service_provider(
        self,
        service_manager: str,
        detector: ServiceDetector,
        integrator: ServiceIntegrator,
    ) -> None:
        self._restricted_api()
        service_managers = self._feature_set.service_managers
        existing = service_managers.get(service_manager)
        if existing is not None:
            if existing.plugin_metadata.plugin_name == self._plugin_name:
                message = (
                    f"Bug in the plugin {self._plugin_name}: It tried to register the"
                    f' service manager "{service_manager}" twice.'
                )
            else:
                message = (
                    f"The plugins {existing.plugin_metadata.plugin_name} and {self._plugin_name}"
                    f' both tried to provide the service manager "{service_manager}"'
                )
            raise PluginConflictError(
                message, existing.plugin_metadata, self._plugin_metadata
            )
        service_managers[service_manager] = ServiceManagerDetails(
            service_manager,
            detector,
            integrator,
            self._plugin_metadata,
        )

        def _unload() -> None:
            del service_managers[service_manager]

        self._unloaders.append(_unload)

    def manifest_variable(
        self,
        variable_name: str,
        value: str,
        variable_reference_documentation: Optional[str] = None,
    ) -> None:
        self._check_variable_name(variable_name)
        manifest_variables = self._feature_set.manifest_variables
        try:
            resolved_value = self._substitution.substitute(
                value, "Plugin initialization"
            )
            depends_on_variable = resolved_value != value
        except DebputySubstitutionError:
            depends_on_variable = True
        if depends_on_variable:
            raise ValueError(
                f"The plugin {self._plugin_name} attempted to declare {variable_name} with value {value!r}."
                f" This value depends on another variable, which is not supported. This restriction may be"
                f" lifted in the future."
            )

        manifest_variables[variable_name] = PluginProvidedManifestVariable(
            self._plugin_metadata,
            variable_name,
            value,
            is_context_specific_variable=False,
            variable_reference_documentation=variable_reference_documentation,
        )

        def _unload() -> None:
            # We need to check it was never resolved
            raise PluginInitializationError(
                "Cannot unload manifest_variable (not implemented)"
            )

        self._unloaders.append(_unload)

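    # Illustrative sketch (hypothetical, not part of this module): declaring a
    # static manifest variable. The name and value are made up; the value must
    # not reference another variable (enforced above).
    #
    #     api.manifest_variable(
    #         "foo-data-dir",
    #         "/usr/share/foo",
    #         variable_reference_documentation="Directory holding foo's data files",
    #     )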
    @property
    def _plugin_name(self) -> str:
        return self._plugin_metadata.plugin_name

    def provide_manifest_keyword(
        self,
        rule_type: TTP,
        rule_name: Union[str, List[str]],
        handler: DIPKWHandler,
        *,
        inline_reference_documentation: Optional[ParserDocumentation] = None,
    ) -> None:
        self._restricted_api()
        parser_generator = self._feature_set.manifest_parser_generator
        if rule_type not in parser_generator.dispatchable_table_parsers:
            types = ", ".join(
                sorted(x.__name__ for x in parser_generator.dispatchable_table_parsers)
            )
            raise ValueError(
                f"The rule_type was not a supported type. It must be one of {types}"
            )
        dispatching_parser = parser_generator.dispatchable_table_parsers[rule_type]
        dispatching_parser.register_keyword(
            rule_name,
            handler,
            self._plugin_metadata,
            inline_reference_documentation=inline_reference_documentation,
        )

        def _unload() -> None:
            raise PluginInitializationError(
                "Cannot unload provide_manifest_keyword (not implemented)"
            )

        self._unloaders.append(_unload)

    def pluggable_object_parser(
        self,
        rule_type: str,
        rule_name: str,
        *,
        object_parser_key: Optional[str] = None,
        on_end_parse_step: Optional[
            Callable[
                [str, Optional[Mapping[str, Any]], AttributePath, ParserContextData],
                None,
            ]
        ] = None,
        nested_in_package_context: bool = False,
    ) -> None:
        self._restricted_api()
        if object_parser_key is None:
            object_parser_key = rule_name

        parser_generator = self._feature_set.manifest_parser_generator
        dispatchable_object_parsers = parser_generator.dispatchable_object_parsers
        if rule_type not in dispatchable_object_parsers:
            types = ", ".join(sorted(dispatchable_object_parsers))
            raise ValueError(
                f"The rule_type was not a supported type. It must be one of {types}"
            )
        if object_parser_key not in dispatchable_object_parsers:
            types = ", ".join(sorted(dispatchable_object_parsers))
            raise ValueError(
                f"The object_parser_key was not a supported type. It must be one of {types}"
            )
        parent_dispatcher = dispatchable_object_parsers[rule_type]
        child_dispatcher = dispatchable_object_parsers[object_parser_key]
        parent_dispatcher.register_child_parser(
            rule_name,
            child_dispatcher,
            self._plugin_metadata,
            on_end_parse_step=on_end_parse_step,
            nested_in_package_context=nested_in_package_context,
        )

        def _unload() -> None:
            raise PluginInitializationError(
                "Cannot unload pluggable_object_parser (not implemented)"
            )

        self._unloaders.append(_unload)

    def pluggable_manifest_rule(
        self,
        rule_type: Union[TTP, str],
        rule_name: Union[str, List[str]],
        parsed_format: Type[PF],
        handler: DIPHandler,
        *,
        source_format: Optional[SF] = None,
        inline_reference_documentation: Optional[ParserDocumentation] = None,
    ) -> None:
        self._restricted_api()
        feature_set = self._feature_set
        parser_generator = feature_set.manifest_parser_generator
        if isinstance(rule_type, str):
            if rule_type not in parser_generator.dispatchable_object_parsers:
                types = ", ".join(sorted(parser_generator.dispatchable_object_parsers))
                raise ValueError(
                    f"The rule_type was not a supported type. It must be one of {types}"
                )
            dispatching_parser = parser_generator.dispatchable_object_parsers[rule_type]
        else:
            if rule_type not in parser_generator.dispatchable_table_parsers:
                types = ", ".join(
                    sorted(
                        x.__name__ for x in parser_generator.dispatchable_table_parsers
                    )
                )
                raise ValueError(
                    f"The rule_type was not a supported type. It must be one of {types}"
                )
            dispatching_parser = parser_generator.dispatchable_table_parsers[rule_type]

        parser = feature_set.manifest_parser_generator.generate_parser(
            parsed_format,
            source_content=source_format,
            inline_reference_documentation=inline_reference_documentation,
        )
        dispatching_parser.register_parser(
            rule_name,
            parser,
            handler,
            self._plugin_metadata,
        )

        def _unload() -> None:
            raise PluginInitializationError(
                "Cannot unload pluggable_manifest_rule (not implemented)"
            )

        self._unloaders.append(_unload)

    def known_packaging_files(
        self,
        packaging_file_details: KnownPackagingFileInfo,
    ) -> None:
        known_packaging_files = self._feature_set.known_packaging_files
        detection_method = packaging_file_details.get(
            "detection_method", cast("Literal['path']", "path")
        )
        path = packaging_file_details.get("path")
        dhpkgfile = packaging_file_details.get("pkgfile")

        packaging_file_details: KnownPackagingFileInfo = packaging_file_details.copy()

        if detection_method == "path":
            if dhpkgfile is not None:
                raise ValueError(
                    'The "pkgfile" attribute cannot be used when detection-method is "path" (or omitted)'
                )
            if path != _normalize_path(path, with_prefix=False):
                raise ValueError(
                    f"The path for known packaging files must be normalized. Please replace"
                    f' "{path}" with "{_normalize_path(path, with_prefix=False)}"'
                )
            detection_value = path
        else:
            assert detection_method == "dh.pkgfile"
            if path is not None:
                raise ValueError(
                    'The "path" attribute cannot be used when detection-method is "dh.pkgfile"'
                )
            if "/" in dhpkgfile:
                raise ValueError(
                    'The "pkgfile" attribute must be a name stem such as "install" (no "/" are allowed)'
                )
            detection_value = dhpkgfile
        key = f"{detection_method}::{detection_value}"
        existing = known_packaging_files.get(key)
        if existing is not None:
            if existing.plugin_metadata.plugin_name != self._plugin_name:
                message = (
                    f'The key "{key}" is registered twice for known packaging files.'
                    f" Once by {existing.plugin_metadata.plugin_name} and once by {self._plugin_name}"
                )
            else:
                message = (
                    f"Bug in the plugin {self._plugin_name}: It tried to register the"
                    f' key "{key}" twice for known packaging files.'
                )
            raise PluginConflictError(
                message, existing.plugin_metadata, self._plugin_metadata
            )
        _validate_known_packaging_file_dh_compat_rules(
            packaging_file_details.get("dh_compat_rules")
        )
        known_packaging_files[key] = PluginProvidedKnownPackagingFile(
            packaging_file_details,
            detection_method,
            detection_value,
            self._plugin_metadata,
        )

        def _unload() -> None:
            del known_packaging_files[key]

        self._unloaders.append(_unload)

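    # Illustrative sketch (hypothetical, not part of this module): registering
    # knowledge about a packaging file. Only keys consumed above are shown
    # ("path" with the default "path" detection method); KnownPackagingFileInfo
    # may define additional keys not shown here.
    #
    #     api.known_packaging_files(
    #         {
    #             "path": "debian/foo.conf",
    #             "detection_method": "path",
    #         }
    #     )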
    def register_mapped_type(
        self,
        type_mapping: TypeMapping,
        *,
        reference_documentation: Optional[TypeMappingDocumentation] = None,
    ) -> None:
        self._restricted_api()
        target_type = type_mapping.target_type
        mapped_types = self._feature_set.mapped_types
        existing = mapped_types.get(target_type)
        if existing is not None:
            if existing.plugin_metadata.plugin_name != self._plugin_name:
                message = (
                    f'The type "{target_type.__name__}" is registered twice for mapped types.'
                    f" Once by {existing.plugin_metadata.plugin_name} and once by {self._plugin_name}"
                )
            else:
                message = (
                    f"Bug in the plugin {self._plugin_name}: It tried to register the"
                    f' type "{target_type.__name__}" twice for mapped types.'
                )
            raise PluginConflictError(
                message, existing.plugin_metadata, self._plugin_metadata
            )
        parser_generator = self._feature_set.manifest_parser_generator
        mapped_types[target_type] = PluginProvidedTypeMapping(
            type_mapping, reference_documentation, self._plugin_metadata
        )
        parser_generator.register_mapped_type(type_mapping)

    def _restricted_api(
        self,
        *,
        allowed_plugins: Union[Set[str], FrozenSet[str]] = frozenset(),
    ) -> None:
        if self._plugin_name != "debputy" and self._plugin_name not in allowed_plugins:
            raise PluginAPIViolationError(
                f"Plugin {self._plugin_name} attempted to access a debputy-only API."
                " If you are the maintainer of this plugin and want access to this"
                " API, please file a feature request to make this public."
                " (The API is currently private as it is unstable.)"
            )

class MaintscriptAccessorProviderBase(MaintscriptAccessor, ABC):
    __slots__ = ()

    def _append_script(
        self,
        caller_name: str,
        maintscript: Maintscript,
        full_script: str,
        /,
        perform_substitution: bool = True,
    ) -> None:
        raise NotImplementedError

    @classmethod
    def _apply_condition_to_script(
        cls,
        condition: str,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
    ) -> str:
        if indent is None:
            # We auto-determine this based on heredocs currently
            indent = "<<" not in run_snippet

        if indent:
            run_snippet = "".join(" " + x for x in run_snippet.splitlines(True))
        if not run_snippet.endswith("\n"):
            run_snippet += "\n"
        condition_line = f"if {condition}; then\n"
        end_line = "fi\n"
        return "".join((condition_line, run_snippet, end_line))

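    # Illustrative example (not part of this module) of the output of
    # _apply_condition_to_script for a small snippet without a heredoc
    # (so auto-indentation kicks in):
    #
    #     MaintscriptAccessorProviderBase._apply_condition_to_script(
    #         '[ "$1" = "configure" ]',
    #         "update-foo-cache\n",
    #     )
    #     # Produces:
    #     #   if [ "$1" = "configure" ]; then
    #     #    update-foo-cache
    #     #   fi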
    def on_configure(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
        skip_on_rollback: bool = False,
    ) -> None:
        condition = POSTINST_DEFAULT_CONDITION
        if skip_on_rollback:
            condition = '[ "$1" = "configure" ]'
        return self._append_script(
            "on_configure",
            "postinst",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def on_initial_install(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        condition = '[ "$1" = "configure" -a -z "$2" ]'
        return self._append_script(
            "on_initial_install",
            "postinst",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def on_upgrade(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        condition = '[ "$1" = "configure" -a -n "$2" ]'
        return self._append_script(
            "on_upgrade",
            "postinst",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def on_upgrade_from(
        self,
        version: str,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        condition = '[ "$1" = "configure" ] && dpkg --compare-versions le-nl "$2"'
        return self._append_script(
            "on_upgrade_from",
            "postinst",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def on_before_removal(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        condition = '[ "$1" = "remove" ]'
        return self._append_script(
            "on_before_removal",
            "prerm",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def on_removed(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        condition = '[ "$1" = "remove" ]'
        return self._append_script(
            "on_removed",
            "postrm",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def on_purge(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        condition = '[ "$1" = "purge" ]'
        return self._append_script(
            "on_purge",
            "postrm",
            self._apply_condition_to_script(condition, run_snippet, indent=indent),
            perform_substitution=perform_substitution,
        )

    def unconditionally_in_script(
        self,
        maintscript: Maintscript,
        run_snippet: str,
        /,
        perform_substitution: bool = True,
    ) -> None:
        if maintscript not in STD_CONTROL_SCRIPTS:
            raise ValueError(
                f'Unknown script "{maintscript}". Should have been one of:'
                f' {", ".join(sorted(STD_CONTROL_SCRIPTS))}'
            )
        return self._append_script(
            "unconditionally_in_script",
            maintscript,
            run_snippet,
            perform_substitution=perform_substitution,
        )

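# Illustrative sketch (hypothetical, not part of this module): how plugin code
# might use the maintscript accessor defined above. The snippets and commands
# are made up; with perform_substitution left at its default, {{...}}
# substitutions are applied to the snippet.
def _example_maintscript_usage(maintscript: MaintscriptAccessor) -> None:
    # Runs only on the very first install (postinst "configure" with no prior version).
    maintscript.on_initial_install("update-foo-cache --initialize\n")
    # Runs when the package is purged (postrm "purge").
    maintscript.on_purge("rm -f /var/cache/foo/cache.db\n")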
class MaintscriptAccessorProvider(MaintscriptAccessorProviderBase):
    __slots__ = (
        "_plugin_metadata",
        "_maintscript_snippets",
        "_plugin_source_id",
        "_package_substitution",
        "_default_snippet_order",
    )

    def __init__(
        self,
        plugin_metadata: DebputyPluginMetadata,
        plugin_source_id: str,
        maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
        package_substitution: Substitution,
        *,
        default_snippet_order: Optional[Literal["service"]] = None,
    ):
        self._plugin_metadata = plugin_metadata
        self._plugin_source_id = plugin_source_id
        self._maintscript_snippets = maintscript_snippets
        self._package_substitution = package_substitution
        self._default_snippet_order = default_snippet_order

    def _append_script(
        self,
        caller_name: str,
        maintscript: Maintscript,
        full_script: str,
        /,
        perform_substitution: bool = True,
    ) -> None:
        def_source = f"{self._plugin_metadata.plugin_name} ({self._plugin_source_id})"
        if perform_substitution:
            full_script = self._package_substitution.substitute(full_script, def_source)

        snippet = MaintscriptSnippet(
            snippet=full_script,
            definition_source=def_source,
            snippet_order=self._default_snippet_order,
        )
        self._maintscript_snippets[maintscript].append(snippet)

class BinaryCtrlAccessorProviderBase(BinaryCtrlAccessor):
    __slots__ = (
        "_plugin_metadata",
        "_plugin_source_id",
        "_package_metadata_context",
        "_triggers",
        "_substvars",
        "_maintscript",
        "_shlibs_details",
    )

    def __init__(
        self,
        plugin_metadata: DebputyPluginMetadata,
        plugin_source_id: str,
        package_metadata_context: PackageProcessingContext,
        triggers: Dict[Tuple[DpkgTriggerType, str], PluginProvidedTrigger],
        substvars: FlushableSubstvars,
        shlibs_details: Tuple[Optional[str], Optional[List[str]]],
    ) -> None:
        self._plugin_metadata = plugin_metadata
        self._plugin_source_id = plugin_source_id
        self._package_metadata_context = package_metadata_context
        self._triggers = triggers
        self._substvars = substvars
        self._maintscript: Optional[MaintscriptAccessor] = None
        self._shlibs_details = shlibs_details

    def _create_maintscript_accessor(self) -> MaintscriptAccessor:
        raise NotImplementedError

    def dpkg_trigger(self, trigger_type: DpkgTriggerType, trigger_target: str) -> None:
        """Register a declarative dpkg level trigger

        The provided trigger will be added to the package's metadata (the triggers file of the control.tar).

        If the trigger has already been added previously, a second call with the same trigger data will be ignored.
        """
        key = (trigger_type, trigger_target)
        if key in self._triggers:
            return
        self._triggers[key] = PluginProvidedTrigger(
            dpkg_trigger_type=trigger_type,
            dpkg_trigger_target=trigger_target,
            provider=self._plugin_metadata,
            provider_source_id=self._plugin_source_id,
        )

    @property
    def maintscript(self) -> MaintscriptAccessor:
        maintscript = self._maintscript
        if maintscript is None:
            maintscript = self._create_maintscript_accessor()
            self._maintscript = maintscript
        return maintscript

    @property
    def substvars(self) -> FlushableSubstvars:
        return self._substvars

    def dpkg_shlibdeps(self, paths: Sequence[VirtualPath]) -> None:
        binary_package = self._package_metadata_context.binary_package
        with self.substvars.flush() as substvars_file:
            dpkg_cmd = ["dpkg-shlibdeps", f"-T{substvars_file}"]
            if binary_package.is_udeb:
                dpkg_cmd.append("-tudeb")
            if binary_package.is_essential:
                dpkg_cmd.append("-dPre-Depends")
            shlibs_local, shlib_dirs = self._shlibs_details
            if shlibs_local is not None:
                dpkg_cmd.append(f"-L{shlibs_local}")
            if shlib_dirs:
                dpkg_cmd.extend(f"-l{sd}" for sd in shlib_dirs)
            dpkg_cmd.extend(p.fs_path for p in paths)
            print_command(*dpkg_cmd)
            try:
                subprocess.check_call(dpkg_cmd)
            except subprocess.CalledProcessError:
                _error(
                    f"Attempting to auto-detect dependencies via dpkg-shlibdeps for {binary_package.name} failed. Please"
                    " review the output from dpkg-shlibdeps above to understand what went wrong."
                )

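# Illustrative sketch (hypothetical, not part of this module): a metadata
# detector using the control accessor above. The trigger target is made up, and
# item-assignment on the substvars object is assumed to be supported.
def _example_ctrl_usage(ctrl: BinaryCtrlAccessor) -> None:
    # Declarative dpkg trigger recorded in the control.tar triggers file.
    ctrl.dpkg_trigger("interest-noawait", "/usr/share/foo/plugins")
    # Extra substitution variable for use in debian/control fields.
    ctrl.substvars["misc:Recommends"] = "foo-data"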
class BinaryCtrlAccessorProvider(BinaryCtrlAccessorProviderBase):
    __slots__ = (
        "_maintscript",
        "_maintscript_snippets",
        "_package_substitution",
    )

    def __init__(
        self,
        plugin_metadata: DebputyPluginMetadata,
        plugin_source_id: str,
        package_metadata_context: PackageProcessingContext,
        triggers: Dict[Tuple[DpkgTriggerType, str], PluginProvidedTrigger],
        substvars: FlushableSubstvars,
        maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
        package_substitution: Substitution,
        shlibs_details: Tuple[Optional[str], Optional[List[str]]],
        *,
        default_snippet_order: Optional[Literal["service"]] = None,
    ) -> None:
        super().__init__(
            plugin_metadata,
            plugin_source_id,
            package_metadata_context,
            triggers,
            substvars,
            shlibs_details,
        )
        self._maintscript_snippets = maintscript_snippets
        self._package_substitution = package_substitution
        self._maintscript = MaintscriptAccessorProvider(
            plugin_metadata,
            plugin_source_id,
            maintscript_snippets,
            package_substitution,
            default_snippet_order=default_snippet_order,
        )

    def _create_maintscript_accessor(self) -> MaintscriptAccessor:
        return MaintscriptAccessorProvider(
            self._plugin_metadata,
            self._plugin_source_id,
            self._maintscript_snippets,
            self._package_substitution,
        )

class BinaryCtrlAccessorProviderCreator:
    def __init__(
        self,
        package_metadata_context: PackageProcessingContext,
        substvars: FlushableSubstvars,
        maintscript_snippets: Dict[str, MaintscriptSnippetContainer],
        substitution: Substitution,
    ) -> None:
        self._package_metadata_context = package_metadata_context
        self._substvars = substvars
        self._maintscript_snippets = maintscript_snippets
        self._substitution = substitution
        self._triggers: Dict[Tuple[DpkgTriggerType, str], PluginProvidedTrigger] = {}
        self.shlibs_details: Tuple[Optional[str], Optional[List[str]]] = None, None

    def for_plugin(
        self,
        plugin_metadata: DebputyPluginMetadata,
        plugin_source_id: str,
        *,
        default_snippet_order: Optional[Literal["service"]] = None,
    ) -> BinaryCtrlAccessor:
        return BinaryCtrlAccessorProvider(
            plugin_metadata,
            plugin_source_id,
            self._package_metadata_context,
            self._triggers,
            self._substvars,
            self._maintscript_snippets,
            self._substitution,
            self.shlibs_details,
            default_snippet_order=default_snippet_order,
        )

    def generated_triggers(self) -> Iterable[PluginProvidedTrigger]:
        return self._triggers.values()

def plugin_metadata_for_debputys_own_plugin(
    loader: Optional[PluginInitializationEntryPoint] = None,
) -> DebputyPluginMetadata:
    if loader is None:
        from debputy.plugin.debputy.debputy_plugin import initialize_debputy_features

        loader = initialize_debputy_features
    return DebputyPluginMetadata(
        plugin_name="debputy",
        api_compat_version=1,
        plugin_initializer=loader,
        plugin_loader=None,
        plugin_path="<bundled>",
    )

def load_plugin_features(
    plugin_search_dirs: Sequence[str],
    substitution: Substitution,
    requested_plugins_only: Optional[Sequence[str]] = None,
    required_plugins: Optional[Set[str]] = None,
    plugin_feature_set: Optional[PluginProvidedFeatureSet] = None,
    debug_mode: bool = False,
) -> PluginProvidedFeatureSet:
    if plugin_feature_set is None:
        plugin_feature_set = PluginProvidedFeatureSet()
    plugins = [plugin_metadata_for_debputys_own_plugin()]
    unloadable_plugins = set()
    if required_plugins:
        plugins.extend(
            find_json_plugins(
                plugin_search_dirs,
                required_plugins,
            )
        )
    if requested_plugins_only is not None:
        plugins.extend(
            find_json_plugins(
                plugin_search_dirs,
                requested_plugins_only,
            )
        )
    else:
        auto_loaded = _find_all_json_plugins(
            plugin_search_dirs,
            required_plugins if required_plugins is not None else frozenset(),
            debug_mode=debug_mode,
        )
        for plugin_metadata in auto_loaded:
            plugins.append(plugin_metadata)
            unloadable_plugins.add(plugin_metadata.plugin_name)

    for plugin_metadata in plugins:
        api = DebputyPluginInitializerProvider(
            plugin_metadata, plugin_feature_set, substitution
        )
        try:
            api.load_plugin()
        except PluginBaseError as e:
            if plugin_metadata.plugin_name not in unloadable_plugins:
                raise
            if debug_mode:
                raise
            try:
                api.unload_plugin()
            except Exception:
                _warn(
                    f"Failed to load the optional plugin {plugin_metadata.plugin_name} and an error was raised when trying to"
                    " clean up after the half-initialized plugin. Re-raising load error as the partially loaded"
                    " module might have tainted the feature set."
                )
                raise e from None
            else:
                if debug_mode:
                    _warn(
                        f"The optional plugin {plugin_metadata.plugin_name} failed during load. Re-raising due"
                        f" to --debug/-d."
                    )
                _warn(
                    f"The optional plugin {plugin_metadata.plugin_name} failed during load. The plugin was"
                    f" deactivated. Use debug mode (--debug) to show the stacktrace (the warning will become an error)"
                )

    return plugin_feature_set

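# Illustrative sketch (not part of this module): loading the full feature set
# at start-up. The search directory is hypothetical; `substitution` is assumed
# to be an already-constructed Substitution instance.
def _example_load_features(substitution: Substitution) -> PluginProvidedFeatureSet:
    return load_plugin_features(
        ["/usr/share/debputy"],
        substitution,
        debug_mode=False,
    )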
def find_json_plugin(
    search_dirs: Sequence[str],
    requested_plugin: str,
) -> DebputyPluginMetadata:
    r = list(find_json_plugins(search_dirs, [requested_plugin]))
    assert len(r) == 1
    return r[0]

def find_related_implementation_files_for_plugin(
    plugin_metadata: DebputyPluginMetadata,
) -> List[str]:
    plugin_path = plugin_metadata.plugin_path
    if not os.path.isfile(plugin_path):
        plugin_name = plugin_metadata.plugin_name
        _error(
            f"Cannot run find related files for {plugin_name}: The plugin seems to be bundled"
            " or loaded via a mechanism that does not support detecting its tests."
        )
    files = []
    module_name, module_file = _find_plugin_implementation_file(
        plugin_metadata.plugin_name,
        plugin_metadata.plugin_path,
    )
    if os.path.isfile(module_file):
        files.append(module_file)
    else:
        if not plugin_metadata.is_loaded:
            plugin_metadata.load_plugin()
        if module_name in sys.modules:
            _error(
                f'The plugin {plugin_metadata.plugin_name} uses the "module" key in its'
                f" JSON metadata file ({plugin_metadata.plugin_path}) and cannot be"
                f" installed via this method. The related Python code would not be installed"
                f" (which would result in a plugin that would fail to load)"
            )

    return files

def find_tests_for_plugin(
    plugin_metadata: DebputyPluginMetadata,
) -> List[str]:
    plugin_name = plugin_metadata.plugin_name
    plugin_path = plugin_metadata.plugin_path

    if not os.path.isfile(plugin_path):
        _error(
            f"Cannot run tests for {plugin_name}: The plugin seems to be bundled or loaded via a"
            " mechanism that does not support detecting its tests."
        )

    plugin_dir = os.path.dirname(plugin_path)
    test_basename_prefix = plugin_metadata.plugin_name.replace("-", "_")
    tests = []
    with os.scandir(plugin_dir) as dir_iter:
        for p in dir_iter:
            if (
                p.is_file()
                and p.name.startswith(test_basename_prefix)
                and PLUGIN_TEST_SUFFIX.search(p.name)
            ):
                tests.append(p.path)
    return tests

def find_json_plugins(
    search_dirs: Sequence[str],
    requested_plugins: Iterable[str],
) -> Iterable[DebputyPluginMetadata]:
    for plugin_name_or_path in requested_plugins:
        found = False
        if "/" in plugin_name_or_path:
            if not os.path.isfile(plugin_name_or_path):
                raise PluginNotFoundError(
                    f"Unable to load the plugin {plugin_name_or_path}: The path is not a file."
                    ' (Because the plugin name contains "/", it is assumed to be a path and the search path'
                    " is not used.)"
                )
            yield parse_json_plugin_desc(plugin_name_or_path)
            return
        for search_dir in search_dirs:
            path = os.path.join(
                search_dir, "debputy", "plugins", f"{plugin_name_or_path}.json"
            )
            if not os.path.isfile(path):
                continue
            found = True
            yield parse_json_plugin_desc(path)
        if not found:
            search_dir_str = ":".join(search_dirs)
            raise PluginNotFoundError(
                f"Unable to load the plugin {plugin_name_or_path}: Could not find {plugin_name_or_path}.json in the"
                f" debputy/plugins subdir of any of the search dirs ({search_dir_str})"
            )

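# Illustrative sketch (hypothetical, not part of this module): the on-disk
# layout find_json_plugins expects. A plugin "foo-cleanup" found via the search
# dir /usr/share/debputy would be described by
# /usr/share/debputy/debputy/plugins/foo-cleanup.json. The descriptor below is
# a minimal made-up example using only fields read by _json_plugin_loader
# further down; the full schema is defined by parse_json_plugin_desc and is not
# shown in this file.
def _example_write_plugin_descriptor(search_dir: str) -> str:
    import json

    descriptor = {
        "api_compat_version": 1,
        "plugin_initializer": "initialize_foo_cleanup",
    }
    path = os.path.join(search_dir, "debputy", "plugins", "foo-cleanup.json")
    with open(path, "w") as fd:
        json.dump(descriptor, fd)
    return path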
def _find_all_json_plugins(
    search_dirs: Sequence[str],
    required_plugins: AbstractSet[str],
    debug_mode: bool = False,
) -> Iterable[DebputyPluginMetadata]:
    seen = set(required_plugins)
    error_seen = False
    for search_dir in search_dirs:
        try:
            dir_fd = os.scandir(os.path.join(search_dir, "debputy", "plugins"))
        except FileNotFoundError:
            continue
        with dir_fd:
            for entry in dir_fd:
                if (
                    not entry.is_file(follow_symlinks=True)
                    or not entry.name.endswith(".json")
                    or entry.name in seen
                ):
                    continue
                try:
                    plugin_metadata = parse_json_plugin_desc(entry.path)
                except PluginBaseError as e:
                    if debug_mode:
                        raise
                    if not error_seen:
                        error_seen = True
                        _warn(
                            f"Failed to load the plugin in {entry.path} due to the following error: {e.message}"
                        )
                    else:
                        _warn(
                            f"Failed to load plugin in {entry.path} due to errors (not shown)."
                        )
                else:
                    yield plugin_metadata

def _find_plugin_implementation_file(
    plugin_name: str,
    json_file_path: str,
) -> Tuple[str, str]:
    guessed_module_basename = plugin_name.replace("-", "_")
    module_name = f"debputy.plugin.{guessed_module_basename}"
    module_fs_path = os.path.join(
        os.path.dirname(json_file_path), f"{guessed_module_basename}.py"
    )
    return module_name, module_fs_path

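# Illustrative example (not part of this module) of the name mapping performed
# above, for a hypothetical plugin "foo-cleanup":
def _example_name_mapping() -> None:
    module_name, module_fs_path = _find_plugin_implementation_file(
        "foo-cleanup",
        "/usr/share/debputy/debputy/plugins/foo-cleanup.json",
    )
    assert module_name == "debputy.plugin.foo_cleanup"
    assert module_fs_path == "/usr/share/debputy/debputy/plugins/foo_cleanup.py"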
def _resolve_module_initializer(
    plugin_name: str,
    plugin_initializer_name: str,
    module_name: Optional[str],
    json_file_path: str,
) -> PluginInitializationEntryPoint:
    module = None
    module_fs_path = None
    if module_name is None:
        module_name, module_fs_path = _find_plugin_implementation_file(
            plugin_name, json_file_path
        )
        if os.path.isfile(module_fs_path):
            spec = importlib.util.spec_from_file_location(module_name, module_fs_path)
            if spec is None:
                raise PluginInitializationError(
                    f"Failed to load {plugin_name} (path: {module_fs_path})."
                    " The spec_from_file_location function returned None."
                )
            mod = importlib.util.module_from_spec(spec)
            loader = spec.loader
            if loader is None:
                raise PluginInitializationError(
                    f"Failed to load {plugin_name} (path: {module_fs_path})."
                    " Python could not find a suitable loader (spec.loader was None)"
                )
            sys.modules[module_name] = mod
            try:
                loader.exec_module(mod)
            except (Exception, GeneratorExit) as e:
                raise PluginInitializationError(
                    f"Failed to load {plugin_name} (path: {module_fs_path})."
                    " The module threw an exception while being loaded."
                ) from e
            module = mod

    if module is None:
        try:
            module = importlib.import_module(module_name)
        except ModuleNotFoundError as e:
            if module_fs_path is None:
                raise PluginMetadataError(
                    f'The plugin defined in "{json_file_path}" wanted to load the module "{module_name}", but'
                    " this module is not available in the python search path"
                ) from e
            raise PluginInitializationError(
                f"Failed to load {plugin_name}. Tried loading it from"
                f' "{module_fs_path}" (which did not exist) and PYTHONPATH as'
                f" {module_name} (where it was not found either). Please ensure"
                " the module code is installed in the correct spot or provide an"
                f' explicit "module" definition in {json_file_path}.'
            ) from e

    plugin_initializer = getattr(module, plugin_initializer_name)

    if plugin_initializer is None:
        raise PluginMetadataError(
            f'The plugin defined in {json_file_path} claimed that module "{module_name}" would have an'
            f" attribute called {plugin_initializer_name}. However, it does not. Please correct the plugin"
            f" metadata or initializer name in the Python module."
        )
    return cast("PluginInitializationEntryPoint", plugin_initializer)

1668def _json_plugin_loader( 

-

1669 plugin_name: str, 

-

1670 plugin_json_metadata: PluginJsonMetadata, 

-

1671 json_file_path: str, 

-

1672 attribute_path: AttributePath, 

-

1673) -> Callable[["DebputyPluginInitializer"], None]: 

-

1674 api_compat = plugin_json_metadata["api_compat_version"] 

-

1675 module_name = plugin_json_metadata.get("module") 

-

1676 plugin_initializer_name = plugin_json_metadata.get("plugin_initializer") 

-

1677 packager_provided_files_raw = plugin_json_metadata.get( 

-

1678 "packager_provided_files", [] 

-

1679 ) 

-

1680 manifest_variables_raw = plugin_json_metadata.get("manifest_variables") 

-

1681 known_packaging_files_raw = plugin_json_metadata.get("known_packaging_files") 

-

1682 if api_compat != 1: 1682 ↛ 1683line 1682 didn't jump to line 1683, because the condition on line 1682 was never true

-

1683 raise PluginMetadataError( 

-

1684 f'The plugin defined in "{json_file_path}" requires API compat level {api_compat}, but this' 

-

1685 f" version of debputy only supports API compat version of 1" 

-

1686 ) 

-

1687 if plugin_initializer_name is not None and "." in plugin_initializer_name: 1687 ↛ 1688line 1687 didn't jump to line 1688, because the condition on line 1687 was never true

-

1688 p = attribute_path["plugin_initializer"] 

-

1689 raise PluginMetadataError( 

-

1690 f'The "{p}" must not contain ".". Problematic file is "{json_file_path}".' 

-

1691 ) 

-

1692 

-

1693 plugin_initializers = [] 

-

1694 

-

1695 if plugin_initializer_name is not None: 

-

1696 plugin_initializer = _resolve_module_initializer( 

-

1697 plugin_name, 

-

1698 plugin_initializer_name, 

-

1699 module_name, 

-

1700 json_file_path, 

-

1701 ) 

-

1702 plugin_initializers.append(plugin_initializer) 

-

1703 

-

1704 if known_packaging_files_raw: 1704 ↛ 1705line 1704 didn't jump to line 1705, because the condition on line 1704 was never true

-

1705 kpf_root_path = attribute_path["known_packaging_files"] 

-

1706 known_packaging_files = [] 

-

1707 for k, v in enumerate(known_packaging_files_raw): 

-

1708 kpf_path = kpf_root_path[k] 

-

1709 p = v.get("path") 

-

1710 if isinstance(p, str): 

-

1711 kpf_path.path_hint = p 

-

1712 if plugin_name.startswith("debputy-") and isinstance(v, dict): 

-

1713 docs = v.get("documentation-uris") 

-

1714 if docs is not None and isinstance(docs, list): 

-

1715 docs = [ 

-

1716 ( 

-

1717 d.replace("@DEBPUTY_DOC_ROOT_DIR@", DEBPUTY_DOC_ROOT_DIR) 

-

1718 if isinstance(d, str) 

-

1719 else d 

-

1720 ) 

-

1721 for d in docs 

-

1722 ] 

-

1723 v["documentation-uris"] = docs 

-

1724 known_packaging_file: KnownPackagingFileInfo = ( 

-

1725 PLUGIN_KNOWN_PACKAGING_FILES_PARSER.parse_input( 

-

1726 v, 

-

1727 kpf_path, 

-

1728 ) 

-

1729 ) 

-

1730 known_packaging_files.append((kpf_path, known_packaging_file)) 

-

1731 

-

1732 def _initialize_json_provided_known_packaging_files( 

-

1733 api: DebputyPluginInitializerProvider, 

-

1734 ) -> None: 

-

1735 for p, details in known_packaging_files: 

-

1736 try: 

-

1737 api.known_packaging_files(details) 

-

1738 except ValueError as ex: 

-

1739 raise PluginMetadataError( 

-

1740 f"Error while processing {p.path} defined in {json_file_path}: {ex.args[0]}" 

-

1741 ) 

-

1742 

-

1743 plugin_initializers.append(_initialize_json_provided_known_packaging_files) 

-

1744 

-

1745 if manifest_variables_raw: 

-

1746 manifest_var_path = attribute_path["manifest_variables"] 

-

1747 manifest_variables = [ 

-

1748 PLUGIN_MANIFEST_VARS_PARSER.parse_input(p, manifest_var_path[i]) 

-

1749 for i, p in enumerate(manifest_variables_raw) 

-

1750 ] 

-

1751 

-

1752 def _initialize_json_provided_manifest_vars( 

-

1753 api: DebputyPluginInitializer, 

-

1754 ) -> None: 

-

1755 for idx, manifest_variable in enumerate(manifest_variables): 

-

1756 name = manifest_variable["name"] 

-

1757 value = manifest_variable["value"] 

-

1758 doc = manifest_variable.get("reference_documentation") 

-

1759 try: 

-

1760 api.manifest_variable( 

-

1761 name, value, variable_reference_documentation=doc 

-

1762 ) 

-

1763 except ValueError as ex: 

-

1764 var_path = manifest_var_path[idx] 

-

1765 raise PluginMetadataError( 

-

1766 f"Error while processing {var_path.path} defined in {json_file_path}: {ex.args[0]}" 

-

1767 ) 

-

1768 

-

1769 plugin_initializers.append(_initialize_json_provided_manifest_vars) 

-

1770 

-

1771 if packager_provided_files_raw: 

-

1772 ppf_path = attribute_path["packager_provided_files"] 

-

1773 ppfs = [ 

-

1774 PLUGIN_PPF_PARSER.parse_input(p, ppf_path[i]) 

-

1775 for i, p in enumerate(packager_provided_files_raw) 

-

1776 ] 

-

1777 

-

1778 def _initialize_json_provided_ppfs(api: DebputyPluginInitializer) -> None: 

-

1779 ppf: PackagerProvidedFileJsonDescription 

-

1780 for idx, ppf in enumerate(ppfs): 

-

1781 c = dict(ppf) 

-

1782 stem = ppf["stem"] 

-

1783 installed_path = ppf["installed_path"] 

-

1784 default_mode = ppf.get("default_mode") 

-

1785 ref_doc_dict = ppf.get("reference_documentation") 

-

1786 if default_mode is not None: 1786 ↛ 1789line 1786 didn't jump to line 1789, because the condition on line 1786 was never false

-

1787 c["default_mode"] = default_mode.octal_mode 

-

1788 

-

1789 if ref_doc_dict is not None: 1789 ↛ 1794line 1789 didn't jump to line 1794, because the condition on line 1789 was never false

-

1790 ref_doc = packager_provided_file_reference_documentation( 

-

1791 **ref_doc_dict 

-

1792 ) 

-

1793 else: 

-

1794 ref_doc = None 

-

1795 

-

1796 for k in [ 

-

1797 "stem", 

-

1798 "installed_path", 

-

1799 "reference_documentation", 

-

1800 ]: 

-

1801 try: 

-

1802 del c[k] 

-

1803 except KeyError: 

-

1804 pass 

-

1805 

-

1806 try: 

-

1807 api.packager_provided_file(stem, installed_path, reference_documentation=ref_doc, **c) # type: ignore 

-

1808 except ValueError as ex: 

-

1809 p_path = ppf_path[idx] 

-

1810 raise PluginMetadataError( 

-

1811 f"Error while processing {p_path.path} defined in {json_file_path}: {ex.args[0]}" 

-

1812 ) 

-

1813 

-

1814 plugin_initializers.append(_initialize_json_provided_ppfs) 

-

1815 

-

1816 if not plugin_initializers: 1816 ↛ 1817line 1816 didn't jump to line 1817, because the condition on line 1816 was never true

-

1817 raise PluginMetadataError( 

-

1818 f"The plugin defined in {json_file_path} does not seem to provide features, " 

-

1819 f" such as module + plugin-initializer or packager-provided-files." 

-

1820 ) 

-

1821 

-

1822 if len(plugin_initializers) == 1: 

-

1823 return plugin_initializers[0] 

-

1824 

-

1825 def _chain_loader(api: DebputyPluginInitializer) -> None: 

-

1826 for initializer in plugin_initializers: 

-

1827 initializer(api) 

-

1828 

-

1829 return _chain_loader 

-

1830 

-

1831 
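As a side note on the module resolution summarized above, the load strategy is the standard importlib two-step (spec from the file location, otherwise the regular import machinery). A condensed, self-contained sketch of that pattern; the module name and path passed in would be hypothetical stand-ins:

import importlib
import importlib.util
import os
import sys


def load_plugin_module(module_name: str, module_fs_path: str):
    # Prefer the .py file next to the JSON descriptor, mirroring
    # _resolve_module_initializer() above.
    if os.path.isfile(module_fs_path):
        spec = importlib.util.spec_from_file_location(module_name, module_fs_path)
        if spec is None or spec.loader is None:
            raise ImportError(f"Cannot load {module_name} from {module_fs_path}")
        mod = importlib.util.module_from_spec(spec)
        # Register before exec_module() so imports inside the plugin resolve.
        sys.modules[module_name] = mod
        spec.loader.exec_module(mod)
        return mod
    # Fall back to the regular import machinery (PYTHONPATH).
    return importlib.import_module(module_name)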

-[… deleted coverage page body continued, source lines 1832-1961: the _open() context manager and parse_json_plugin_desc(), which loads the JSON descriptor, derives the plugin name from the file name (minus ".json" or ".json.in"), rejects the reserved name "debputy" and returns a lazily-loaded DebputyPluginMetadata; plus ServiceDefinitionImpl and ServiceRegistryImpl, whose register_service() requires at least one service name and rejects duplicate (name, type, scope) keys with a PluginAPIViolationError …]
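Putting the two halves together, a descriptor that parse_json_plugin_desc() plus _json_plugin_loader() would accept looks roughly like the following. This is a hedged reconstruction from the deleted code; the plugin name, initializer name and manifest variable are invented:

import json

# Would live at <search-dir>/debputy/plugins/my-plugin.json; the plugin
# name ("my-plugin") is derived from the file name and must not be "debputy".
descriptor = {
    "api_compat_version": 1,  # only 1 is accepted by _json_plugin_loader()
    # Optional; without a "module" field, the loader guesses
    # debputy.plugin.my_plugin from a my_plugin.py next to this JSON file.
    # The initializer name must not contain ".".
    "plugin_initializer": "initialize_my_plugin",
    # Declarative features may be supplied instead of (or alongside) the
    # Python initializer; at least one feature must be present overall.
    "manifest_variables": [
        {"name": "my-plugin-data-dir", "value": "/usr/share/my-plugin"},
    ],
}
print(json.dumps(descriptor, indent=2))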

diff --git a/coverage-report/d_64287305fe0c6642_impl_types_py.html b/coverage-report/d_64287305fe0c6642_impl_types_py.html
deleted file mode 100644
index ed43f9a..0000000
--- a/coverage-report/d_64287305fe0c6642_impl_types_py.html
+++ /dev/null
@@ -1,1383 +0,0 @@
-[… deleted page head: "Coverage for src/debputy/plugin/api/impl_types.py: 78%", 526 statements, coverage.py v7.2.7, created at 2024-04-07 12:14 +0200, plus the prev/index/next navigation chrome …]
-[… deleted coverage page body for src/debputy/plugin/api/impl_types.py, source lines 1-109: module imports, the TD/PF/SF/TP type variables, the DIPKWHandler/DIPHandler handler aliases, the _PACKAGE_TYPE_DEB_ONLY and _ALL_PACKAGE_TYPES frozensets, and resolve_package_type_selectors(), which normalizes a PackageTypeSelector (a single type name or an iterable of them) into a FrozenSet[str] …]
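A quick illustration of the normalization resolve_package_type_selectors() performs, assuming the import path matches the module this coverage page reports on:

from debputy.plugin.api.impl_types import resolve_package_type_selectors

assert resolve_package_type_selectors("deb") == frozenset({"deb"})
assert resolve_package_type_selectors("udeb") == frozenset({"udeb"})
assert resolve_package_type_selectors(["deb", "udeb"]) == frozenset({"deb", "udeb"})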

-[… source lines 110-237: DebputyPluginMetadata (lazy load_plugin()/initialize_plugin() lifecycle that refuses double initialization), PluginProvidedParser (pairs a DeclarativeInputParser with its handler), the PPFFormatParam TypedDict and PackagerProvidedFileClassSpec, whose compute_dest() rejects priorities on stems that do not support them, renders installed_as_format with priority/name/owning_package and applies the optional formatting_callback and post_formatting_rewrite …]
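The core of compute_dest() is a str.format() call over those three parameters followed by a path split; a stand-alone rendition with a hypothetical template (the real method additionally normalizes the directory and may apply formatting_callback / post_formatting_rewrite):

import os

path_format = "etc/cron.d/{name}"  # hypothetical installed_as_format
params = {"priority": None, "name": "acme", "owning_package": "acme"}

dest_path = path_format.format(**params)
dirname, basename = os.path.split(dest_path)
assert (dirname, basename) == ("etc/cron.d", "acme")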

-[… source lines 238-273: MetadataOrMaintscriptDetector, whose run_detector() invokes the plugin-provided detector and converts a DebputyFSIsROError into a PluginAPIViolationError (file-system mutation is not allowed at that stage) and other unexpected errors into UnhandledOrUnexpectedErrorFromPluginError …]
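For reference, a metadata detector as wrapped by run_detector() is a plain callable taking (fs_root, ctrl, context); a deliberately skeletal, hypothetical example:

def detect_nothing(fs_root, ctrl, context) -> None:
    # Real detectors inspect fs_root and record metadata through ctrl.
    # Mutating fs_root here would raise DebputyFSIsROError, which
    # run_detector() above converts into a PluginAPIViolationError.
    return None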

-[… source lines 274-350: the DeclarativeInputParser base class (inline_reference_documentation, reference_documentation_url, abstract parse_input()), DelegatingDeclarativeInputParser, and ListWrappedDeclarativeInputParser, which requires a list input and parses each element through its delegate at an indexed attribute path, pointing at the reference documentation on error …]
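The list-wrapping idea is generic enough to re-sketch independently of the class hierarchy; a minimal version assuming only a per-element parse function:

from typing import Any, Callable, List


def list_wrap(parse_one: Callable[[Any, str], Any]) -> Callable[[Any, str], List[Any]]:
    def parse_list(value: Any, path: str) -> List[Any]:
        if not isinstance(value, list):
            raise ValueError(f"The attribute {path} must be a list.")
        # Each element is parsed at an indexed path, as the delegate loop does.
        return [parse_one(elem, f"{path}[{i}]") for i, elem in enumerate(value)]

    return parse_list


parse_int = lambda v, p: int(v)  # stand-in delegate parser
assert list_wrap(parse_int)(["1", "2"], "foo") == [1, 2]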

-[… source lines 351-454: DispatchingParserBase with is_known_keyword()/registered_keywords()/parser_for(), register_keyword() for value-less keywords (which rejects per-attribute and alt-parser documentation), register_parser(), and _add_parser(), which raises PluginConflictError when two plugins claim the same rule name …]
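The conflict check in _add_parser() reduces to a first-registration-wins dictionary; a minimal sketch of that rule:

registry: dict = {}  # rule name -> owning plugin name


def claim_rule(keyword: str, plugin_name: str) -> None:
    owner = registry.get(keyword)
    if owner is not None:
        # The real code raises PluginConflictError carrying both plugins.
        raise RuntimeError(f'The rule name "{keyword}" is already taken by {owner}.')
    registry[keyword] = plugin_name


claim_rule("create-symlink", "debputy")
# claim_rule("create-symlink", "third-party")  # would raise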

-[… source lines 455-590: DispatchingObjectParser, which tracks per-attribute documentation (documented_attr/undocumented_attr), supports register_child_parser() with an optional on_end_parse_step hook and package-context nesting, and whose parse_input() requires a non-empty mapping, reports unknown attributes together with the remaining valid ones, and dispatches each key to its registered parser in registration order (parse order matters at the manifest root) …]
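The unknown-attribute diagnostic in parse_input() is two set differences over the registered keys; illustrated with hypothetical inputs:

registered = {"installations", "definitions", "packages"}
provided = {"packages", "intsallations"}  # note the typo

unknown = provided - registered        # reported first
suggestions = registered - provided    # offered as "possible attributes"
assert unknown == {"intsallations"}
assert suggestions == {"installations", "definitions"}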

-[… source lines 591-655: PackageContextData and InPackageContextParser, whose parse_input() resolves "{{…}}" substitutions in the package-name keys, enters the binary package context, rejects auto-generated packages, and parses each value through the delegate, keying the result by the raw (unsubstituted) key …]
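Note the asymmetry in that parse_input(): substitution only runs when the raw key contains "{{", and the result mapping stays keyed by the raw key while PackageContextData records the resolved name. A toy rendition with a stand-in substitution engine:

raw_key = "foo-{{DEB_HOST_ARCH}}"  # hypothetical manifest key
package_name = raw_key
if "{{" in raw_key:
    package_name = raw_key.replace("{{DEB_HOST_ARCH}}", "amd64")  # stand-in

# result[raw_key] = PackageContextData(package_name, parsed_value)
assert package_name == "foo-amd64"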

-[… source lines 656-701: DispatchingTableParser, whose parse_input() accepts either a bare action name or a single-key mapping of action to payload (rejecting multi-key mappings with a hint about missing list markers or indentation) and dispatches to the registered parser, listing the valid actions on error …]
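The accepted input shapes for the table dispatch are a bare action name or a one-key mapping; a condensed rendition of that branch logic:

entry = {"create-symlink": {"path": "usr/bin/foo", "target": "bar"}}  # hypothetical

if isinstance(entry, str):
    key, value = entry, None
elif isinstance(entry, dict):
    if len(entry) != 1:
        raise ValueError("mapping must have exactly one top-level key")
    key, value = next(iter(entry.items()))
else:
    raise ValueError("entry must be a string or a mapping")

assert key == "create-symlink"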

-[… source lines 702-782: DeclarativeValuelessKeywordInputParser, the SUPPORTED_DISPATCHABLE_TABLE_PARSERS table (InstallRule, TransformationRule, DpkgMaintscriptHelperCommand, ManifestCondition), the OPARSER_* attribute-path constants, and SUPPORTED_DISPATCHABLE_OBJECT_PARSERS with its embedded reference documentation, including a sample `packages:` manifest snippet showing create-symlink and create-directories transformations …]

-[… source lines 783-889: PluginProvidedManifestVariable (with is_internal/is_token helpers and resolve()), AutomaticDiscardRuleExample, and automatic_discard_rule_example(), whose docstring and doctests explain how to declare example paths with per-path keep/discard verdicts (verdict defaults to discard; "..." is an ordinary path name used to mean "anywhere") for display via `debputy plugin show automatic-discard-rules <name-here>` …]
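Usage mirroring the doctest that shipped with automatic_discard_rule_example(): one ".la" file discarded plus a kept ".so" for contrast:

from debputy.plugin.api.impl_types import automatic_discard_rule_example

example = automatic_discard_rule_example(
    "usr/lib/libfoo.la",                 # verdict defaults to True (discard)
    ("usr/lib/libfoo.so.1.0.0", False),  # explicitly kept
)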

-[… source lines 890-933: PluginProvidedPackageProcessor (with applies_to(), dependency_id and run_package_processor()), PluginProvidedDiscardRule (name, discard_check, reference documentation and examples), and the ServiceManagerDetails record …]

-

934ReferenceValue = TypedDict( 

-

935 "ReferenceValue", 

-

936 { 

-

937 "description": str, 

-

938 }, 

-

939) 

-

940 

-

941 

-

942def _reference_data_value( 

-

943 *, 

-

944 description: str, 

-

945) -> ReferenceValue: 

-

946 return { 

-

947 "description": description, 

-

948 } 

-

949 

-

950 

-

951KnownPackagingFileCategories = Literal[ 

-

952 "generated", 

-

953 "generic-template", 

-

954 "ppf-file", 

-

955 "ppf-control-file", 

-

956 "maint-config", 

-

957 "pkg-metadata", 

-

958 "pkg-helper-config", 

-

959 "testing", 

-

960 "lint-config", 

-

961] 

-

962KNOWN_PACKAGING_FILE_CATEGORY_DESCRIPTIONS: Mapping[ 

-

963 KnownPackagingFileCategories, ReferenceValue 

-

964] = { 

-

965 "generated": _reference_data_value( 

-

966 description="The file is (likely) generated from another file" 

-

967 ), 

-

968 "generic-template": _reference_data_value( 

-

969 description="The file is (likely) a generic template that generates a known packaging file. While the" 

-

970 " file is annotated as if it was the target file, the file might uses a custom template" 

-

971 " language inside it." 

-

972 ), 

-

973 "ppf-file": _reference_data_value( 

-

974 description="Packager provided file to be installed on the file system - usually as-is." 

-

975 " When `install-pattern` or `install-path` are provided, this is where the file is installed." 

-

976 ), 

-

977 "ppf-control-file": _reference_data_value( 

-

978 description="Packager provided file that becomes a control file - possible after processing. " 

-

979 " If `install-pattern` or `install-path` are provided, they denote where the is placed" 

-

980 " (generally, this will be of the form `DEBIAN/<name>`)" 

-

981 ), 

-

982 "maint-config": _reference_data_value( 

-

983 description="Maintenance configuration for a specific tool that the maintainer uses (tool / style preferences)" 

-

984 ), 

-

985 "pkg-metadata": _reference_data_value( 

-

986 description="The file is related to standard package metadata (usually documented in Debian Policy)" 

-

987 ), 

-

988 "pkg-helper-config": _reference_data_value( 

-

989 description="The file is packaging helper configuration or instruction file" 

-

990 ), 

-

991 "testing": _reference_data_value( 

-

992 description="The file is related to automated testing (autopkgtests, salsa/gitlab CI)." 

-

993 ), 

-

994 "lint-config": _reference_data_value( 

-

995 description="The file is related to a linter (such as overrides for false-positives or style preferences)" 

-

996 ), 

-

997} 

-

998 

-

999KnownPackagingConfigFeature = Literal[ 

-

1000 "dh-filearray", 

-

1001 "dh-filedoublearray", 

-

1002 "dh-hash-subst", 

-

1003 "dh-dollar-subst", 

-

1004 "dh-glob", 

-

1005 "dh-partial-glob", 

-

1006 "dh-late-glob", 

-

1007 "dh-glob-after-execute", 

-

1008 "dh-executable-config", 

-

1009 "dh-custom-format", 

-

1010 "dh-file-list", 

-

1011 "dh-install-list", 

-

1012 "dh-install-list-dest-dir-like-dh_install", 

-

1013 "dh-install-list-fixed-dest-dir", 

-

1014 "dh-fixed-dest-dir", 

-

1015 "dh-exec-rename", 

-

1016 "dh-docs-only", 

-

1017] 

-

1018 

-

1019KNOWN_PACKAGING_FILE_CONFIG_FEATURE_DESCRIPTION: Mapping[ 

-

1020 KnownPackagingConfigFeature, ReferenceValue 

-

1021] = { 

-

1022 "dh-filearray": _reference_data_value( 

-

1023 description="The file will be read as a list of space/newline separated tokens", 

-

1024 ), 

-

1025 "dh-filedoublearray": _reference_data_value( 

-

1026 description="Each line in the file will be read as a list of space-separated tokens", 

-

1027 ), 

-

1028 "dh-hash-subst": _reference_data_value( 

-

1029 description="Supports debhelper #PACKAGE# style substitutions (udebs often excluded)", 

-

1030 ), 

-

1031 "dh-dollar-subst": _reference_data_value( 

-

1032 description="Supports debhelper ${PACKAGE} style substitutions (usually requires compat 13+)", 

-

1033 ), 

-

1034 "dh-glob": _reference_data_value( 

-

1035 description="Supports standard debhelper globing", 

-

1036 ), 

-

1037 "dh-partial-glob": _reference_data_value( 

-

1038 description="Supports standard debhelper globing but only to a subset of the values (implies dh-late-glob)", 

-

1039 ), 

-

1040 "dh-late-glob": _reference_data_value( 

-

1041 description="Globbing is done separately instead of using the built-in function", 

-

1042 ), 

-

1043 "dh-glob-after-execute": _reference_data_value( 

-

1044 description="When the dh config file is executable, the generated output will be subject to globbing", 

-

1045 ), 

-

1046 "dh-executable-config": _reference_data_value( 

-

1047 description="If marked executable, debhelper will execute the file and read its output", 

-

1048 ), 

-

1049 "dh-custom-format": _reference_data_value( 

-

1050 description="The dh tool will or may have a custom parser for this file", 

-

1051 ), 

-

1052 "dh-file-list": _reference_data_value( 

-

1053 description="The dh file contains a list of paths to be processed", 

-

1054 ), 

-

1055 "dh-install-list": _reference_data_value( 

-

1056 description="The dh file contains a list of paths/globs to be installed but the tool specific knowledge" 

-

1057 " required to understand the file cannot be conveyed via this interface.", 

-

1058 ), 

-

1059 "dh-install-list-dest-dir-like-dh_install": _reference_data_value( 

-

1060 description="The dh file is processed similar to dh_install (notably dest-dir handling derived" 

-

1061 " from the path or the last token on the line)", 

-

1062 ), 

-

1063 "dh-install-list-fixed-dest-dir": _reference_data_value( 

-

1064 description="The dh file is an install list and the dest-dir is always the same for all patterns" 

-

1065 " (when `install-pattern` or `install-path` are provided, they identify the directory - not the file location)", 

-

1066 ), 

-

1067 "dh-exec-rename": _reference_data_value( 

-

1068 description="When `dh-exec` is the interpreter of this dh config file, its renaming (=>) feature can be" 

-

1069 " requested/used", 

-

1070 ), 

-

1071 "dh-docs-only": _reference_data_value( 

-

1072 description="The dh config file is used for documentation only. Implicit <!nodocs> Build-Profiles support", 

-

1073 ), 

-

1074} 

-

1075 

-

1076CONFIG_FEATURE_ALIASES: Dict[ 

-

1077 KnownPackagingConfigFeature, List[Tuple[KnownPackagingConfigFeature, int]] 

-

1078] = { 

-

1079 "dh-filearray": [ 

-

1080 ("dh-filearray", 0), 

-

1081 ("dh-executable-config", 9), 

-

1082 ("dh-dollar-subst", 13), 

-

1083 ], 

-

1084 "dh-filedoublearray": [ 

-

1085 ("dh-filedoublearray", 0), 

-

1086 ("dh-executable-config", 9), 

-

1087 ("dh-dollar-subst", 13), 

-

1088 ], 

-

1089} 

-

1090 

-

1091 

-

def _implies(
    features: List[KnownPackagingConfigFeature],
    seen: Set[KnownPackagingConfigFeature],
    implying: Sequence[KnownPackagingConfigFeature],
    implied: KnownPackagingConfigFeature,
) -> None:
    if implied in seen:
        return
    if all(f in seen for f in implying):
        seen.add(implied)
        features.append(implied)


def expand_known_packaging_config_features(
    compat_level: int,
    features: List[KnownPackagingConfigFeature],
) -> List[KnownPackagingConfigFeature]:
    final_features: List[KnownPackagingConfigFeature] = []
    seen = set()
    for feature in features:
        expanded = CONFIG_FEATURE_ALIASES.get(feature)
        if not expanded:
            expanded = [(feature, 0)]
        for v, c in expanded:
            if compat_level < c or v in seen:
                continue
            seen.add(v)
            final_features.append(v)
    if "dh-glob" in seen and "dh-late-glob" in seen:
        final_features.remove("dh-glob")

    _implies(final_features, seen, ["dh-partial-glob"], "dh-late-glob")
    _implies(
        final_features,
        seen,
        ["dh-late-glob", "dh-executable-config"],
        "dh-glob-after-execute",
    )
    return sorted(final_features)
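# Editor-added illustration (not part of the original file): how the alias
# expansion above behaves. At compat level 13, "dh-filearray" pulls in both
# implied features from CONFIG_FEATURE_ALIASES; at compat 9 the dollar-subst
# entry is filtered out by its minimum compat level (13).
#
#   expand_known_packaging_config_features(13, ["dh-filearray"])
#   # -> ["dh-dollar-subst", "dh-executable-config", "dh-filearray"]
#   expand_known_packaging_config_features(9, ["dh-filearray"])
#   # -> ["dh-executable-config", "dh-filearray"]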

class InstallPatternDHCompatRule(DebputyParsedContent):
    install_pattern: NotRequired[str]
    add_config_features: NotRequired[List[KnownPackagingConfigFeature]]
    starting_with_compat_level: NotRequired[int]


class KnownPackagingFileInfo(DebputyParsedContent):
    # Exposed directly in the JSON plugin parsing; be careful with changes
    path: NotRequired[str]
    pkgfile: NotRequired[str]
    detection_method: NotRequired[Literal["path", "dh.pkgfile"]]
    file_categories: NotRequired[List[KnownPackagingFileCategories]]
    documentation_uris: NotRequired[List[str]]
    debputy_cmd_templates: NotRequired[List[List[str]]]
    debhelper_commands: NotRequired[List[str]]
    config_features: NotRequired[List[KnownPackagingConfigFeature]]
    install_pattern: NotRequired[str]
    dh_compat_rules: NotRequired[List[InstallPatternDHCompatRule]]
    default_priority: NotRequired[int]
    post_formatting_rewrite: NotRequired[Literal["period-to-underscore"]]
    packageless_is_fallback_for_all_packages: NotRequired[bool]


@dataclasses.dataclass(slots=True)
class PluginProvidedKnownPackagingFile:
    info: KnownPackagingFileInfo
    detection_method: Literal["path", "dh.pkgfile"]
    detection_value: str
    plugin_metadata: DebputyPluginMetadata


@dataclasses.dataclass(slots=True, frozen=True)
class PluginProvidedTypeMapping:
    mapped_type: TypeMapping[Any, Any]
    reference_documentation: Optional[TypeMappingDocumentation]
    plugin_metadata: DebputyPluginMetadata


class PackageDataTable:
    def __init__(self, package_data_table: Mapping[str, "BinaryPackageData"]) -> None:
        self._package_data_table = package_data_table
        # This is enabled for metadata-detectors. But it is deliberately not enabled for package
        # processors, because it is not clear how it should interact with dependencies. For
        # metadata-detectors, things are read-only and there are no dependencies, so we cannot
        # "get them wrong".
        self.enable_cross_package_checks = False

    def __iter__(self) -> Iterator["BinaryPackageData"]:
        return iter(self._package_data_table.values())

    def __getitem__(self, item: str) -> "BinaryPackageData":
        return self._package_data_table[item]

    def __contains__(self, item: str) -> bool:
        return item in self._package_data_table


class PackageProcessingContextProvider(PackageProcessingContext):
    __slots__ = (
        "_manifest",
        "_binary_package",
        "_related_udeb_package",
        "_package_data_table",
        "_cross_check_cache",
    )

    def __init__(
        self,
        manifest: "HighLevelManifest",
        binary_package: BinaryPackage,
        related_udeb_package: Optional[BinaryPackage],
        package_data_table: PackageDataTable,
    ) -> None:
        self._manifest = manifest
        self._binary_package = binary_package
        self._related_udeb_package = related_udeb_package
        self._package_data_table = ref(package_data_table)
        self._cross_check_cache: Optional[
            Sequence[Tuple[BinaryPackage, "VirtualPath"]]
        ] = None

    def _package_state_for(
        self,
        package: BinaryPackage,
    ) -> "PackageTransformationDefinition":
        return self._manifest.package_state_for(package.name)

    def _package_version_for(
        self,
        package: BinaryPackage,
    ) -> str:
        package_state = self._package_state_for(package)
        version = package_state.binary_version
        if version is not None:
            return version
        return self._manifest.source_version(
            include_binnmu_version=not package.is_arch_all
        )

    @property
    def binary_package(self) -> BinaryPackage:
        return self._binary_package

    @property
    def related_udeb_package(self) -> Optional[BinaryPackage]:
        return self._related_udeb_package

    @property
    def binary_package_version(self) -> str:
        return self._package_version_for(self._binary_package)

    @property
    def related_udeb_package_version(self) -> Optional[str]:
        udeb = self._related_udeb_package
        if udeb is None:
            return None
        return self._package_version_for(udeb)

    def accessible_package_roots(self) -> Iterable[Tuple[BinaryPackage, "VirtualPath"]]:
        package_table = self._package_data_table()
        if package_table is None:
            raise ReferenceError(
                "Internal error: package_table was garbage collected too early"
            )
        if not package_table.enable_cross_package_checks:
            raise PluginAPIViolationError(
                "Cross package content checks are not available at this time."
            )
        cache = self._cross_check_cache
        if cache is None:
            matches = []
            pkg = self.binary_package
            for pkg_data in package_table:
                if pkg_data.binary_package.name == pkg.name:
                    continue
                res = package_cross_check_precheck(pkg, pkg_data.binary_package)
                if not res[0]:
                    continue
                matches.append((pkg_data.binary_package, pkg_data.fs_root))
            cache = tuple(matches) if matches else tuple()
            self._cross_check_cache = cache
        return cache
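# Editor-added sketch (not part of the original file): a metadata detector
# doing a cross-package check via the context. This only works while
# enable_cross_package_checks is set on the PackageDataTable, which per the
# comment above is the case for metadata detectors only.
#
#   for other_pkg, other_fs_root in context.accessible_package_roots():
#       if other_fs_root.lookup("./usr/share/foo") is not None:
#           ...  # react to content shipped by the sibling package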

@dataclasses.dataclass(slots=True, frozen=True)
class PluginProvidedTrigger:
    dpkg_trigger_type: DpkgTriggerType
    dpkg_trigger_target: str
    provider: DebputyPluginMetadata
    provider_source_id: str

    def serialized_format(self) -> str:
        return f"{self.dpkg_trigger_type} {self.dpkg_trigger_target}"
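# Editor-added illustration: serialized_format simply joins the two fields,
# which matches the line format of a dpkg triggers control file, e.g.
#
#   PluginProvidedTrigger(
#       dpkg_trigger_type="activate-noawait",
#       dpkg_trigger_target="ldconfig",
#       provider=plugin_metadata,       # assumed to be in scope
#       provider_source_id="...",
#   ).serialized_format()
#   # -> "activate-noawait ldconfig"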
diff --git a/coverage-report/d_64287305fe0c6642_plugin_parser_py.html b/coverage-report/d_64287305fe0c6642_plugin_parser_py.html
deleted file mode 100644
index e9a7713..0000000
--- a/coverage-report/d_64287305fe0c6642_plugin_parser_py.html
+++ /dev/null
@@ -1,165 +0,0 @@

[Deleted coverage report page for src/debputy/plugin/api/plugin_parser.py: 100% of 35 statements covered; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200. The embedded source listing follows.]

from typing import NotRequired, List, Any, TypedDict

from debputy.manifest_parser.base_types import (
    DebputyParsedContent,
    OctalMode,
    TypeMapping,
)
from debputy.manifest_parser.declarative_parser import ParserGenerator
from debputy.plugin.api.impl_types import KnownPackagingFileInfo


class PPFReferenceDocumentation(TypedDict):
    description: NotRequired[str]
    format_documentation_uris: NotRequired[List[str]]


class PackagerProvidedFileJsonDescription(DebputyParsedContent):
    stem: str
    installed_path: str
    default_mode: NotRequired[OctalMode]
    default_priority: NotRequired[int]
    allow_name_segment: NotRequired[bool]
    allow_architecture_segment: NotRequired[bool]
    reference_documentation: NotRequired[PPFReferenceDocumentation]


class ManifestVariableJsonDescription(DebputyParsedContent):
    name: str
    value: str
    reference_documentation: NotRequired[str]


class PluginJsonMetadata(DebputyParsedContent):
    api_compat_version: int
    module: NotRequired[str]
    plugin_initializer: NotRequired[str]
    packager_provided_files: NotRequired[List[Any]]
    manifest_variables: NotRequired[List[Any]]
    known_packaging_files: NotRequired[List[Any]]


def _initialize_plugin_metadata_parser_generator() -> ParserGenerator:
    pc = ParserGenerator()
    pc.register_mapped_type(
        TypeMapping(
            OctalMode,
            str,
            lambda v, ap, _: OctalMode.parse_filesystem_mode(v, ap),
        )
    )
    return pc


PLUGIN_METADATA_PARSER_GENERATOR = _initialize_plugin_metadata_parser_generator()
PLUGIN_METADATA_PARSER = PLUGIN_METADATA_PARSER_GENERATOR.generate_parser(
    PluginJsonMetadata
)
PLUGIN_PPF_PARSER = PLUGIN_METADATA_PARSER_GENERATOR.generate_parser(
    PackagerProvidedFileJsonDescription
)
PLUGIN_MANIFEST_VARS_PARSER = PLUGIN_METADATA_PARSER_GENERATOR.generate_parser(
    ManifestVariableJsonDescription
)
PLUGIN_KNOWN_PACKAGING_FILES_PARSER = PLUGIN_METADATA_PARSER_GENERATOR.generate_parser(
    KnownPackagingFileInfo
)
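# Editor-added sketch: a JSON metadata document of the shape accepted by
# PLUGIN_METADATA_PARSER above. The field names follow the TypedDicts in this
# module; the concrete values are invented for the example.
#
#   {
#     "api_compat_version": 1,
#     "module": "myplugin",
#     "plugin_initializer": "initialize_my_plugin",
#     "packager_provided_files": [
#       {"stem": "mytool", "installed_path": "/usr/share/mytool/{name}.conf"}
#     ]
#   }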
diff --git a/coverage-report/d_64287305fe0c6642_spec_py.html b/coverage-report/d_64287305fe0c6642_spec_py.html
deleted file mode 100644
index 700a493..0000000
--- a/coverage-report/d_64287305fe0c6642_spec_py.html
+++ /dev/null
@@ -1,1842 +0,0 @@

[Deleted coverage report page for src/debputy/plugin/api/spec.py: 87% of 282 statements covered; coverage.py v7.2.7, created at 2024-04-07 12:14 +0200. The embedded source listing follows.]

import contextlib
import dataclasses
import os
import tempfile
import textwrap
from typing import (
    Iterable,
    Optional,
    Callable,
    Literal,
    Union,
    Iterator,
    overload,
    FrozenSet,
    Sequence,
    TypeVar,
    Any,
    TYPE_CHECKING,
    TextIO,
    BinaryIO,
    Generic,
    ContextManager,
    List,
    Type,
    Tuple,
)

from debian.substvars import Substvars

from debputy import util
from debputy.exceptions import TestPathWithNonExistentFSPathError, PureVirtualPathError
from debputy.interpreter import Interpreter, extract_shebang_interpreter_from_file
from debputy.manifest_parser.util import parse_symbolic_mode
from debputy.packages import BinaryPackage
from debputy.types import S

if TYPE_CHECKING:
    from debputy.manifest_parser.base_types import (
        StaticFileSystemOwner,
        StaticFileSystemGroup,
    )


PluginInitializationEntryPoint = Callable[["DebputyPluginInitializer"], None]
MetadataAutoDetector = Callable[
    ["VirtualPath", "BinaryCtrlAccessor", "PackageProcessingContext"], None
]
PackageProcessor = Callable[["VirtualPath", None, "PackageProcessingContext"], None]
DpkgTriggerType = Literal[
    "activate",
    "activate-await",
    "activate-noawait",
    "interest",
    "interest-await",
    "interest-noawait",
]
Maintscript = Literal["postinst", "preinst", "prerm", "postrm"]
PackageTypeSelector = Union[Literal["deb", "udeb"], Iterable[Literal["deb", "udeb"]]]
ServiceUpgradeRule = Literal[
    "do-nothing",
    "reload",
    "restart",
    "stop-then-start",
]

DSD = TypeVar("DSD")
ServiceDetector = Callable[
    ["VirtualPath", "ServiceRegistry[DSD]", "PackageProcessingContext"],
    None,
]
ServiceIntegrator = Callable[
    [
        Sequence["ServiceDefinition[DSD]"],
        "BinaryCtrlAccessor",
        "PackageProcessingContext",
    ],
    None,
]

PMT = TypeVar("PMT")

@dataclasses.dataclass(slots=True, frozen=True)
class PackagerProvidedFileReferenceDocumentation:
    description: Optional[str] = None
    format_documentation_uris: Sequence[str] = tuple()

    def replace(self, **changes: Any) -> "PackagerProvidedFileReferenceDocumentation":
        return dataclasses.replace(self, **changes)


def packager_provided_file_reference_documentation(
    *,
    description: Optional[str] = None,
    format_documentation_uris: Optional[Sequence[str]] = tuple(),
) -> PackagerProvidedFileReferenceDocumentation:
    """Provide documentation for a given packager provided file.

    :param description: Textual description presented to the user.
    :param format_documentation_uris: A sequence of URIs to documentation that describes
      the format of the file. Most relevant first.
    :return: The reference documentation object to pass to `packager_provided_file`.
    """
    uris = tuple(format_documentation_uris) if format_documentation_uris else tuple()
    return PackagerProvidedFileReferenceDocumentation(
        description=description,
        format_documentation_uris=uris,
    )
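# Editor-added sketch: building reference documentation for a hypothetical
# "foo.tmpfiles" packager provided file. The URI is invented for the example.
#
#   ref_doc = packager_provided_file_reference_documentation(
#       description="Declares tmpfiles.d entries to be installed",
#       format_documentation_uris=["https://manpages.debian.org/tmpfiles.d.5"],
#   )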

class PathMetadataReference(Generic[PMT]):
    """An accessor to plugin provided metadata

    This is a *short-lived* reference to a piece of metadata. It should *not* be stored beyond
    the boundaries of the current plugin execution context as it can become invalid (as an
    example, if the path associated with this reference is removed, then this reference becomes
    invalid)
    """

    @property
    def is_present(self) -> bool:
        """Determine whether the value has been set

        If the current plugin cannot access the value, then this method unconditionally returns
        `False` regardless of whether the value is there.

        :return: `True` if the value has been set to a not None value (and not been deleted).
          Otherwise, this property is `False`.
        """
        raise NotImplementedError

    @property
    def can_read(self) -> bool:
        """Test whether it is possible to read the metadata

        Note that the metadata being readable does *not* imply that the metadata is present.

        :return: True if it is possible to read the metadata. This is always True for the
          owning plugin.
        """
        raise NotImplementedError

    @property
    def can_write(self) -> bool:
        """Test whether it is possible to update the metadata

        :return: True if it is possible to update the metadata.
        """
        raise NotImplementedError

    @property
    def value(self) -> Optional[PMT]:
        """Fetch the currently stored value if present.

        :return: The value previously stored if any. Returns `None` if the value was never
          stored, explicitly set to `None` or was deleted.
        """
        raise NotImplementedError

    @value.setter
    def value(self, value: Optional[PMT]) -> None:
        """Replace any current value with the provided value

        This operation is only possible if the path is writable *and* the caller is from
        the owning plugin OR the owning plugin made the reference read-write.
        """
        raise NotImplementedError

    @value.deleter
    def value(self) -> None:
        """Delete any current value.

        This has the same effect as setting the value to `None`. It has the same restrictions
        as the value setter.
        """
        self.value = None
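# Editor-added sketch: storing and reading back a piece of path metadata in
# the same plugin execution context. The `path.metadata(...)` accessor
# returning a PathMetadataReference is an assumption of this sketch.
#
#   ref = path.metadata(MyMetadata)   # hypothetical accessor and type
#   if ref.can_write:
#       ref.value = MyMetadata(...)
#   if ref.is_present:
#       use(ref.value)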

@dataclasses.dataclass(slots=True)
class PathDef:
    path_name: str
    mode: Optional[int] = None
    mtime: Optional[int] = None
    has_fs_path: Optional[bool] = None
    fs_path: Optional[str] = None
    link_target: Optional[str] = None
    content: Optional[str] = None
    materialized_content: Optional[str] = None


def virtual_path_def(
    path_name: str,
    /,
    mode: Optional[int] = None,
    mtime: Optional[int] = None,
    fs_path: Optional[str] = None,
    link_target: Optional[str] = None,
    content: Optional[str] = None,
    materialized_content: Optional[str] = None,
) -> PathDef:
    """Define a virtual path for use with examples or, in tests, `build_virtual_file_system`

    :param path_name: The full path. Must start with "./". If it ends with "/", the path will be interpreted
      as a directory (the `is_dir` attribute will be True). Otherwise, it will be a symlink or file depending
      on whether a `link_target` is provided.
    :param mode: The mode to use for this path. Defaults to 0644 for files and 0755 for directories. The mode
      should be None for symlinks.
    :param mtime: Define the last modified time for this path. If not provided, debputy will provide a default
      if the mtime attribute is accessed.
    :param fs_path: Define a file system path for this path. This causes `has_fs_path` to return True and the
      `fs_path` attribute will return this value. The test is required to make this path available to the extent
      required. Note that the virtual file system will *not* examine the provided path in any way nor attempt
      to resolve defaults from the path.
    :param link_target: A target for the symlink. Providing a not None value for this parameter will make the
      path a symlink.
    :param content: The content of the path (if opened). The path must be a file.
    :param materialized_content: Same as `content` except `debputy` will put the contents into a physical file
      as needed. Cannot be used with `content` or `fs_path`.
    :return: An *opaque* object to be passed to `build_virtual_file_system`. While the exact type is provided
      to aid with typing, the type name and its behaviour is not part of the API.
    """

    is_dir = path_name.endswith("/")
    is_symlink = link_target is not None

    if is_symlink:
        if mode is not None:
            raise ValueError(
                f'Please do not provide mode for symlinks. Triggered by "{path_name}"'
            )
        if is_dir:
            raise ValueError(
                "Path name looks like a directory, but a symlink target was also provided."
                f' Please remove the trailing slash OR the symlink_target. Triggered by "{path_name}"'
            )

    if content and (is_dir or is_symlink):  # coverage: branch never taken in the test suite
        raise ValueError(
            "Content was defined however, the path appears to be a directory or a symlink."
            f' Please remove the content, the trailing slash OR the symlink_target. Triggered by "{path_name}"'
        )

    if materialized_content is not None:
        if content is not None:  # coverage: branch never taken in the test suite
            raise ValueError(
                "The materialized_content keyword is mutually exclusive with the content keyword."
                f' Triggered by "{path_name}"'
            )
        if fs_path is not None:  # coverage: branch never taken in the test suite
            raise ValueError(
                "The materialized_content keyword is mutually exclusive with the fs_path keyword."
                f' Triggered by "{path_name}"'
            )
    return PathDef(
        path_name,
        mode=mode,
        mtime=mtime,
        has_fs_path=bool(fs_path) or materialized_content is not None,
        fs_path=fs_path,
        link_target=link_target,
        content=content,
        materialized_content=materialized_content,
    )
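# Editor-added sketch: defining a small virtual tree for a plugin test.
# `build_virtual_file_system` is the test-API helper the docstring above
# refers to; the paths are invented for the example.
#
#   fs_root = build_virtual_file_system(
#       [
#           virtual_path_def("./usr/share/doc/foo/", mode=0o0755),
#           virtual_path_def("./usr/share/doc/foo/copyright", content="..."),
#           virtual_path_def("./usr/bin/foo-compat", link_target="foo"),
#       ]
#   )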

class PackageProcessingContext:
    """Context for auto-detectors of metadata and package processors (no instantiation)

    This object holds some context related data for the metadata detector or/and package
    processors. It may receive new attributes in the future.
    """

    __slots__ = ()

    @property
    def binary_package(self) -> BinaryPackage:
        """The binary package stanza from `debian/control`"""
        raise NotImplementedError

    @property
    def binary_package_version(self) -> str:
        """The version of the binary package

        Note this never includes the binNMU version for arch:all packages, but it may for arch:any.
        """
        raise NotImplementedError

    @property
    def related_udeb_package(self) -> Optional[BinaryPackage]:
        """An udeb related to this binary package (if any)"""
        raise NotImplementedError

    @property
    def related_udeb_package_version(self) -> Optional[str]:
        """The version of the related udeb package (if present)

        Note this never includes the binNMU version for arch:all packages, but it may for arch:any.
        """
        raise NotImplementedError

    def accessible_package_roots(self) -> Iterable[Tuple[BinaryPackage, "VirtualPath"]]:
        raise NotImplementedError

    # """The source package stanza from `debian/control`"""
    # source_package: SourcePackage

class DebputyPluginInitializer:
    __slots__ = ()

    def packager_provided_file(
        self,
        stem: str,
        installed_path: str,
        *,
        default_mode: int = 0o0644,
        default_priority: Optional[int] = None,
        allow_name_segment: bool = True,
        allow_architecture_segment: bool = False,
        post_formatting_rewrite: Optional[Callable[[str], str]] = None,
        packageless_is_fallback_for_all_packages: bool = False,
        reservation_only: bool = False,
        reference_documentation: Optional[
            PackagerProvidedFileReferenceDocumentation
        ] = None,
    ) -> None:
        """Register a packager provided file (debian/<pkg>.foo)

        Register a packager provided file that debputy should automatically detect and install for the
        packager (example `debian/foo.tmpfiles` -> `debian/foo/usr/lib/tmpfiles.d/foo.conf`). A packager
        provided file is typically identified by a package prefix and a "stem" and by convention placed
        in the `debian/` directory.

        Like debhelper, debputy also supports the `foo.bar.tmpfiles` variant where the file is to be
        installed into the `foo` package but be named after the `bar` segment rather than the package name.
        This feature can be controlled via the `allow_name_segment` parameter.

        :param stem: The "stem" of the file. This would be the `tmpfiles` part of `debian/foo.tmpfiles`.
          Note that this value must be unique across all registered packager provided files.
        :param installed_path: A format string describing where the file should be installed. Would be
          `/usr/lib/tmpfiles.d/{name}.conf` from the example above.

          The caller should provide a string with one or more of the placeholders listed below (usually `{name}`
          should be one of them). The format affects the entire path.

          The following placeholders are supported:
            * `{name}` - The name in the name segment (defaulting to the package name if no name segment is given)
            * `{priority}` / `{priority:02}` - The priority of the file. Only provided priorities are used (that
              is, default_priority is not None). The latter variant ensures that the priority takes at least
              two characters and the `0` character is left-padded for priorities that take less than two
              characters.
            * `{owning_package}` - The name of the package. Should only be used when `{name}` alone is insufficient.
              If you do not want the "name" segment in the first place, use `allow_name_segment=False` instead.

          The path is always interpreted as relative to the binary package root.

        :param default_mode: The mode the installed file should have by default. Common options are 0o0644 (the default)
          or 0o0755 (for files that must be executable).
        :param allow_architecture_segment: If True, the file may have an optional "architecture" segment at the end
          (`foo.tmpfiles.amd64`), which marks it architecture specific. When False, debputy will detect the
          "architecture" segment and report the use as an error. Note the architecture segment is only allowed for
          arch:any packages. If a file targeting an arch:all package uses an architecture specific file it will
          always result in an error.
        :param allow_name_segment: If True, the file may have an optional "name" segment after the package name prefix.
          (`foo.<name-here>.tmpfiles`). When False, debputy will detect the "name" segment and report the use as an
          error.
        :param default_priority: Special-case option for packager files that are installed into directories that have
          "parse ordering" or "priority". These files will generally be installed as something like `20-foo.conf`
          where the `20-` denotes their "priority". If the plugin is registering such a file type, then it should
          provide a default priority.

          The following placeholders are supported:
            * `{name}` - The name in the name segment (defaulting to the package name if no name segment is given)
            * `{priority}` - The priority of the file. Only provided priorities are used (that is, default_priority
              is not None)
            * `{owning_package}` - The name of the package. Should only be used when `{name}` alone is insufficient.
              If you do not want the "name" segment in the first place, use `allow_name_segment=False` instead.
        :param post_formatting_rewrite: An optional "name correcting" callback. It receives the formatted name and can
          do any transformation required. The primary use-case for this is to replace "forbidden" characters. The most
          common case for debputy itself is to replace "." with "_" for tools that refuse to work with files containing
          "." (`lambda x: x.replace(".", "_")`). The callback operates on the basename of the formatted version of the
          `installed_path` and the callback should return the basename.
        :param packageless_is_fallback_for_all_packages: If True, the packageless variant (such as, `debian/changelog`)
          is a fallback for every package.
        :param reference_documentation: Reference documentation for the packager provided file. Use the
          packager_provided_file_reference_documentation function to provide the value for this parameter.
        :param reservation_only: When True, tell debputy that the plugin reserves this packager provided file, but that
          debputy should not actually install it automatically. This is useful in the cases where the plugin
          needs to process the file before installing it. The file will be marked as provided by this plugin. This
          enables introspection and detects conflicts if other plugins attempt to claim the file.
        """
        raise NotImplementedError

    def metadata_or_maintscript_detector(
        self,
        auto_detector_id: str,
        auto_detector: MetadataAutoDetector,
        *,
        package_type: PackageTypeSelector = "deb",
    ) -> None:
        """Provide a pre-assembly hook that can affect the metadata/maintscript of binary ("deb") packages

        The provided hook will be run once per binary package to be assembled, and it can see all the content
        ("data.tar") planned to be included in the deb. The hook may do any *read-only* analysis of this content
        and provide metadata, alter substvars or inject maintscript snippets. However, the hook must *not*
        change the content ("data.tar") part of the deb.

        The hook will be run unconditionally for all binary packages built. When the hook does not apply to all
        packages, it must provide its own (internal) logic for detecting whether it is relevant and reduce itself
        to a no-op if it should not apply to the current package.

        Hooks are run in "some implementation defined order" and should not rely on being run before or after
        any other hook.

        The hooks are only applied to packages defined in `debian/control`. Notably, the metadata detector will
        not apply to auto-generated `-dbgsym` packages (as those are not listed explicitly in `debian/control`).

        :param auto_detector_id: A plugin-wide unique ID for this detector. Packagers may use this ID for disabling
          the detector and accordingly the ID is part of the plugin's API toward the packager.
        :param auto_detector: The code to be called that will be run at the metadata generation state (once for each
          binary package).
        :param package_type: Which kind of packages this metadata detector applies to. The package type is generally
          defined by the `Package-Type` field in the binary package. The default is to only run for regular `deb`
          packages and ignore `udeb` packages.
        """
        raise NotImplementedError

    def manifest_variable(
        self,
        variable_name: str,
        value: str,
        variable_reference_documentation: Optional[str] = None,
    ) -> None:
        """Provide a variable that can be used in the package manifest

        >>> # Enable users to use "{{path:BASH_COMPLETION_DIR}}/foo" in their manifest.
        >>> api.manifest_variable(  # doctest: +SKIP
        ...     "path:BASH_COMPLETION_DIR",
        ...     "/usr/share/bash-completion/completions",
        ...     variable_reference_documentation="Directory to install bash completions into",
        ... )

        :param variable_name: The variable name.
        :param value: The value the variable should resolve to.
        :param variable_reference_documentation: A short snippet of reference documentation that explains
          the purpose of the variable.
        """
        raise NotImplementedError
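# Editor-added sketch: a plugin initializer wiring up the registrations
# documented above. The stem, paths and variable name are invented.
#
#   def initialize_my_plugin(api: DebputyPluginInitializer) -> None:
#       api.packager_provided_file(
#           "mytool",
#           "/usr/share/mytool/{name}.conf",
#           reference_documentation=packager_provided_file_reference_documentation(
#               description="Configuration snippet for mytool",
#           ),
#       )
#       api.manifest_variable(
#           "path:MYTOOL_CONF_DIR",
#           "/usr/share/mytool",
#           variable_reference_documentation="Directory for mytool configuration",
#       )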

class MaintscriptAccessor:
    __slots__ = ()

    def on_configure(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
        skip_on_rollback: bool = False,
    ) -> None:
        """Provide a snippet to be run when the package is about to be "configured"

        This condition is the most common "post install" condition and covers the two
        common cases:
          * On initial install, OR
          * On upgrade

        In dpkg maintscript terms, this method roughly corresponds to postinst containing
        `if [ "$1" = configure ]; then <snippet>; fi`

        Additionally, the condition will by default also include rollback/abort scenarios such as "abort-remove",
        which is normally what you want but most people forget about.

        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param skip_on_rollback: By default, this condition will also cover common rollback scenarios. This
          is normally what you want (or benign in most cases due to the idempotence requirement for maintscripts).
          However, you can disable the rollback cases, leaving only "On initial install OR On upgrade".
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def on_initial_install(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run when the package is about to be "configured" for the first time

        The snippet will only be run on the first time the package is installed (ever or since last purge).
        Note that "first" does not mean "exactly once" as dpkg does *not* provide such semantics. There are two
        common cases where this snippet can be run multiple times for the same system (and why the snippet
        must still be idempotent):

        1) The package is installed (1), then purged and then installed again (2). This can partly be mitigated
           by having an `on_purge` script to do clean up.

        2) As the package is installed, the `postinst` script terminates prematurely (Disk full, power loss, etc.).
           The user resolves the problem and runs `dpkg --configure <pkg>`, which in turn restarts the script
           from the beginning. This is why scripts must be idempotent in general.

        In dpkg maintscript terms, this method roughly corresponds to postinst containing
        `if [ "$1" = configure ] && [ -z "$2" ]; then <snippet>; fi`

        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def on_upgrade(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run when the package is about to be "configured" after an upgrade

        The snippet will only be run on any upgrade (that is, it will be skipped on the initial install).

        In dpkg maintscript terms, this method roughly corresponds to postinst containing
        `if [ "$1" = configure ] && [ -n "$2" ]; then <snippet>; fi`

        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def on_upgrade_from(
        self,
        version: str,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run when the package is about to be "configured" after an upgrade from a given version

        The snippet will only be run on any upgrade (that is, it will be skipped on the initial install).

        In dpkg maintscript terms, this method roughly corresponds to postinst containing
        `if [ "$1" = configure ] && dpkg --compare-versions le-nl "$2" ; then <snippet>; fi`

        :param version: The version to upgrade from
        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def on_before_removal(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run when the package is about to be removed

        The snippet will be run before dpkg removes any files.

        In dpkg maintscript terms, this method roughly corresponds to prerm containing
        `if [ "$1" = remove ] ; then <snippet>; fi`

        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def on_removed(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run when the package has been removed

        The snippet will be run after dpkg removes the package content from the file system.

        **WARNING**: The snippet *cannot* rely on dependencies and must rely on `Essential: yes` packages.

        In dpkg maintscript terms, this method roughly corresponds to postrm containing
        `if [ "$1" = remove ] ; then <snippet>; fi`

        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def on_purge(
        self,
        run_snippet: str,
        /,
        indent: Optional[bool] = None,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run when the package is being purged.

        The snippet will be run when the package is purged from the system.

        **WARNING**: The snippet *cannot* rely on dependencies and must rely on `Essential: yes` packages.

        In dpkg maintscript terms, this method roughly corresponds to postrm containing
        `if [ "$1" = purge ] ; then <snippet>; fi`

        :param run_snippet: The actual shell snippet to be run in the given condition. The snippet must be idempotent.
          The snippet may contain newlines as necessary, which will make the result more readable. Additionally, the
          snippet may contain '{{FOO}}' substitutions by default.
        :param indent: If True, the provided snippet will be indented to fit the condition provided by debputy.
          In most cases, this is safe to do and provides more readable scripts. However, it may cause issues
          with some special shell syntax (such as "Heredocs"). When False, the snippet will *not* be re-indented.
          You are recommended to do 4 spaces of indentation when indent is False for readability.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def unconditionally_in_script(
        self,
        maintscript: Maintscript,
        run_snippet: str,
        /,
        perform_substitution: bool = True,
    ) -> None:
        """Provide a snippet to be run in a given script

        Run a given snippet unconditionally from a given script. The snippet must contain its own conditional
        for when it should be run.

        :param maintscript: The maintscript to insert the snippet into.
        :param run_snippet: The actual shell snippet to be run. The snippet will be run unconditionally and should
          contain its own conditions as necessary. The snippet must be idempotent. The snippet may contain newlines
          as necessary, which will make the result more readable. Additionally, the snippet may contain '{{FOO}}'
          substitutions by default.
        :param perform_substitution: When True, `{{FOO}}` will be substituted in the snippet. When False, no
          substitution is provided.
        """
        raise NotImplementedError

    def escape_shell_words(self, *args: str) -> str:
        """Provide sh-shell escape of strings

        `assert escape_shell("foo", "fu bar", "baz") == 'foo "fu bar" baz'`

        This is useful for ensuring file names and other "input" are considered one parameter even when they
        contain spaces or shell meta-characters.

        :param args: The string(s) to be escaped.
        :return: Each argument escaped such that each argument becomes a single "word" and then all these words are
          joined by a single space.
        """
        return util.escape_shell(*args)
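# Editor-added sketch: a metadata detector injecting an idempotent postinst
# snippet via the accessor above. The command is invented for the example.
#
#   def detect_foo(fs_root, ctrl, context):
#       if fs_root.lookup("./usr/share/foo/cache") is None:
#           return
#       ctrl.maintscript.on_configure(
#           "foo-rebuild-cache || true\n",
#       )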

class BinaryCtrlAccessor:
    __slots__ = ()

    def dpkg_trigger(self, trigger_type: DpkgTriggerType, trigger_target: str) -> None:
        """Register a declarative dpkg level trigger

        The provided trigger will be added to the package's metadata (the triggers file of the control.tar).

        If the trigger has already been added previously, a second call with the same trigger data will be ignored.
        """
        raise NotImplementedError

    @property
    def maintscript(self) -> MaintscriptAccessor:
        """Attribute for manipulating maintscripts"""
        raise NotImplementedError

    @property
    def substvars(self) -> "FlushableSubstvars":
        """Attribute for manipulating dpkg substvars (deb-substvars)"""
        raise NotImplementedError
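# Editor-added sketch: the same detector could also register a trigger and
# set a substvar through the ctrl accessor. The substvar name and value are
# assumptions for the example.
#
#   ctrl.dpkg_trigger("activate-noawait", "ldconfig")
#   ctrl.substvars["misc:Depends"] = "foo-helper"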

719 

-

720 

-

721class VirtualPath: 

-

722 __slots__ = () 

-

723 

-

724 @property 

-

725 def name(self) -> str: 

-

726 """Basename of the path a.k.a. last segment of the path 

-

727 

-

728 In a path "usr/share/doc/pkg/changelog.gz" the basename is "changelog.gz". 

-

729 

-

730 For a directory, the basename *never* ends with a `/`. 

-

731 """ 

-

732 raise NotImplementedError 

-

733 

-

734 @property 

-

735 def iterdir(self) -> Iterable["VirtualPath"]: 

-

736 """Returns an iterable that iterates over all children of this path 

-

737 

-

738 For directories, this returns an iterable of all children. For non-directories, 

-

739 the iterable is always empty. 

-

740 """ 

-

741 raise NotImplementedError 

-

742 

-

743 def lookup(self, path: str) -> Optional["VirtualPath"]: 

-

744 """Perform a path lookup relative to this path 

-

745 

-

746 As an example `doc_dir = fs_root.lookup('./usr/share/doc')` 

-

747 

-

748 If the provided path starts with `/`, then the lookup is performed relative to the 

-

749 file system root. That is, you can assume the following to always be True: 

-

750 

-

751 `fs_root.lookup("usr") == any_path_beneath_fs_root.lookup('/usr')` 

-

752 

-

753 Note: This method requires the path to be attached (see `is_detached`) regardless of 

-

754 whether the lookup is relative or absolute. 

-

755 

-

756 If the path traverse a symlink, the symlink will be resolved. 

-

757 

-

758 :param path: The path to look. Can contain "." and ".." segments. If starting with `/`, 

-

759 look up is performed relative to the file system root, otherwise the lookup is relative 

-

760 to this path. 

-

761 :return: The path object for the desired path if it can be found. Otherwise, None. 

-

762 """ 

-

763 raise NotImplementedError 

-

764 

-

765 def all_paths(self) -> Iterable["VirtualPath"]: 

-

766 """Iterate over this path and all of its descendants (if any) 

-

767 

-

768 If used on the root path, then every path in the package is returned. 

-

769 

-

770 The iterable is ordered, so using the order in output will be produce 

-

771 bit-for-bit reproducible output. Additionally, a directory will always 

-

772 be seen before its descendants. Otherwise, the order is implementation 

-

773 defined. 

-

774 

-

775 The iteration is lazy and as a side effect do account for some obvious 

-

776 mutation. Like if the current path is removed, then none of its children 

-

777 will be returned (provided mutation happens before the lazy iteration 

-

778 was required to resolve it). Likewise, mutation of the directory will 

-

779 also work (again, provided mutation happens before the lazy iteration order). 

-

780 

-

781 :return: An ordered iterable of this path followed by its descendants. 

-

782 """ 

-

783 raise NotImplementedError 

-

784 

-

785 @property 

-

786 def is_detached(self) -> bool: 

-

787 """Returns True if this path is detached 

-

788 

-

789 Paths that are detached from the file system will not be present in the package and 

-

790 most operations are unsafe on them. This usually only happens if the path or one of 

-

791 its parent directories are unlinked (rm'ed) from the file system tree. 

-

792 

-

793 All paths are attached by default and will only become detached as a result of 

-

794 an action to mutate the virtual file system. Note that the file system may not 

-

795 always be manipulated. 

-

796 

-

797 :return: True if the entry is detached. Detached entries should be discarded, so they 

-

798 can be garbage collected. 

-

799 """ 

-

800 raise NotImplementedError 

-

801 

-

802 # The __getitem__ behaves like __getitem__ from Dict but __iter__ would ideally work like a Sequence. 

-

803 # However, that does not feel compatible, so lets force people to use .children instead for the Sequence 

-

804 # behaviour to avoid surprises for now. 

-

805 # (Maybe it is a non-issue, but it is easier to add the API later than to remove it once we have committed 

-

806 # to using it) 

-

807 __iter__ = None 

-

808 

-

809 def __getitem__(self, key: object) -> "VirtualPath": 

-

810 """Lookup a (direct) child by name 

-

811 

-

812 Ignoring the possible `KeyError`, then the following are the same: 

-

813 `fs_root["usr"] == fs_root.lookup('usr')` 

-

814 

-

815 Note that unlike `.lookup` this can only locate direct children. 

-

816 """ 

-

817 raise NotImplementedError 

-

818 

-

819 def __delitem__(self, key) -> None: 

-

820 """Remove a child from this node if it exists 

-

821 

-

822 If that child is a directory, then the entire tree is removed (like `rm -fr`). 

-

823 """ 

-

824 raise NotImplementedError 

-

825 

-

826 def get(self, key: str) -> "Optional[VirtualPath]": 

-

827 """Lookup a (direct) child by name 

-

828 

-

829 The following are the same: 

-

830 `fs_root.get("usr") == fs_root.lookup('usr')` 

-

831 

-

832 Note that unlike `.lookup` this can only locate direct children. 

-

833 """ 

-

834 try: 

-

835 return self[key] 

-

836 except KeyError: 

-

837 return None 

-

838 

-

839 def __contains__(self, item: object) -> bool: 

-

840 """Determine if this path includes a given child (either by object or string) 

-

841 

-

842 Examples: 

-

843 

-

844 if 'foo' in dir: ... 

-

845 """ 

-

846 if isinstance(item, VirtualPath): 

-

847 return item.parent_dir is self 

-

848 if not isinstance(item, str): 

-

849 return False 

-

850 m = self.get(item) 

-

851 return m is not None 

-

852 

-

853 @property 

-

854 def path(self) -> str: 

-

855 """Returns the "full" path for this file system entry 

-

856 

-

857 This is the path that debputy uses to refer to this file system entry. It is always 

-

858 normalized. Use the `absolute` attribute for how the path looks 

-

859 when the package is installed. Alternatively, there is also `fs_path`, which is the 

-

860 path to the underlying file system object (assuming there is one). That is the one 

-

861 you need if you want to read the file. 

-

862 

-

863 This is attribute is mostly useful for debugging or for looking up the path relative 

-

864 to the "root" of the virtual file system that debputy maintains. 

-

865 

-

866 If the path is detached (see `is_detached`), then this method returns the path as it 

-

867 was known prior to being detached. 

-

868 """ 

-

869 raise NotImplementedError 

-

870 

-

871 @property 

-

872 def absolute(self) -> str: 

-

873 """Returns the absolute version of this path 

-

874 

-

875 This is how to refer to this path when the package is installed. 

-

876 

-

877 If the path is detached (see `is_detached`), then this method returns the last known location 

-

878 of installation (prior to being detached). 

-

879 

-

880 :return: The absolute path of this file as it would be on the installed system. 

-

881 """ 

-

882 p = self.path.lstrip(".") 

-

883 if not p.startswith("/"): 

-

884 return f"/{p}" 

-

885 return p 

-

886 

-

887 @property 

-

888 def parent_dir(self) -> Optional["VirtualPath"]: 

-

889 """The parent directory of this path 

-

890 

-

891 Note this operation requires the path is "attached" (see `is_detached`). All paths are attached 

-

892 by default but unlinking paths will cause them to become detached. 

-

893 

-

894 :return: The parent path or None for the root. 

-

895 """ 

-

896 raise NotImplementedError 

-

897 

-

898 def stat(self) -> os.stat_result: 

-

899 """Attempt to do stat of the underlying path (if it exists) 

-

900 

-

901 *Avoid* using `stat()` whenever possible where a more specialized attribute exist. The 

-

902 `stat()` call returns the data from the file system and often, `debputy` does *not* track 

-

903 its state in the file system. As an example, if you want to know the file system mode of 

-

904 a path, please use the `mode` attribute instead. 

-

905 

-

906 This never follow symlinks (it behaves like `os.lstat`). It will raise an error 

-

907 if the path is not backed by a file system object (that is, `has_fs_path` is False). 

-

908 

-

909 :return: The stat result or an error. 

-

910 """ 

-

911 raise NotImplementedError() 

-

912 

-

913 @property 

-

914 def size(self) -> int: 

-

915 """Resolve the file size (`st_size`) 

-

916 

-

917 This may be using `stat()` and therefore `fs_path`. 

-

918 

-

919 :return: The size of the file in bytes 

-

920 """ 

-

921 return self.stat().st_size 

-

922 

-

923 @property 

-

924 def mode(self) -> int: 

-

925 """Determine the mode bits of this path object 

-

926 

-

927 Note that: 

-

928 * like with `stat` above, this never follows symlinks. 

-

929 * the mode returned by this method is not always a 1:1 match with the mode in the 

-

930 physical file system. As an optimization, `debputy` skips unnecessary writes 

-

931 to the underlying file system in many cases. 

-

932 

-

933 

-

934 :return: The mode bits for the path. 

-

935 """ 

-

936 raise NotImplementedError 

-

937 

-

938 @mode.setter 

-

939 def mode(self, new_mode: int) -> None: 

-

940 """Set the octal file mode of this path 

-

941 

-

942 Note that: 

-

943 * this operation will fail if `path.is_read_write` returns False. 

-

944 * this operation is generally *not* synced to the physical file system (as 

-

945 an optimization). 

-

946 

-

947 :param new_mode: The new octal mode for this path. Note that `debputy` insists 

-

948 that all paths have the `user read bit` and, for directories also, the 

-

949 `user execute bit`. The absence of these minimal mode bits causes hard-to-debug 

-

950 errors. 

-

951 """ 

-

952 raise NotImplementedError 

-

953 

-

954 @property 

-

955 def is_executable(self) -> bool: 

-

956 """Determine whether a path is considered executable 

-

957 

-

958 Generally, this means that at least one executable bit is set. This will 

-

959 basically always be true for directories as directories need the execute 

-

960 permission to be traversable. 

-

961 

-

962 :return: True if the path is considered executable with its current mode 

-

963 """ 

-

964 return bool(self.mode & 0o0111) 

-

965 

-

966 def chmod(self, new_mode: Union[int, str]) -> None: 

-

967 """Set the file mode of this path 

-

968 

-

969 This is similar to setting the `mode` attribute. However, this method accepts 

-

970 a string argument, which will be parsed as a symbolic mode (example: `u+rX,go=rX`). 

-

971 

-

972 Note that: 

-

973 * this operation will fail if `path.is_read_write` returns False. 

-

974 * this operation is generally *not* synced to the physical file system (as 

-

975 an optimization). 

-

976 

-

977 :param new_mode: The new mode for this path. 

-

978 Note that `debputy` insists that all paths have the `user read bit` and, for 

-

979 directories also, the `user execute bit`. The absence of these minimal mode 

-

980 bits causes hard-to-debug errors. 

-

981 """ 

-

982 if isinstance(new_mode, str): 

-

983 segments = parse_symbolic_mode(new_mode, None) 

-

984 final_mode = self.mode 

-

985 is_dir = self.is_dir 

-

986 for segment in segments: 

-

987 final_mode = segment.apply(final_mode, is_dir) 

-

988 self.mode = final_mode 

-

989 else: 

-

990 self.mode = new_mode 
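A short usage sketch of the two mode APIs (assuming `path` is a `VirtualPath` in a read-write context; the variable name is hypothetical):

    path.mode = 0o0755            # direct octal assignment
    path.chmod("u=rwX,go=rX")     # symbolic mode string, parsed via parse_symbolic_mode
    assert path.is_executable     # at least one executable bit is now set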

-

991 

-

992 def chown( 

-

993 self, 

-

994 owner: Optional["StaticFileSystemOwner"], 

-

995 group: Optional["StaticFileSystemGroup"], 

-

996 ) -> None: 

-

997 """Change the owner/group of this path 

-

998 

-

999 :param owner: The desired owner definition for this path. If None, then no change of owner is performed. 

-

1000 :param group: The desired group definition for this path. If None, then no change of group is performed. 

-

1001 """ 

-

1002 raise NotImplementedError 

-

1003 

-

1004 @property 

-

1005 def mtime(self) -> float: 

-

1006 """Determine the mtime of this path object 

-

1007 

-

1008 Note that: 

-

1009 * like with `stat` above, this never follows symlinks. 

-

1010 * the mtime returned has *not* been clamped against `SOURCE_DATE_EPOCH`. Timestamp 

-

1011 normalization is handled later by `debputy`. 

-

1012 * the mtime returned by this method is not always a 1:1 match with the mtime in the 

-

1013 physical file system. As an optimization, `debputy` skips unnecessary writes 

-

1014 to the underlying file system in many cases. 

-

1015 

-

1016 :return: The mtime for the path. 

-

1017 """ 

-

1018 raise NotImplementedError 

-

1019 

-

1020 @mtime.setter 

-

1021 def mtime(self, new_mtime: float) -> None: 

-

1022 """Set the mtime of this path 

-

1023 

-

1024 Note that: 

-

1025 * this operation will fail if `path.is_read_write` returns False. 

-

1026 * this operation is generally *not* synced to the physical file system (as 

-

1027 an optimization). 

-

1028 

-

1029 :param new_mtime: The new mtime of this path. Note that the caller does not need to 

-

1030 account for `SOURCE_DATE_EPOCH`. Timestamp normalization is handled later. 

-

1031 """ 

-

1032 raise NotImplementedError 

-

1033 

-

1034 def readlink(self) -> str: 

-

1035 """Determine the link target of this path assuming it is a symlink 

-

1036 

-

1037 For paths where `is_symlink` is True, this already returns a link target even when 

-

1038 `has_fs_path` is False. 

-

1039 

-

1040 :return: The link target of the path or an error if this is not a symlink 

-

1041 """ 

-

1042 raise NotImplementedError() 

-

1043 

-

1044 @overload 

-

1045 def open( 1045 ↛ exitline 1045 didn't jump to the function exit

-

1046 self, 

-

1047 *, 

-

1048 byte_io: Literal[False] = False, 

-

1049 buffering: Optional[int] = ..., 

-

1050 ) -> TextIO: ... 

-

1051 

-

1052 @overload 

-

1053 def open( 1053 ↛ exitline 1053 didn't jump to the function exit

-

1054 self, 

-

1055 *, 

-

1056 byte_io: Literal[True], 

-

1057 buffering: Optional[int] = ..., 

-

1058 ) -> BinaryIO: ... 

-

1059 

-

1060 @overload 

-

1061 def open( 1061 ↛ exitline 1061 didn't jump to the function exit

-

1062 self, 

-

1063 *, 

-

1064 byte_io: bool, 

-

1065 buffering: Optional[int] = ..., 

-

1066 ) -> Union[TextIO, BinaryIO]: ... 

-

1067 

-

1068 def open( 

-

1069 self, 

-

1070 *, 

-

1071 byte_io: bool = False, 

-

1072 buffering: int = -1, 

-

1073 ) -> Union[TextIO, BinaryIO]: 

-

1074 """Open the file for reading. Usually used with a context manager 

-

1075 

-

1076 By default, the file is opened in text mode (utf-8). Binary mode can be requested 

-

1077 via the `byte_io` parameter. This operation is only valid for files (`is_file` returns 

-

1078 `True`). Usage on symlinks and directories will raise exceptions. 

-

1079 

-

1080 This method *often* requires the `fs_path` to be present. However, tests as a notable 

-

1081 case can inject content without having the `fs_path` point to a real file. (To be clear, 

-

1082 such tests are generally expected to ensure `has_fs_path` returns `True`). 

-

1083 

-

1084 

-

1085 :param byte_io: If True, open the file in binary mode (like `rb` for `open`) 

-

1086 :param buffering: Same as open(..., buffering=...) where supported. Notably during 

-

1087 testing, the content may be purely in memory and use a BytesIO/StringIO 

-

1088 (which does not accept that parameter, but then is buffered in a different way) 

-

1089 :return: The file handle. 

-

1090 """ 

-

1091 

-

1092 if not self.is_file: 1092 ↛ 1093line 1092 didn't jump to line 1093, because the condition on line 1092 was never true

-

1093 raise TypeError(f"Cannot open {self.path} for reading: It is not a file") 

-

1094 

-

1095 if byte_io: 

-

1096 return open(self.fs_path, "rb", buffering=buffering) 

-

1097 return open(self.fs_path, "rt", encoding="utf-8", buffering=buffering) 
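A usage sketch of both modes (assuming `entry` is a `VirtualPath` denoting a regular file; the variable name is hypothetical):

    with entry.open() as fd:              # text mode, utf-8
        first_line = fd.readline()
    with entry.open(byte_io=True) as fd:  # binary mode
        magic = fd.read(4)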

-

1098 

-

1099 @property 

-

1100 def fs_path(self) -> str: 

-

1101 """Request the underling fs_path of this path 

-

1102 

-

1103 Only available when `has_fs_path` is True. Generally this should only be used for files to read 

-

1104 the contents of the file and do some action based on the parsed result. 

-

1105 

-

1106 The path should only be used for read-only purposes as debputy may assume that it is safe to have 

-

1107 multiple paths pointing to the same file system path. 

-

1108 

-

1109 Note that: 

-

1110 * This is often *not* available for directories and symlinks. 

-

1111 * The debputy in-memory file system overrules the physical file system. Attempting to "fix" things 

-

1112 by using `os.chmod` or `os.unlink`'ing files, etc. will generally not do as you expect. Best case, 

-

1113 your actions are ignored and worst case it will cause the build to fail as it violates debputy's 

-

1114 internal invariants. 

-

1115 

-

1116 :return: The path to the underlying file system object on the build system or an error if no such 

-

1117 file exists (see `has_fs_path`). 

-

1118 """ 

-

1119 raise NotImplementedError() 

-

1120 

-

1121 @property 

-

1122 def is_dir(self) -> bool: 

-

1123 """Determine if this path is a directory 

-

1124 

-

1125 Never follows symlinks. 

-

1126 

-

1127 :return: True if this path is a directory. False otherwise. 

-

1128 """ 

-

1129 raise NotImplementedError() 

-

1130 

-

1131 @property 

-

1132 def is_file(self) -> bool: 

-

1133 """Determine if this path is a directory 

-

1134 

-

1135 Never follows symlinks. 

-

1136 

-

1137 :return: True if this path is a regular file. False otherwise. 

-

1138 """ 

-

1139 raise NotImplementedError() 

-

1140 

-

1141 @property 

-

1142 def is_symlink(self) -> bool: 

-

1143 """Determine if this path is a symlink 

-

1144 

-

1145 :return: True if this path is a symlink. False otherwise. 

-

1146 """ 

-

1147 raise NotImplementedError() 

-

1148 

-

1149 @property 

-

1150 def has_fs_path(self) -> bool: 

-

1151 """Determine whether this path is backed by a file system path 

-

1152 

-

1153 :return: True if this path is backed by a file system object on the build system. 

-

1154 """ 

-

1155 raise NotImplementedError() 

-

1156 

-

1157 @property 

-

1158 def is_read_write(self) -> bool: 

-

1159 """When true, the file system entry may be mutated 

-

1160 

-

1161 Read-write rules are: 

-

1162 

-

1163 +--------------------------+-------------------+------------------------+ 

-

1164 | File system | From / Inside | Read-Only / Read-Write | 

-

1165 +--------------------------+-------------------+------------------------+ 

-

1166 | Source directory | Any context | Read-Only | 

-

1167 | Binary staging directory | Package Processor | Read-Write | 

-

1168 | Binary staging directory | Metadata Detector | Read-Only | 

-

1169 +--------------------------+-------------------+------------------------+ 

-

1170 

-

1171 These rules apply to the virtual file system (`debputy` cannot enforce 

-

1172 these rules in the underlying file system). The `debputy` code relies 

-

1173 on these rules for its logic in multiple places to catch bugs and for 

-

1174 optimizations. 

-

1175 

-

1176 As an example, the reason why the file system is read-only when Metadata 

-

1177 Detectors are run is that the contents of the file system have already 

-

1178 been committed. New files will not be included, removals of existing 

-

1179 files will trigger a hard error when the package is assembled, etc. 

-

1180 To avoid people spending hours debugging why their code does not work 

-

1181 as intended, `debputy` instead throws a hard error if you try to mutate 

-

1182 the file system when it is in read-only mode, to "fail fast". 

-

1183 

-

1184 :return: Whether file system mutations are permitted. 

-

1185 """ 

-

1186 return False 
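A defensive sketch of how plugin code might honour this flag (hypothetical `path` variable):

    if path.is_read_write:
        path.mode = 0o0644  # mutation is permitted (e.g., inside a Package Processor)
    else:
        # read-only context (e.g., a Metadata Detector): only inspect, never mutate
        current_mode = path.mode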

-

1187 

-

1188 def mkdir(self, name: str) -> "VirtualPath": 

-

1189 """Create a new subdirectory of the current path 

-

1190 

-

1191 :param name: Basename of the new directory. The directory must not contain a path 

-

1192 with this basename. 

-

1193 :return: The new subdirectory 

-

1194 """ 

-

1195 raise NotImplementedError 

-

1196 

-

1197 def mkdirs(self, path: str) -> "VirtualPath": 

-

1198 """Ensure a given path exists and is a directory. 

-

1199 

-

1200 :param path: Path to the directory to create. Any parent directories will be 

-

1201 created as needed. If the path already exists and is a directory, then it 

-

1202 is returned. If any part of the path exists and that is not a directory, 

-

1203 then the `mkdirs` call will raise an error. 

-

1204 :return: The directory denoted by the given path 

-

1205 """ 

-

1206 raise NotImplementedError 
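A sketch contrasting the two calls (assuming `root` is a read-write directory `VirtualPath`; the name is hypothetical):

    docdir = root.mkdirs("usr/share/doc/foo")  # creates missing parents as needed
    examples = docdir.mkdir("examples")        # basename must not already exist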

-

1207 

-

1208 def add_file( 

-

1209 self, 

-

1210 name: str, 

-

1211 *, 

-

1212 unlink_if_exists: bool = True, 

-

1213 use_fs_path_mode: bool = False, 

-

1214 mode: int = 0o0644, 

-

1215 mtime: Optional[float] = None, 

-

1216 ) -> ContextManager["VirtualPath"]: 

-

1217 """Add a new regular file as a child of this path 

-

1218 

-

1219 This method will insert a new file into the virtual file system as a child 

-

1220 of the current path (which must be a directory). The caller must use the 

-

1221 return value as a context manager (see example). During the life-cycle of 

-

1222 the managed context, the caller can fill out the contents of the file 

-

1223 from the new path's `fs_path` attribute. The `fs_path` will exist as an 

-

1224 empty file when the context manager is entered. 

-

1225 

-

1226 Once the context manager exits, mutation of the `fs_path` is no longer permitted. 

-

1227 

-

1228 >>> import subprocess 

-

1229 >>> path = ... # doctest: +SKIP 

-

1230 >>> with path.add_file("foo") as new_file, open(new_file.fs_path, "w") as fd: # doctest: +SKIP 

-

1231 ... fd.writelines(["Some", "Content", "Here"]) 

-

1232 

-

1233 The caller can replace the provided `fs_path` entirely, provided that the end result 

-

1234 (when the context manager exits) is a regular file with no hard links. 

-

1235 

-

1236 Note that this operation will fail if `path.is_read_write` returns False. 

-

1237 

-

1238 :param name: Basename of the new file 

-

1239 :param unlink_if_exists: If the name was already in use, then either an exception is thrown 

-

1240 (when `unlink_if_exists` is False) or the path will be removed via `unlink(recursive=False)` 

-

1241 (when `unlink_if_exists` is True) 

-

1242 :param use_fs_path_mode: When True, the file created will have this mode in the physical file 

-

1243 system. When the context manager exits, `debputy` will refresh its mode to match the mode 

-

1244 in the physical file system. This is primarily useful if the caller uses a subprocess to 

-

1245 mutate the path and the file mode is relevant for this tool (either as input or output). 

-

1246 When the parameter is false, the new file is guaranteed to be readable and writable for 

-

1247 the current user. However, no other guarantees are given (not even that it matches the 

-

1248 `mode` parameter) and any changes to the mode in the physical file system will be ignored. 

-

1249 :param mode: This is the initial file mode. Note the `use_fs_path_mode` parameter for how 

-

1250 this interacts with the physical file system. 

-

1251 :param mtime: If the caller has a more accurate mtime than the mtime of the generated file, 

-

1252 then it can be provided here. Note that all mtimes will later be clamped based on 

-

1253 `SOURCE_DATE_EPOCH`. This parameter is only for when the conceptual mtime of this path 

-

1254 should be earlier than `SOURCE_DATE_EPOCH`. 

-

1255 :return: A Context manager that upon entering provides a `VirtualPath` instance for the 

-

1256 new file. The instance remains valid after the context manager exits (assuming it exits 

-

1257 successfully), but the file denoted by `fs_path` must not be changed after the context 

-

1258 manager exits 

-

1259 """ 

-

1260 raise NotImplementedError 

-

1261 

-

1262 def replace_fs_path_content( 

-

1263 self, 

-

1264 *, 

-

1265 use_fs_path_mode: bool = False, 

-

1266 ) -> ContextManager[str]: 

-

1267 """Replace the contents of this file via inline manipulation 

-

1268 

-

1269 Used as a context manager to provide the fs path for manipulation. 

-

1270 

-

1271 Example: 

-

1272 >>> import subprocess 

-

1273 >>> path = ... # doctest: +SKIP 

-

1274 >>> with path.replace_fs_path_content() as fs_path: # doctest: +SKIP 

-

1275 ... subprocess.check_call(['strip', fs_path]) # doctest: +SKIP 

-

1276 

-

1277 The provided file system path should be manipulated inline. The debputy framework may 

-

1278 copy it first as necessary and therefore the provided fs_path may be different from 

-

1279 `path.fs_path` prior to entering the context manager. 

-

1280 

-

1281 Note that this operation will fail if `path.is_read_write` returns False. 

-

1282 

-

1283 If the mutation causes the returned `fs_path` to be a non-file or a hard-linked file 

-

1284 when the context manager exits, `debputy` will raise an error at that point. To preserve 

-

1285 the internal invariants of `debputy`, the path will be unlinked as `debputy` cannot 

-

1286 reliably restore the path. 

-

1287 

-

1288 :param use_fs_path_mode: If True, any changes to the mode on the physical FS path will be 

-

1289 recorded as the desired mode of the file when the contextmanager ends. The provided FS path 

-

1290 will start with the current mode when `use_fs_path_mode` is True. Otherwise, `debputy` will 

-

1291 ignore the mode of the file system entry and reuse its own current mode 

-

1292 definition. 

-

1293 :return: A context manager that upon entering provides the path to a mutable copy of 

-

1294 this path's `fs_path` attribute. The file on the underlying path may be mutated however 

-

1295 the caller wishes until the context manager exits. 

-

1296 """ 

-

1297 raise NotImplementedError 

-

1298 

-

1299 def add_symlink(self, link_name: str, link_target: str) -> "VirtualPath": 

-

1300 """Add a new regular file as a child of this path 

-

1301 

-

1302 This will create a new symlink inside the current path. If the path already exists, 

-

1303 the existing path will be unlinked via `unlink(recursive=False)`. 

-

1304 

-

1305 Note that this operation will fail if `path.is_read_write` returns False. 

-

1306 

-

1307 :param link_name: The basename of the link file entry. 

-

1308 :param link_target: The target of the link. Link target normalization will 

-

1309 be handled by `debputy`, so the caller can use relative or absolute paths. 

-

1310 (At the time of writing, symlink target normalization happens late) 

-

1311 :return: The newly created symlink. 

-

1312 """ 

-

1313 raise NotImplementedError 
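A short sketch (hypothetical `root` directory `VirtualPath` in a read-write context):

    bindir = root.mkdirs("usr/bin")
    bindir.add_symlink("tool", "tool-v2")  # usr/bin/tool -> tool-v2, replacing any existing "tool"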

-

1314 

-

1315 def unlink(self, *, recursive: bool = False) -> None: 

-

1316 """Unlink a file or a directory 

-

1317 

-

1318 This operation will remove the path from the file system (causing `is_detached` to return True). 

-

1319 

-

1320 When the path is a: 

-

1321 

-

1322 * symlink, then the symlink itself is removed. The target (if present) is not affected. 

-

1323 * *non-empty* directory, then the `recursive` parameter decides the outcome. An empty 

-

1324 directory will be removed regardless of the value of `recursive`. 

-

1325 

-

1326 Note that: 

-

1327 * the root directory cannot be deleted. 

-

1328 * this operation will fail if `path.is_read_write` returns False. 

-

1329 

-

1330 :param recursive: If True, then non-empty directories will be unlinked, removing everything inside them 

-

1331 as well. When False, an error is raised if the path is a non-empty directory. 

-

1332 """ 

-

1333 raise NotImplementedError 
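A sketch of both flavours (assuming `pkg_root` is a read-write directory `VirtualPath` and using `get` as `__contains__` does above; the names are hypothetical):

    stale = pkg_root.get("stale-file")
    if stale is not None:
        stale.unlink()                  # plain removal; fails on non-empty directories
        assert stale.is_detached        # the path is now detached
    old_dir = pkg_root.get("old-dir")
    if old_dir is not None:
        old_dir.unlink(recursive=True)  # removes the directory and everything inside it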

-

1334 

-

1335 def interpreter(self) -> Optional[Interpreter]: 

-

1336 """Determine the interpreter of the file (`#!`-line details) 

-

1337 

-

1338 Note: this method is only applicable for files (`is_file` is True). 

-

1339 

-

1340 :return: The detected interpreter if present or None if no interpreter can be detected. 

-

1341 """ 

-

1342 if not self.is_file: 

-

1343 raise TypeError("Only files can have interpreters") 

-

1344 try: 

-

1345 with self.open(byte_io=True, buffering=4096) as fd: 

-

1346 return extract_shebang_interpreter_from_file(fd) 

-

1347 except (PureVirtualPathError, TestPathWithNonExistentFSPathError): 

-

1348 return None 

-

1349 

-

1350 def metadata( 

-

1351 self, 

-

1352 metadata_type: Type[PMT], 

-

1353 ) -> PathMetadataReference[PMT]: 

-

1354 """Fetch the path metadata reference to access the underlying metadata 

-

1355 

-

1356 Calling this method returns a reference to an arbitrary piece of metadata associated 

-

1357 with this path. Plugins can store any arbitrary data associated with a given path. 

-

1358 Keep in mind that the metadata is stored in memory, so keep the size in moderation. 

-

1359 

-

1360 To store / update the metadata, the path must be in read-write mode. However, 

-

1361 already stored metadata remains accessible even if the path becomes read-only. 

-

1362 

-

1363 Note this method is not applicable if the path is detached 

-

1364 

-

1365 :param metadata_type: Type of the metadata being stored. 

-

1366 :return: A reference to the metadata. 

-

1367 """ 

-

1368 raise NotImplementedError 
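A hedged sketch of the intended flow (the `MyPluginData` type is made up for illustration, and the `value` accessor on the returned `PathMetadataReference` is an assumption):

    import dataclasses

    @dataclasses.dataclass
    class MyPluginData:          # hypothetical plugin-defined payload
        needs_trigger: bool = False

    ref = path.metadata(MyPluginData)  # typed reference scoped to this path
    if path.is_read_write:
        ref.value = MyPluginData(needs_trigger=True)  # assumed setter-style accessor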

-

1369 

-

1370 

-

1371class FlushableSubstvars(Substvars): 

-

1372 __slots__ = () 

-

1373 

-

1374 @contextlib.contextmanager 

-

1375 def flush(self) -> Iterator[str]: 

-

1376 """Temporarily write the substvars to a file and then re-read it again 

-

1377 

-

1378 >>> s = FlushableSubstvars() 

-

1379 >>> 'Test:Var' in s 

-

1380 False 

-

1381 >>> with s.flush() as name, open(name, 'wt', encoding='utf-8') as fobj: 

-

1382 ... _ = fobj.write('Test:Var=bar\\n') # "_ = " is to ignore the return value of write 

-

1383 >>> 'Test:Var' in s 

-

1384 True 

-

1385 

-

1386 Used as a context manager to define when the file is flushed and can be 

-

1387 accessed via the file system. If the context terminates successfully, the 

-

1388 file is read and its content replaces the current substvars. 

-

1389 

-

1390 This is mostly useful if the plugin needs to interface with a third-party 

-

1391 tool that requires a file as interprocess communication (IPC) for sharing 

-

1392 the substvars. 

-

1393 

-

1394 The file may be truncated or completed replaced (change inode) as long as 

-

1395 the provided path points to a regular file when the context manager 

-

1396 terminates successfully. 

-

1397 

-

1398 Note that any manipulation of the substvars via the `Substvars` API while 

-

1399 the file is flushed will silently be discarded if the context manager completes 

-

1400 successfully. 

-

1401 """ 

-

1402 with tempfile.NamedTemporaryFile(mode="w+t", encoding="utf-8") as tmp: 

-

1403 self.write_substvars(tmp) 

-

1404 tmp.flush() # Tempting to use close, but then we have to manually delete the file. 

-

1405 yield tmp.name 

-

1406 # Re-open; seek did not work when I last tried (if you can make it work, feel free to 

-

1407 # convert back to seek - as long as it works!) 

-

1408 with open(tmp.name, "rt", encoding="utf-8") as fd: 

-

1409 self.read_substvars(fd) 

-

1410 

-

1411 def save(self) -> None: 

-

1412 # Promote the debputy extension over `save()` for the plugins. 

-

1413 if self._substvars_path is None: 

-

1414 raise TypeError( 

-

1415 "Please use `flush()` extension to temporarily write the substvars to the file system" 

-

1416 ) 

-

1417 super().save() 

-

1418 

-

1419 

-

1420class ServiceRegistry(Generic[DSD]): 

-

1421 __slots__ = () 

-

1422 

-

1423 def register_service( 

-

1424 self, 

-

1425 path: VirtualPath, 

-

1426 name: Union[str, List[str]], 

-

1427 *, 

-

1428 type_of_service: str = "service", # "timer", etc. 

-

1429 service_scope: str = "system", 

-

1430 enable_by_default: bool = True, 

-

1431 start_by_default: bool = True, 

-

1432 default_upgrade_rule: ServiceUpgradeRule = "restart", 

-

1433 service_context: Optional[DSD] = None, 

-

1434 ) -> None: 

-

1435 """Register a service detected in the package 

-

1436 

-

1437 All the details will either be provided as-is or used as default when the plugin provided 

-

1438 integration code is called. 

-

1439 

-

1440 Two services from different service managers are considered related when: 

-

1441 

-

1442 1) They are of the same type (`type_of_service`) and have the same scope (`service_scope`), AND 

-

1443 2) Their plugin provided names have an overlap 

-

1444 

-

1445 Related services can be covered by the same service definition in the manifest. 

-

1446 

-

1447 :param path: The path defining this service. 

-

1448 :param name: The name of the service. Multiple ones can be provided if the service has aliases. 

-

1449 Note that when providing multiple names, `debputy` will use the first name in the list as the 

-

1450 default name if it has to choose. Any alternative name provided can be used by the packager 

-

1451 to identify this service. 

-

1452 :param type_of_service: The type of service. By default, this is "service", but plugins can 

-

1453 provide other types (such as "timer" for the systemd timer unit). 

-

1454 :param service_scope: The scope for this service. By default, this is "system" meaning the 

-

1455 service is a system-wide service. Service managers can define their own scopes such as 

-

1456 "user" (which is used by systemd for "per-user" services). 

-

1457 :param enable_by_default: Whether the service should be enabled by default, assuming the 

-

1458 packager does not explicitly override this setting. 

-

1459 :param start_by_default: Whether the service should be started by default on install, assuming 

-

1460 the packager does not explicitly override this setting. 

-

1461 :param default_upgrade_rule: The default value for how the service should be processed during 

-

1462 upgrades. Options are: 

-

1463 * `do-nothing`: The plugin should not interact with the running service (if any) 

-

1464 (maintenance of the enabled start, start on install, etc. are still applicable) 

-

1465 * `reload`: The plugin should attempt to reload the running service (if any). 

-

1466 Note: In combination with `auto_start_in_install == False`, be careful to not 

-

1467 start the service if it is not already running. 

-

1468 * `restart`: The plugin should attempt to restart the running service (if any). 

-

1469 Note: In combination with `auto_start_in_install == False`, be careful to not 

-

1470 start the service if it is not already running. 

-

1471 * `stop-then-start`: The plugin should stop the service during `prerm upgrade` 

-

1472 and start it again in the `postinst` script. 

-

1473 

-

1474 :param service_context: Any custom data that the detector wants to pass along to the 

-

1475 integrator for this service. 

-

1476 """ 

-

1477 raise NotImplementedError 
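A usage sketch (assuming `service_registry` and `unit_path` were handed to a detector by `debputy`; the concrete names are hypothetical):

    service_registry.register_service(
        unit_path,                 # VirtualPath of, e.g., a systemd unit file
        ["foo.service", "foo"],    # first name is the default; the rest are aliases
        type_of_service="service",
        service_scope="system",
        enable_by_default=True,
        start_by_default=True,
        default_upgrade_rule="restart",
    )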

-

1478 

-

1479 

-

1480@dataclasses.dataclass(slots=True, frozen=True) 

-

1481class ParserAttributeDocumentation: 

-

1482 attributes: FrozenSet[str] 

-

1483 description: Optional[str] 

-

1484 

-

1485 

-

1486def undocumented_attr(attr: str) -> ParserAttributeDocumentation: 

-

1487 """Describe an attribute as undocumented 

-

1488 

-

1489 If you for some reason do not want to document a particular attribute, you can mark it as 

-

1490 undocumented. This is required if you are only documenting a subset of the attributes, 

-

1491 because `debputy` assumes any omission to be a mistake. 

-

1492 """ 

-

1493 return ParserAttributeDocumentation( 

-

1494 frozenset({attr}), 

-

1495 None, 

-

1496 ) 

-

1497 

-

1498 

-

1499@dataclasses.dataclass(slots=True, frozen=True) 

-

1500class ParserDocumentation: 

-

1501 title: Optional[str] = None 

-

1502 description: Optional[str] = None 

-

1503 attribute_doc: Optional[Sequence[ParserAttributeDocumentation]] = None 

-

1504 alt_parser_description: Optional[str] = None 

-

1505 documentation_reference_url: Optional[str] = None 

-

1506 

-

1507 def replace(self, **changes: Any) -> "ParserDocumentation": 

-

1508 return dataclasses.replace(self, **changes) 

-

1509 

-

1510 

-

1511@dataclasses.dataclass(slots=True, frozen=True) 

-

1512class TypeMappingExample(Generic[S]): 

-

1513 source_input: S 

-

1514 

-

1515 

-

1516@dataclasses.dataclass(slots=True, frozen=True) 

-

1517class TypeMappingDocumentation(Generic[S]): 

-

1518 description: Optional[str] = None 

-

1519 examples: Sequence[TypeMappingExample[S]] = tuple() 

-

1520 

-

1521 

-

1522def type_mapping_example(source_input: S) -> TypeMappingExample[S]: 

-

1523 return TypeMappingExample(source_input) 

-

1524 

-

1525 

-

1526def type_mapping_reference_documentation( 

-

1527 *, 

-

1528 description: Optional[str] = None, 

-

1529 examples: Union[TypeMappingExample[S], Iterable[TypeMappingExample[S]]] = tuple(), 

-

1530) -> TypeMappingDocumentation[S]: 

-

1531 e = ( 

-

1532 tuple([examples]) 

-

1533 if isinstance(examples, TypeMappingExample) 

-

1534 else tuple(examples) 

-

1535 ) 

-

1536 return TypeMappingDocumentation( 

-

1537 description=description, 

-

1538 examples=e, 

-

1539 ) 
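For illustration, a hedged sketch (the description and the example input are made up):

    doc = type_mapping_reference_documentation(
        description="Maps an octal string to a file system mode.",
        examples=type_mapping_example("0644"),  # a single example is accepted as-is
    )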

-

1540 

-

1541 

-

1542def documented_attr( 

-

1543 attr: Union[str, Iterable[str]], 

-

1544 description: str, 

-

1545) -> ParserAttributeDocumentation: 

-

1546 """Describe an attribute or a group of attributes 

-

1547 

-

1548 :param attr: A single attribute or a sequence of attributes. The attribute must be the 

-

1549 attribute name as used in the source format version of the TypedDict. 

-

1550 

-

1551 If multiple attributes are provided, they will be documented together. This is often 

-

1552 useful if these attributes are strongly related (such as different names for the same 

-

1553 target attribute). 

-

1554 :param description: The description the user should see for this attribute / these 

-

1555 attributes. This parameter can be a Python format string with variables listed in 

-

1556 the description of `reference_documentation`. 

-

1557 :return: An opaque representation of the documentation. 

-

1558 """ 

-

1559 attributes = [attr] if isinstance(attr, str) else attr 

-

1560 return ParserAttributeDocumentation( 

-

1561 frozenset(attributes), 

-

1562 description, 

-

1563 ) 

-

1564 

-

1565 

-

1566def reference_documentation( 

-

1567 title: str = "Auto-generated reference documentation for {RULE_NAME}", 

-

1568 description: Optional[str] = textwrap.dedent( 

-

1569 """\ 

-

1570 This is an automatically generated reference documentation for {RULE_NAME}. It is generated 

-

1571 from input provided by {PLUGIN_NAME} via the debputy API. 

-

1572 

-

1573 (If you are the provider of the {PLUGIN_NAME} plugin, you can replace this text with 

-

1574 your own documentation by providing the `inline_reference_documentation` when registering 

-

1575 the manifest rule.) 

-

1576 """ 

-

1577 ), 

-

1578 attributes: Optional[Sequence[ParserAttributeDocumentation]] = None, 

-

1579 non_mapping_description: Optional[str] = None, 

-

1580 reference_documentation_url: Optional[str] = None, 

-

1581) -> ParserDocumentation: 

-

1582 """Provide inline reference documentation for the manifest snippet 

-

1583 

-

1584 For parameters that mention that they are a Python format, the following format variables 

-

1585 are available: 

-

1586 

-

1587 * RULE_NAME: Name of the rule. If manifest snippet has aliases, this will be the name of 

-

1588 the alias provided by the user. 

-

1589 * MANIFEST_FORMAT_DOC: Path OR URL to the "MANIFEST-FORMAT" reference documentation from 

-

1590 `debputy`. By using the MANIFEST_FORMAT_DOC variable, you ensure that you point to the 

-

1591 file that matches the version of `debputy` itself. 

-

1592 * PLUGIN_NAME: Name of the plugin providing this rule. 

-

1593 

-

1594 :param title: The text you want the user to see as the title for your rule. A placeholder is provided by default. 

-

1595 This parameter can be a Python format string with the above listed variables. 

-

1596 :param description: The text you want the user to see as a description for the rule. An auto-generated 

-

1597 placeholder is provided by default saying that no human written documentation was provided. 

-

1598 This parameter can be a Python format string with the above listed variables. 

-

1599 :param attributes: A sequence of attribute-related documentation. Each element of the sequence should 

-

1600 be the result of `documented_attr` or `undocumented_attr`. The sequence must cover all source 

-

1601 attributes exactly once. 

-

1602 :param non_mapping_description: The text you want the user to see as the description for your rule when 

-

1603 `debputy` describes its non-mapping format. Must not be provided for rules that do not have an 

-

1604 (optional) non-mapping format as source format. This parameter can be a Python format string with 

-

1605 the above listed variables. 

-

1606 :param reference_documentation_url: A URL to the reference documentation. 

-

1607 :return: An opaque representation of the documentation. 

-

1608 """ 

-

1609 return ParserDocumentation( 

-

1610 title, 

-

1611 description, 

-

1612 attributes, 

-

1613 non_mapping_description, 

-

1614 reference_documentation_url, 

-

1615 ) 
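A sketch tying the documentation helpers together (the attribute names and URL are hypothetical):

    docs = reference_documentation(
        title="Manifest rule for installing foo files",
        description="Installs foo files into the requested packages.",
        attributes=[
            documented_attr("source", "The path to install."),
            documented_attr(["as", "install-as"], "The destination path."),
            undocumented_attr("internal-option"),  # deliberately left undocumented
        ],
        reference_documentation_url="https://example.invalid/foo-plugin-docs",
    )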

-

1616 

-

1617 

-

1618class ServiceDefinition(Generic[DSD]): 

-

1619 __slots__ = () 

-

1620 

-

1621 @property 

-

1622 def name(self) -> str: 

-

1623 """Name of the service registered by the plugin 

-

1624 

-

1625 This is always a plugin provided name for this service (that is, `x.name in x.names` 

-

1626 will always be `True`). Where possible, this will be the same as the one that the 

-

1627 packager provided when they provided any configuration related to this service. 

-

1628 When not possible, this will be the first name provided by the plugin (`x.names[0]`). 

-

1629 

-

1630 If all the aliases are equal, then using this attribute will provide traceability 

-

1631 between the manifest and the generated maintscript snippets. When the exact name 

-

1632 used is important, the plugin should ignore this attribute and pick the name that 

-

1633 is needed. 

-

1634 """ 

-

1635 raise NotImplementedError 

-

1636 

-

1637 @property 

-

1638 def names(self) -> Sequence[str]: 

-

1639 """All *plugin provided* names and aliases of the service 

-

1640 

-

1641 This is the name/sequence of names that the plugin provided when it registered 

-

1642 the service earlier. 

-

1643 """ 

-

1644 raise NotImplementedError 

-

1645 

-

1646 @property 

-

1647 def path(self) -> VirtualPath: 

-

1648 """The registered path for this service 

-

1649 

-

1650 :return: The path that was associated with this service when it was registered 

-

1651 earlier. 

-

1652 """ 

-

1653 raise NotImplementedError 

-

1654 

-

1655 @property 

-

1656 def type_of_service(self) -> str: 

-

1657 """Type of the service such as "service" (daemon), "timer", etc. 

-

1658 

-

1659 :return: The type of service. It is the same value as the one the plugin provided 

-

1660 when registering the service (if not explicitly provided, it defaults to "service"). 

-

1661 """ 

-

1662 raise NotImplementedError 

-

1663 

-

1664 @property 

-

1665 def service_scope(self) -> str: 

-

1666 """Service scope such as "system" or "user" 

-

1667 

-

1668 :return: The service scope. It is the same value as the one the plugin provided 

-

1669 when registering the service (if not explicitly provided, it defaults to "system") 

-

1670 """ 

-

1671 raise NotImplementedError 

-

1672 

-

1673 @property 

-

1674 def auto_enable_on_install(self) -> bool: 

-

1675 """Whether the service should be auto-enabled on install 

-

1676 

-

1677 :return: True if the service should be enabled automatically, false if not. 

-

1678 """ 

-

1679 raise NotImplementedError 

-

1680 

-

1681 @property 

-

1682 def auto_start_on_install(self) -> bool: 

-

1683 """Whether the service should be auto-started on install 

-

1684 

-

1685 :return: True if the service should be started automatically, false if not. 

-

1686 """ 

-

1687 raise NotImplementedError 

-

1688 

-

1689 @property 

-

1690 def on_upgrade(self) -> ServiceUpgradeRule: 

-

1691 """How to handle the service during an upgrade 

-

1692 

-

1693 Options are: 

-

1694 * `do-nothing`: The plugin should not interact with the running service (if any) 

-

1695 (maintenance of the enabled start, start on install, etc. are still applicable) 

-

1696 * `reload`: The plugin should attempt to reload the running service (if any). 

-

1697 Note: In combination with `auto_start_in_install == False`, be careful to not 

-

1698 start the service if it is not already running. 

-

1699 * `restart`: The plugin should attempt to restart the running service (if any). 

-

1700 Note: In combination with `auto_start_in_install == False`, be careful to not 

-

1701 start the service if it is not already running. 

-

1702 * `stop-then-start`: The plugin should stop the service during `prerm upgrade` 

-

1703 and start it again in the `postinst` script. 

-

1704 

-

1705 Note: In all cases, the plugin should still consider what to do in 

-

1706 `prerm remove`, which is the last point in time where the plugin can rely on the 

-

1707 service definitions in the file systems to stop the services when the package is 

-

1708 being uninstalled. 

-

1709 

-

1710 :return: The service restart rule 

-

1711 """ 

-

1712 raise NotImplementedError 

-

1713 

-

1714 @property 

-

1715 def definition_source(self) -> str: 

-

1716 """Describes where this definition came from 

-

1717 

-

1718 If the definition is provided by the packager, then this will reference the part 

-

1719 of the manifest that made this definition. Otherwise, this will be a reference 

-

1720 to the plugin providing this definition. 

-

1721 

-

1722 :return: The source of this definition 

-

1723 """ 

-

1724 raise NotImplementedError 

-

1725 

-

1726 @property 

-

1727 def is_plugin_provided_definition(self) -> bool: 

-

1728 """Whether the definition source points to the plugin or a package provided definition 

-

1729 

-

1730 :return: True if definition is 100% from the plugin. False if the definition is partially 

-

1731 or fully from another source (usually, the packager via the manifest). 

-

1732 """ 

-

1733 raise NotImplementedError 

-

1734 

-

1735 @property 

-

1736 def service_context(self) -> Optional[DSD]: 

-

1737 """Custom service context (if any) provided by the detector code of the plugin 

-

1738 

-

1739 :return: If the detection code provided a custom data when registering the 

-

1740 service, this attribute will reference that data. If nothing was provided, 

-

1741 then this attribute will be None. 

-

1742 """ 

-

1743 raise NotImplementedError 

-
diff --git a/coverage-report/d_6c155ce9dd9f7742___init___py.html b/coverage-report/d_6c155ce9dd9f7742___init___py.html
deleted file mode 100644
index 2481545..0000000
--- a/coverage-report/d_6c155ce9dd9f7742___init___py.html
+++ /dev/null
@@ -1,108 +0,0 @@
-
-

- Coverage for src/debputy/yaml/__init__.py: 100%

- -

- 3 statements

-


- -
-
-
-

1from .compat import YAML, YAMLError, MarkedYAMLError 

-

2 

-

3MANIFEST_YAML = YAML() 

-

4 

-

5__all__ = [ 

-

6 "MANIFEST_YAML", 

-

7 "YAMLError", 

-

8 "MarkedYAMLError", 

-

9] 

-
diff --git a/coverage-report/d_6c155ce9dd9f7742_compat_py.html b/coverage-report/d_6c155ce9dd9f7742_compat_py.html
deleted file mode 100644
index 06aa381..0000000
--- a/coverage-report/d_6c155ce9dd9f7742_compat_py.html
+++ /dev/null
@@ -1,118 +0,0 @@
-
-

- Coverage for src/debputy/yaml/compat.py: 56%

- -

- 9 statements

-


- -
-
-
-

1__all__ = [ 

-

2 "YAML", 

-

3 "YAMLError", 

-

4 "MarkedYAMLError", 

-

5 "Node", 

-

6 "LineCol", 

-

7 "CommentedBase", 

-

8 "CommentedMap", 

-

9 "CommentedSeq", 

-

10] 

-

11 

-

12try: 

-

13 from ruyaml import YAMLError, YAML, Node 

-

14 from ruyaml.comments import LineCol, CommentedBase, CommentedMap, CommentedSeq 

-

15 from ruyaml.error import MarkedYAMLError 

-

16except (ImportError, ModuleNotFoundError): 

-

17 from ruamel.yaml import YAMLError, YAML, Node 

-

18 from ruamel.yaml.comments import LineCol, CommentedBase, CommentedMap, CommentedSeq 

-

19 from ruamel.yaml.error import MarkedYAMLError 

-
diff --git a/coverage-report/d_6e57078c9ef7177d___init___py.html b/coverage-report/d_6e57078c9ef7177d___init___py.html
deleted file mode 100644
index 1b88a9d..0000000
--- a/coverage-report/d_6e57078c9ef7177d___init___py.html
+++ /dev/null
@@ -1,99 +0,0 @@
-
-

- Coverage for src/debputy/commands/debputy_cmd/__init__.py: 100%

- -

- 0 statements

-


- -
-
-
-
diff --git a/coverage-report/d_6e57078c9ef7177d_context_py.html b/coverage-report/d_6e57078c9ef7177d_context_py.html
deleted file mode 100644
index bd5b3ce..0000000
--- a/coverage-report/d_6e57078c9ef7177d_context_py.html
+++ /dev/null
@@ -1,715 +0,0 @@
-
-

- Coverage for src/debputy/commands/debputy_cmd/context.py: 42%

- -

- 283 statements

-


- -
-
-
-

1import argparse 

-

2import dataclasses 

-

3import errno 

-

4import os 

-

5from typing import ( 

-

6 Optional, 

-

7 Tuple, 

-

8 Mapping, 

-

9 FrozenSet, 

-

10 Set, 

-

11 Union, 

-

12 Sequence, 

-

13 Iterable, 

-

14 Callable, 

-

15 Dict, 

-

16 TYPE_CHECKING, 

-

17) 

-

18 

-

19from debian.debian_support import DpkgArchTable 

-

20 

-

21from debputy._deb_options_profiles import DebBuildOptionsAndProfiles 

-

22from debputy.architecture_support import ( 

-

23 DpkgArchitectureBuildProcessValuesTable, 

-

24 dpkg_architecture_table, 

-

25) 

-

26from debputy.exceptions import DebputyRuntimeError 

-

27from debputy.filesystem_scan import FSROOverlay 

-

28from debputy.highlevel_manifest import HighLevelManifest 

-

29from debputy.highlevel_manifest_parser import YAMLManifestParser 

-

30from debputy.packages import SourcePackage, BinaryPackage, parse_source_debian_control 

-

31from debputy.plugin.api import VirtualPath 

-

32from debputy.plugin.api.impl import load_plugin_features 

-

33from debputy.plugin.api.feature_set import PluginProvidedFeatureSet 

-

34from debputy.substitution import ( 

-

35 Substitution, 

-

36 VariableContext, 

-

37 SubstitutionImpl, 

-

38 NULL_SUBSTITUTION, 

-

39) 

-

40from debputy.util import _error, PKGNAME_REGEX, resolve_source_date_epoch, setup_logging 

-

41 

-

42if TYPE_CHECKING: 

-

43 from argparse import _SubParsersAction 

-

44 

-

45 

-

46CommandHandler = Callable[["CommandContext"], None] 

-

47ArgparserConfigurator = Callable[[argparse.ArgumentParser], None] 

-

48 

-

49 

-

50def add_arg( 

-

51 *name_or_flags: str, 

-

52 **kwargs, 

-

53) -> Callable[[argparse.ArgumentParser], None]: 

-

54 def _configurator(argparser: argparse.ArgumentParser) -> None: 

-

55 argparser.add_argument( 

-

56 *name_or_flags, 

-

57 **kwargs, 

-

58 ) 

-

59 

-

60 return _configurator 

-

61 

-

62 

-

63@dataclasses.dataclass(slots=True, frozen=True) 

-

64class CommandArg: 

-

65 parsed_args: argparse.Namespace 

-

66 plugin_search_dirs: Sequence[str] 

-

67 

-

68 

-

69@dataclasses.dataclass 

-

70class Command: 

-

71 handler: Callable[["CommandContext"], None] 

-

72 require_substitution: bool = True 

-

73 requested_plugins_only: bool = False 

-

74 

-

75 

-

76class CommandContext: 

-

77 def __init__( 

-

78 self, 

-

79 parsed_args: argparse.Namespace, 

-

80 plugin_search_dirs: Sequence[str], 

-

81 require_substitution: bool = True, 

-

82 requested_plugins_only: bool = False, 

-

83 ) -> None: 

-

84 self.parsed_args = parsed_args 

-

85 self.plugin_search_dirs = plugin_search_dirs 

-

86 self._require_substitution = require_substitution 

-

87 self._requested_plugins_only = requested_plugins_only 

-

88 self._debputy_plugin_feature_set: PluginProvidedFeatureSet = ( 

-

89 PluginProvidedFeatureSet() 

-

90 ) 

-

91 self._debian_dir = FSROOverlay.create_root_dir("debian", "debian") 

-

92 self._mtime: Optional[int] = None 

-

93 self._source_variables: Optional[Mapping[str, str]] = None 

-

94 self._substitution: Optional[Substitution] = None 

-

95 self._requested_plugins: Optional[Sequence[str]] = None 

-

96 self._plugins_loaded = False 

-

97 self._dctrl_data: Optional[ 

-

98 Tuple[ 

-

99 DpkgArchitectureBuildProcessValuesTable, 

-

100 DpkgArchTable, 

-

101 DebBuildOptionsAndProfiles, 

-

102 "SourcePackage", 

-

103 Mapping[str, "BinaryPackage"], 

-

104 ] 

-

105 ] = None 

-

106 

-

107 @property 

-

108 def debian_dir(self) -> VirtualPath: 

-

109 return self._debian_dir 

-

110 

-

111 @property 

-

112 def mtime(self) -> int: 

-

113 if self._mtime is None: 

-

114 self._mtime = resolve_source_date_epoch( 

-

115 None, 

-

116 substitution=self.substitution, 

-

117 ) 

-

118 return self._mtime 

-

119 

-

120 def source_package(self) -> SourcePackage: 

-

121 _a, _b, _c, source, _d = self._parse_dctrl() 

-

122 return source 

-

123 

-

124 def binary_packages(self) -> Mapping[str, "BinaryPackage"]: 

-

125 _a, _b, _c, _source, binary_package_table = self._parse_dctrl() 

-

126 return binary_package_table 

-

127 

-

128 def requested_plugins(self) -> Sequence[str]: 

-

129 if self._requested_plugins is None: 

-

130 self._requested_plugins = self._resolve_requested_plugins() 

-

131 return self._requested_plugins 

-

132 

-

133 def required_plugins(self) -> Set[str]: 

-

134 return set(getattr(self.parsed_args, "required_plugins") or []) 

-

135 

-

136 @property 

-

137 def deb_build_options_and_profiles(self) -> "DebBuildOptionsAndProfiles": 

-

138 _a, _b, deb_build_options_and_profiles, _c, _d = self._parse_dctrl() 

-

139 return deb_build_options_and_profiles 

-

140 

-

141 @property 

-

142 def deb_build_options(self) -> Mapping[str, Optional[str]]: 

-

143 return self.deb_build_options_and_profiles.deb_build_options 

-

144 

-

145 def _create_substitution( 

-

146 self, 

-

147 parsed_args: argparse.Namespace, 

-

148 plugin_feature_set: PluginProvidedFeatureSet, 

-

149 debian_dir: VirtualPath, 

-

150 ) -> Substitution: 

-

151 requested_subst = self._require_substitution 

-

152 if hasattr(parsed_args, "substitution"): 

-

153 requested_subst = parsed_args.substitution 

-

154 if requested_subst is False and self._require_substitution: 

-

155 _error(f"--no-substitution cannot be used with {parsed_args.command}") 

-

156 if self._require_substitution or requested_subst is not False: 

-

157 variable_context = VariableContext(debian_dir) 

-

158 return SubstitutionImpl( 

-

159 plugin_feature_set=plugin_feature_set, 

-

160 unresolvable_substitutions=frozenset(["PACKAGE"]), 

-

161 variable_context=variable_context, 

-

162 ) 

-

163 return NULL_SUBSTITUTION 

-

164 

-

165 def load_plugins(self) -> PluginProvidedFeatureSet: 

-

166 if not self._plugins_loaded: 

-

167 requested_plugins = None 

-

168 required_plugins = self.required_plugins() 

-

169 if self._requested_plugins_only: 

-

170 requested_plugins = self.requested_plugins() 

-

171 debug_mode = getattr(self.parsed_args, "debug_mode", False) 

-

172 load_plugin_features( 

-

173 self.plugin_search_dirs, 

-

174 self.substitution, 

-

175 requested_plugins_only=requested_plugins, 

-

176 required_plugins=required_plugins, 

-

177 plugin_feature_set=self._debputy_plugin_feature_set, 

-

178 debug_mode=debug_mode, 

-

179 ) 

-

180 self._plugins_loaded = True 

-

181 return self._debputy_plugin_feature_set 

-

182 

-

183 @staticmethod 

-

184 def _plugin_from_dependency_field(dep_field: str) -> Iterable[str]: 

-

185 package_prefix = "debputy-plugin-" 

-

186 for dep_clause in (d.strip() for d in dep_field.split(",")): 

-

187 dep = dep_clause.split("|")[0].strip() 

-

188 if not dep.startswith(package_prefix): 

-

189 continue 

-

190 m = PKGNAME_REGEX.search(dep) 

-

191 assert m 

-

192 package_name = m.group(0) 

-

193 plugin_name = package_name[len(package_prefix) :] 

-

194 yield plugin_name 
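For illustration, a small worked example of this parsing (the dependency string is made up):

    bd = "debhelper-compat (= 13), debputy-plugin-foo | debputy-plugin-bar, python3"
    # Only the first alternative of each clause is considered, and only
    # "debputy-plugin-*" packages match, so this yields ["foo"].
    assert list(CommandContext._plugin_from_dependency_field(bd)) == ["foo"]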

-

195 

-

196 def _resolve_requested_plugins(self) -> Sequence[str]: 

-

197 _a, _b, _c, source_package, _d = self._parse_dctrl() 

-

198 bd = source_package.fields.get("Build-Depends", "") 

-

199 plugins = list(self._plugin_from_dependency_field(bd)) 

-

200 for field_name in ("Build-Depends-Arch", "Build-Depends-Indep"): 

-

201 f = source_package.fields.get(field_name) 

-

202 if not f: 

-

203 continue 

-

204 for plugin in self._plugin_from_dependency_field(f): 

-

205 raise DebputyRuntimeError( 

-

206 f"Cannot load plugins via {field_name}:" 

-

207 f" Please move debputy-plugin-{plugin} dependency to Build-Depends." 

-

208 ) 

-

209 

-

210 return plugins 

-

211 

-

212 @property 

-

213 def substitution(self) -> Substitution: 

-

214 if self._substitution is None: 

-

215 self._substitution = self._create_substitution( 

-

216 self.parsed_args, 

-

217 self._debputy_plugin_feature_set, 

-

218 self.debian_dir, 

-

219 ) 

-

220 return self._substitution 

-

221 

-

222 def must_be_called_in_source_root(self) -> None: 

-

223 if self.debian_dir.get("control") is None: 

-

224 _error( 

-

225 "This subcommand must be run from a source package root; expecting debian/control to exist." 

-

226 ) 

-

227 

-

228 def _parse_dctrl( 

-

229 self, 

-

230 ) -> Tuple[ 

-

231 DpkgArchitectureBuildProcessValuesTable, 

-

232 DpkgArchTable, 

-

233 DebBuildOptionsAndProfiles, 

-

234 "SourcePackage", 

-

235 Mapping[str, "BinaryPackage"], 

-

236 ]: 

-

237 if self._dctrl_data is None: 

-

238 build_env = DebBuildOptionsAndProfiles.instance() 

-

239 dpkg_architecture_variables = dpkg_architecture_table() 

-

240 dpkg_arch_query_table = DpkgArchTable.load_arch_table() 

-

241 

-

242 packages: Union[Set[str], FrozenSet[str]] = frozenset() 

-

243 if hasattr(self.parsed_args, "packages"): 

-

244 packages = self.parsed_args.packages 

-

245 

-

246 try: 

-

247 debian_control = self.debian_dir.get("control") 

-

248 if debian_control is None: 

-

249 raise FileNotFoundError( 

-

250 errno.ENOENT, 

-

251 os.strerror(errno.ENOENT), 

-

252 os.path.join(self.debian_dir.fs_path, "control"), 

-

253 ) 

-

254 source_package, binary_packages = parse_source_debian_control( 

-

255 debian_control, 

-

256 packages, # -p/--package 

-

257 set(), # -N/--no-package 

-

258 False, # -i 

-

259 False, # -a 

-

260 dpkg_architecture_variables=dpkg_architecture_variables, 

-

261 dpkg_arch_query_table=dpkg_arch_query_table, 

-

262 build_env=build_env, 

-

263 ) 

-

264 assert packages <= binary_packages.keys() 

-

265 except FileNotFoundError: 

-

266 # We are not using `must_be_called_in_source_root`, because we (in this case) require 

-

267 # the file to be readable (that is, parse_source_debian_control can also raise a 

-

268 # FileNotFoundError when trying to open the file). 

-

269 _error( 

-

270 "This subcommand must be run from a source package root; expecting debian/control to exist." 

-

271 ) 

-

272 

-

273 self._dctrl_data = ( 

-

274 dpkg_architecture_variables, 

-

275 dpkg_arch_query_table, 

-

276 build_env, 

-

277 source_package, 

-

278 binary_packages, 

-

279 ) 

-

280 

-

281 return self._dctrl_data 

-

282 

-

283 @property 

-

284 def has_dctrl_file(self) -> bool: 

-

285 debian_control = self.debian_dir.get("control") 

-

286 return debian_control is not None 

-

287 

-

288 def manifest_parser( 

-

289 self, 

-

290 *, 

-

291 manifest_path: Optional[str] = None, 

-

292 ) -> YAMLManifestParser: 

-

293 substitution = self.substitution 

-

294 

-

295 ( 

-

296 dpkg_architecture_variables, 

-

297 dpkg_arch_query_table, 

-

298 build_env, 

-

299 source_package, 

-

300 binary_packages, 

-

301 ) = self._parse_dctrl() 

-

302 

-

303 if self.parsed_args.debputy_manifest is not None: 

-

304 manifest_path = self.parsed_args.debputy_manifest 

-

305 if manifest_path is None: 

-

306 manifest_path = os.path.join(self.debian_dir.fs_path, "debputy.manifest") 

-

307 return YAMLManifestParser( 

-

308 manifest_path, 

-

309 source_package, 

-

310 binary_packages, 

-

311 substitution, 

-

312 dpkg_architecture_variables, 

-

313 dpkg_arch_query_table, 

-

314 build_env, 

-

315 self.load_plugins(), 

-

316 debian_dir=self.debian_dir, 

-

317 ) 

-

318 

-

319 def parse_manifest( 

-

320 self, 

-

321 *, 

-

322 manifest_path: Optional[str] = None, 

-

323 ) -> HighLevelManifest: 

-

324 substitution = self.substitution 

-

325 manifest_required = False 

-

326 

-

327 ( 

-

328 dpkg_architecture_variables, 

-

329 dpkg_arch_query_table, 

-

330 build_env, 

-

331 _, 

-

332 binary_packages, 

-

333 ) = self._parse_dctrl() 

-

334 

-

335 if self.parsed_args.debputy_manifest is not None: 

-

336 manifest_path = self.parsed_args.debputy_manifest 

-

337 manifest_required = True 

-

338 if manifest_path is None: 

-

339 manifest_path = os.path.join(self.debian_dir.fs_path, "debputy.manifest") 

-

340 parser = self.manifest_parser(manifest_path=manifest_path) 

-

341 

-

342 os.environ["SOURCE_DATE_EPOCH"] = substitution.substitute( 

-

343 "{{SOURCE_DATE_EPOCH}}", 

-

344 "Internal resolution", 

-

345 ) 

-

346 if os.path.isfile(manifest_path): 

-

347 return parser.parse_manifest() 

-

348 if manifest_required: 

-

349 _error(f'The path "{manifest_path}" is not a file!') 

-

350 return parser.build_manifest() 

-

351 

-

352 

-

class CommandBase:
    __slots__ = ()

    def configure(self, argparser: argparse.ArgumentParser) -> None:
        # Does nothing by default
        pass

    def __call__(self, command_arg: CommandArg) -> None:
        raise NotImplementedError


class SubcommandBase(CommandBase):
    __slots__ = ("name", "aliases", "help_description")

    def __init__(
        self,
        name: str,
        *,
        aliases: Sequence[str] = tuple(),
        help_description: Optional[str] = None,
    ) -> None:
        self.name = name
        self.aliases = aliases
        self.help_description = help_description

    def add_subcommand_to_subparser(
        self,
        subparser: "_SubParsersAction",
    ) -> argparse.ArgumentParser:
        parser = subparser.add_parser(
            self.name,
            aliases=self.aliases,
            help=self.help_description,
            allow_abbrev=False,
        )
        self.configure(parser)
        return parser


class GenericSubCommand(SubcommandBase):
    __slots__ = (
        "_handler",
        "_configure_handler",
        "_require_substitution",
        "_requested_plugins_only",
        "_log_only_to_stderr",
    )

    def __init__(
        self,
        name: str,
        handler: Callable[[CommandContext], None],
        *,
        aliases: Sequence[str] = tuple(),
        help_description: Optional[str] = None,
        configure_handler: Optional[Callable[[argparse.ArgumentParser], None]] = None,
        require_substitution: bool = True,
        requested_plugins_only: bool = False,
        log_only_to_stderr: bool = False,
    ) -> None:
        super().__init__(name, aliases=aliases, help_description=help_description)
        self._handler = handler
        self._configure_handler = configure_handler
        self._require_substitution = require_substitution
        self._requested_plugins_only = requested_plugins_only
        self._log_only_to_stderr = log_only_to_stderr

    def configure_handler(
        self,
        handler: Callable[[argparse.ArgumentParser], None],
    ) -> None:
        if self._configure_handler is not None:  # coverage: branch never taken in the test run
            raise TypeError("Only one argument handler can be provided")
        self._configure_handler = handler

    def configure(self, argparser: argparse.ArgumentParser) -> None:
        handler = self._configure_handler
        if handler is not None:
            handler(argparser)

    def __call__(self, command_arg: CommandArg) -> None:
        context = CommandContext(
            command_arg.parsed_args,
            command_arg.plugin_search_dirs,
            self._require_substitution,
            self._requested_plugins_only,
        )
        if self._log_only_to_stderr:
            setup_logging(reconfigure_logging=True, log_only_to_stderr=True)
        return self._handler(context)


class DispatchingCommandMixin(CommandBase):
    __slots__ = ()

    def add_subcommand(self, subcommand: SubcommandBase) -> None:
        raise NotImplementedError

    def add_dispatching_subcommand(
        self,
        name: str,
        dest: str,
        *,
        aliases: Sequence[str] = tuple(),
        help_description: Optional[str] = None,
        metavar: str = "command",
        default_subcommand: Optional[str] = None,
    ) -> "DispatcherCommand":
        ds = DispatcherCommand(
            name,
            dest,
            aliases=aliases,
            help_description=help_description,
            metavar=metavar,
            default_subcommand=default_subcommand,
        )
        self.add_subcommand(ds)
        return ds

    def register_subcommand(
        self,
        name: Union[str, Sequence[str]],
        *,
        help_description: Optional[str] = None,
        argparser: Optional[
            Union[ArgparserConfigurator, Sequence[ArgparserConfigurator]]
        ] = None,
        require_substitution: bool = True,
        requested_plugins_only: bool = False,
        log_only_to_stderr: bool = False,
    ) -> Callable[[CommandHandler], GenericSubCommand]:
        if isinstance(name, str):
            cmd_name = name
            aliases = []
        else:
            cmd_name = name[0]
            aliases = name[1:]

        if argparser is not None and not callable(argparser):
            args = argparser

            def _wrapper(parser: argparse.ArgumentParser) -> None:
                for configurator in args:
                    configurator(parser)

            argparser = _wrapper

        def _annotation_impl(func: CommandHandler) -> GenericSubCommand:
            subcommand = GenericSubCommand(
                cmd_name,
                func,
                aliases=aliases,
                help_description=help_description,
                require_substitution=require_substitution,
                requested_plugins_only=requested_plugins_only,
                log_only_to_stderr=log_only_to_stderr,
            )
            self.add_subcommand(subcommand)
            if argparser is not None:
                subcommand.configure_handler(argparser)

            return subcommand

        return _annotation_impl


class DispatcherCommand(SubcommandBase, DispatchingCommandMixin):
    __slots__ = (
        "_subcommands",
        "_aliases",
        "_dest",
        "_metavar",
        "_required",
        "_default_subcommand",
        "_argparser",
    )

    def __init__(
        self,
        name: str,
        dest: str,
        *,
        aliases: Sequence[str] = tuple(),
        help_description: Optional[str] = None,
        metavar: str = "command",
        default_subcommand: Optional[str] = None,
    ) -> None:
        super().__init__(name, aliases=aliases, help_description=help_description)
        self._aliases: Dict[str, SubcommandBase] = {}
        self._subcommands: Dict[str, SubcommandBase] = {}
        self._dest = dest
        self._metavar = metavar
        self._default_subcommand = default_subcommand
        self._argparser: Optional[argparse.ArgumentParser] = None

    def add_subcommand(self, subcommand: SubcommandBase) -> None:
        all_names = [subcommand.name]
        if subcommand.aliases:
            all_names.extend(subcommand.aliases)
        aliases = self._aliases
        for n in all_names:
            if n in aliases:  # coverage: branch never taken in the test run
                raise ValueError(
                    f"Internal error: Multiple handlers for {n} on topic {self.name}"
                )

            aliases[n] = subcommand
        self._subcommands[subcommand.name] = subcommand

    def configure(self, argparser: argparse.ArgumentParser) -> None:
        if self._argparser is not None:
            raise TypeError("Cannot configure twice!")
        self._argparser = argparser
        subcommands = self._subcommands
        if not subcommands:
            raise ValueError(
                f"Internal error: No subcommands for subcommand {self.name} (then why do we have it?)"
            )
        default_subcommand = self._default_subcommand
        required = default_subcommand is None
        if (
            default_subcommand is not None
            and default_subcommand not in ("--help", "-h")
            and default_subcommand not in subcommands
        ):
            raise ValueError(
                f"Internal error: Subcommand {self.name} should have {default_subcommand} as default,"
                " but it was not registered?"
            )
        subparser = argparser.add_subparsers(
            dest=self._dest,
            required=required,
            metavar=self._metavar,
        )
        for subcommand in subcommands.values():
            subcommand.add_subcommand_to_subparser(subparser)

    def has_command(self, command: str) -> bool:
        return command in self._aliases

    def __call__(self, command_arg: CommandArg) -> None:
        argparser = self._argparser
        assert argparser is not None
        v = getattr(command_arg.parsed_args, self._dest, None)
        if v is None:
            v = self._default_subcommand
            if v in ("--help", "-h"):
                argparser.parse_args([v])
                _error("Missing command", prog=argparser.prog)

        assert (
            v is not None
        ), f"Internal error: No default subcommand and argparse did not provide the required subcommand {self._dest}?"
        assert (
            v in self._aliases
        ), f"Internal error: {v} was accepted as a topic, but it was not registered?"
        self._aliases[v](command_arg)


ROOT_COMMAND = DispatcherCommand(
    "root",
    dest="command",
    metavar="COMMAND",
)
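ROOT_COMMAND is the dispatcher that every `debputy` subcommand hangs off. A minimal sketch of attaching a handler through the decorator API defined in this module (the `hello` command is hypothetical, purely to show the shape of the API):

@ROOT_COMMAND.register_subcommand(
    "hello",
    help_description="Print a greeting (illustrative only)",
)
def _hello_cmd(context: CommandContext) -> None:
    # register_subcommand wraps this handler in a GenericSubCommand and
    # registers it with the dispatcher; argparse wiring happens in configure().
    print("hello from a debputy subcommand")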
diff --git a/coverage-report/d_6e57078c9ef7177d_dc_util_py.html b/coverage-report/d_6e57078c9ef7177d_dc_util_py.html
deleted file mode 100644
index 9b4b879..0000000
--- a/coverage-report/d_6e57078c9ef7177d_dc_util_py.html
+++ /dev/null
@@ -1,114 +0,0 @@
Coverage for src/debputy/commands/debputy_cmd/dc_util.py: 27% (7 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

from typing import Dict, Iterable

from debputy.packager_provided_files import (
    PerPackagePackagerProvidedResult,
    PackagerProvidedFile,
)


def flatten_ppfs(
    all_ppfs: Dict[str, PerPackagePackagerProvidedResult]
) -> Iterable[PackagerProvidedFile]:
    for matched_ppf in all_ppfs.values():
        yield from matched_ppf.auto_installable
        for reserved_ppfs in matched_ppf.reserved_only.values():
            yield from reserved_ppfs
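flatten_ppfs() is consumed by the plugin commands later in this patch. A tiny sketch of its behaviour with a stubbed result object (SimpleNamespace stands in for PerPackagePackagerProvidedResult; the attribute shapes are assumptions based on the code above):

from types import SimpleNamespace

stub = SimpleNamespace(
    auto_installable=["pkg.install"],
    reserved_only={"changelog": ["pkg.changelog"]},
)
assert list(flatten_ppfs({"pkg": stub})) == ["pkg.install", "pkg.changelog"]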
diff --git a/coverage-report/d_6e57078c9ef7177d_lint_and_lsp_cmds_py.html b/coverage-report/d_6e57078c9ef7177d_lint_and_lsp_cmds_py.html
deleted file mode 100644
index 24b05ad..0000000
--- a/coverage-report/d_6e57078c9ef7177d_lint_and_lsp_cmds_py.html
+++ /dev/null
@@ -1,340 +0,0 @@
Coverage for src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py: 26% (65 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

import textwrap
from argparse import BooleanOptionalAction

from debputy.commands.debputy_cmd.context import ROOT_COMMAND, CommandContext, add_arg
from debputy.util import _error


_EDITOR_SNIPPETS = {
    "emacs": "emacs+eglot",
    "emacs+eglot": textwrap.dedent(
        """\
        ;; `debputy lsp server` glue for emacs eglot (eglot is built-in these days)
        ;;
        ;; Add to ~/.emacs or ~/.emacs.d/init.el and then activate via `M-x eglot`.
        ;;
        ;; Requires: apt install elpa-dpkg-dev-el elpa-yaml-mode
        ;; Recommends: apt install elpa-markdown-mode

        ;; Make emacs recognize debian/debputy.manifest as a YAML file
        (add-to-list 'auto-mode-alist '("/debian/debputy.manifest\\'" . yaml-mode))
        ;; Inform eglot about the debputy LSP
        (with-eval-after-load 'eglot
          (add-to-list 'eglot-server-programs
                       '(debian-control-mode . ("debputy" "lsp" "server")))
          (add-to-list 'eglot-server-programs
                       '(debian-changelog-mode . ("debputy" "lsp" "server")))
          (add-to-list 'eglot-server-programs
                       '(debian-copyright-mode . ("debputy" "lsp" "server")))
          ;; Requires elpa-dpkg-dev-el (>> 37.11)
          ;; (add-to-list 'eglot-server-programs
          ;;              '(debian-autopkgtest-control-mode . ("debputy" "lsp" "server")))
          ;; The debian/rules file uses the qmake mode.
          (add-to-list 'eglot-server-programs
                       '(makefile-gmake-mode . ("debputy" "lsp" "server")))
          (add-to-list 'eglot-server-programs
                       '(yaml-mode . ("debputy" "lsp" "server")))
        )

        ;; Auto-start eglot for the relevant modes.
        (add-hook 'debian-control-mode-hook 'eglot-ensure)
        ;; NOTE: changelog disabled by default because for some reason this
        ;; hook causes perceivable delay (several seconds) when opening the
        ;; first changelog. It seems to be related to imenu.
        ;; (add-hook 'debian-changelog-mode-hook 'eglot-ensure)
        (add-hook 'debian-copyright-mode-hook 'eglot-ensure)
        ;; Requires elpa-dpkg-dev-el (>> 37.11)
        ;; (add-hook 'debian-autopkgtest-control-mode-hook 'eglot-ensure)
        (add-hook 'makefile-gmake-mode-hook 'eglot-ensure)
        (add-hook 'yaml-mode-hook 'eglot-ensure)
        """
    ),
    "vim": "vim+youcompleteme",
    "vim+youcompleteme": textwrap.dedent(
        """\
        # debputy lsp server glue for vim with vim-youcompleteme. Add to ~/.vimrc
        #
        # Requires: apt install vim-youcompleteme

        # Make vim recognize debputy.manifest as YAML file
        au BufNewFile,BufRead debputy.manifest setf yaml
        # Inform vim/ycm about the debputy LSP
        # - NB: No known support for debian/tests/control that we can hook into.
        #   Feel free to provide one :)
        let g:ycm_language_server = [
          \\ { 'name': 'debputy',
          \\   'filetypes': [ 'debcontrol', 'debcopyright', 'debchangelog', 'make', 'yaml'],
          \\   'cmdline': [ 'debputy', 'lsp', 'server' ]
          \\ },
          \\ ]

        packadd! youcompleteme
        # Add relevant ycm keybinding such as:
        # nmap <leader>d <plug>(YCMHover)
        """
    ),
}


lsp_command = ROOT_COMMAND.add_dispatching_subcommand(
    "lsp",
    dest="lsp_command",
    help_description="Language server related subcommands",
)


@lsp_command.register_subcommand(
    "server",
    log_only_to_stderr=True,
    help_description="Start the language server",
    argparser=[
        add_arg(
            "--tcp",
            action="store_true",
            help="Use TCP server",
        ),
        add_arg(
            "--ws",
            action="store_true",
            help="Use WebSocket server",
        ),
        add_arg(
            "--host",
            default="127.0.0.1",
            help="Bind to this address (Use with --tcp / --ws)",
        ),
        add_arg(
            "--port",
            type=int,
            default=2087,
            help="Bind to this port (Use with --tcp / --ws)",
        ),
    ],
)
def lsp_server_cmd(context: CommandContext) -> None:
    parsed_args = context.parsed_args

    try:
        import lsprotocol
        import pygls
    except ImportError:
        _error(
            "This feature requires lsprotocol and pygls (apt-get install python3-lsprotocol python3-pygls)"
        )

    feature_set = context.load_plugins()

    from debputy.lsp.lsp_features import (
        ensure_lsp_features_are_loaded,
    )
    from debputy.lsp.lsp_dispatch import DEBPUTY_LANGUAGE_SERVER

    ensure_lsp_features_are_loaded()
    debputy_language_server = DEBPUTY_LANGUAGE_SERVER
    debputy_language_server.plugin_feature_set = feature_set

    if parsed_args.tcp:
        debputy_language_server.start_tcp(parsed_args.host, parsed_args.port)
    elif parsed_args.ws:
        debputy_language_server.start_ws(parsed_args.host, parsed_args.port)
    else:
        debputy_language_server.start_io()


@lsp_command.register_subcommand(
    "editor-config",
    help_description="Provide editor configuration snippets",
    argparser=[
        add_arg(
            "editor_name",
            metavar="editor",
            choices=_EDITOR_SNIPPETS,
            default=None,
            nargs="?",
            help="The editor to provide a snippet for",
        ),
    ],
)
def lsp_editor_glue(context: CommandContext) -> None:
    editor_name = context.parsed_args.editor_name

    if editor_name is None:
        content = []
        for editor_name, payload in _EDITOR_SNIPPETS.items():
            alias_of = ""
            if payload in _EDITOR_SNIPPETS:
                alias_of = f" (short for: {payload})"
            content.append((editor_name, alias_of))
        max_name = max(len(c[0]) for c in content)
        print("This version of debputy has editor snippets for the following editors: ")
        for editor_name, alias_of in content:
            print(f" * {editor_name:<{max_name}}{alias_of}")
        return
    result = _EDITOR_SNIPPETS[editor_name]
    while result in _EDITOR_SNIPPETS:
        result = _EDITOR_SNIPPETS[result]
    print(result)


@lsp_command.register_subcommand(
    "features",
    help_description="Describe language ids and features",
)
def lsp_features_cmd(_context: CommandContext) -> None:
    try:
        import lsprotocol
        import pygls
    except ImportError:
        _error(
            "This feature requires lsprotocol and pygls (apt-get install python3-lsprotocol python3-pygls)"
        )

    from debputy.lsp.lsp_features import describe_lsp_features

    describe_lsp_features()


@ROOT_COMMAND.register_subcommand(
    "lint",
    log_only_to_stderr=True,
    argparser=[
        add_arg(
            "--spellcheck",
            dest="spellcheck",
            action="store_true",
            shared=True,
            help="Enable spellchecking",
        ),
        add_arg(
            "--auto-fix",
            dest="auto_fix",
            action="store_true",
            shared=True,
            help="Automatically fix problems with trivial or obvious corrections.",
        ),
        add_arg(
            "--linter-exit-code",
            dest="linter_exit_code",
            default=True,
            action=BooleanOptionalAction,
            help='Enable or disable the "linter" convention of exiting with an error if severe issues were found',
        ),
    ],
)
def lint_cmd(context: CommandContext) -> None:
    try:
        import lsprotocol
    except ImportError:
        _error("This feature requires lsprotocol (apt-get install python3-lsprotocol)")

    from debputy.linting.lint_impl import perform_linting

    context.must_be_called_in_source_root()
    perform_linting(context)


def ensure_lint_and_lsp_commands_are_loaded():
    # Loading the module does the heavy lifting.
    # However, having this function means that we do not have an "unused" import
    # that some tool gets tempted to remove.
    assert ROOT_COMMAND.has_command("lsp")
    assert ROOT_COMMAND.has_command("lint")
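Note how `editor-config` resolves aliases: "emacs" maps to "emacs+eglot", and the while-loop follows such chains until it reaches an actual snippet. The same loop in isolation, with a hypothetical alias table:

snippets = {"emacs": "emacs+eglot", "emacs+eglot": ";; actual eglot snippet"}
result = snippets["emacs"]
while result in snippets:  # follow alias chains to the real payload
    result = snippets[result]
assert result == ";; actual eglot snippet"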
diff --git a/coverage-report/d_6e57078c9ef7177d_output_py.html b/coverage-report/d_6e57078c9ef7177d_output_py.html
deleted file mode 100644
index c212f65..0000000
--- a/coverage-report/d_6e57078c9ef7177d_output_py.html
+++ /dev/null
@@ -1,434 +0,0 @@
Coverage for src/debputy/commands/debputy_cmd/output.py: 17% (191 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

import argparse
import contextlib
import itertools
import os
import re
import shutil
import subprocess
import sys
from typing import (
    Union,
    Sequence,
    Iterable,
    Iterator,
    IO,
    Mapping,
    Tuple,
    Optional,
    Any,
)

from debputy.util import assume_not_none

try:
    import colored
except ImportError:
    colored = None


def _pager() -> Optional[str]:
    pager = os.environ.get("DEBPUTY_PAGER")
    if pager is None:
        pager = os.environ.get("PAGER")
    if pager is None and shutil.which("less") is not None:
        pager = "less"
    return pager


URL_START = "\033]8;;"
URL_END = "\033]8;;\a"
MAN_URL_REWRITE = re.compile(r"man:(\S+)[(](\d+)[)]")

_SUPPORTED_COLORS = {
    "black",
    "red",
    "green",
    "yellow",
    "blue",
    "magenta",
    "cyan",
    "white",
}
_SUPPORTED_STYLES = {"none", "bold"}


class OutputStylingBase:
    def __init__(
        self,
        stream: IO[str],
        output_format: str,
        *,
        optimize_for_screen_reader: bool = False,
    ) -> None:
        self.stream = stream
        self.output_format = output_format
        self.optimize_for_screen_reader = optimize_for_screen_reader
        self._color_support = None

    def colored(
        self,
        text: str,
        *,
        fg: Optional[str] = None,
        bg: Optional[str] = None,
        style: Optional[str] = None,
    ) -> str:
        self._check_color(fg)
        self._check_color(bg)
        self._check_text_style(style)
        return text

    @property
    def supports_colors(self) -> bool:
        return False

    def print_list_table(
        self,
        headers: Sequence[Union[str, Tuple[str, str]]],
        rows: Sequence[Sequence[str]],
    ) -> None:
        if rows:
            if any(len(r) != len(rows[0]) for r in rows):
                raise ValueError(
                    "Unbalanced table: All rows must have the same column count"
                )
            if len(rows[0]) != len(headers):
                raise ValueError(
                    "Unbalanced table: header list does not agree with row list on number of columns"
                )

        if not headers:
            raise ValueError("No headers provided!?")

        cadjust = {}
        header_names = []
        for c in headers:
            if isinstance(c, str):
                header_names.append(c)
            else:
                cname, adjust = c
                header_names.append(cname)
                cadjust[cname] = adjust

        if self.output_format == "csv":
            from csv import writer

            w = writer(self.stream)
            w.writerow(header_names)
            w.writerows(rows)
            return

        column_lengths = [
            max((len(h), max(len(r[i]) for r in rows)))
            for i, h in enumerate(header_names)
        ]
        # divider => "+---+---+-...-+"
        divider = "+-" + "-+-".join("-" * x for x in column_lengths) + "-+"
        # row_format => '| {:<10} | {:<8} | ... |' where the numbers are the column lengths
        row_format_inner = " | ".join(
            f"{{CELL_COLOR}}{{:{cadjust.get(cn, '<')}{x}}}{{CELL_COLOR_RESET}}"
            for cn, x in zip(header_names, column_lengths)
        )

        row_format = f"| {row_format_inner} |"

        if self.supports_colors:
            c = self._color_support
            assert c is not None
            header_color = c.Style.bold
            header_color_reset = c.Style.reset
        else:
            header_color = ""
            header_color_reset = ""

        self.print_visual_formatting(divider)
        self.print(
            row_format.format(
                *header_names,
                CELL_COLOR=header_color,
                CELL_COLOR_RESET=header_color_reset,
            )
        )
        self.print_visual_formatting(divider)
        for row in rows:
            self.print(row_format.format(*row, CELL_COLOR="", CELL_COLOR_RESET=""))
        self.print_visual_formatting(divider)

    def print(self, /, string: str = "", **kwargs) -> None:
        if "file" in kwargs:
            raise ValueError("Unsupported kwarg file")
        print(string, file=self.stream, **kwargs)

    def print_visual_formatting(self, /, format_sequence: str, **kwargs) -> None:
        if self.optimize_for_screen_reader:
            return
        self.print(format_sequence, **kwargs)

    def print_for_screen_reader(self, /, text: str, **kwargs) -> None:
        if not self.optimize_for_screen_reader:
            return
        self.print(text, **kwargs)

    def _check_color(self, color: Optional[str]) -> None:
        if color is not None and color not in _SUPPORTED_COLORS:
            raise ValueError(
                f"Unsupported color: {color}. Only the following are supported {','.join(_SUPPORTED_COLORS)}"
            )

    def _check_text_style(self, style: Optional[str]) -> None:
        if style is not None and style not in _SUPPORTED_STYLES:
            raise ValueError(
                f"Unsupported style: {style}. Only the following are supported {','.join(_SUPPORTED_STYLES)}"
            )

    def render_url(self, link_url: str) -> str:
        return link_url


class ANSIOutputStylingBase(OutputStylingBase):
    def __init__(
        self,
        stream: IO[str],
        output_format: str,
        *,
        support_colors: bool = True,
        support_clickable_urls: bool = True,
        **kwargs: Any,
    ) -> None:
        super().__init__(stream, output_format, **kwargs)
        self._stream = stream
        self._color_support = colored
        self._support_colors = (
            support_colors if self._color_support is not None else False
        )
        self._support_clickable_urls = support_clickable_urls

    @property
    def supports_colors(self) -> bool:
        return self._support_colors

    def colored(
        self,
        text: str,
        *,
        fg: Optional[str] = None,
        bg: Optional[str] = None,
        style: Optional[str] = None,
    ) -> str:
        self._check_color(fg)
        self._check_color(bg)
        self._check_text_style(style)
        if not self.supports_colors:
            return text
        _colored = self._color_support
        codes = []
        if style is not None:
            code = getattr(_colored.Style, style)
            assert code is not None
            codes.append(code)
        if fg is not None:
            code = getattr(_colored.Fore, fg)
            assert code is not None
            codes.append(code)
        if bg is not None:
            code = getattr(_colored.Back, bg)
            assert code is not None
            codes.append(code)
        if not codes:
            return text
        return "".join(codes) + text + _colored.Style.reset

    def render_url(self, link_url: str) -> str:
        if not self._support_clickable_urls:
            return super().render_url(link_url)
        link_text = link_url
        if not self.optimize_for_screen_reader and link_url.startswith("man:"):
            # Rewrite man page to a clickable link by default. I am not sure how the hyperlink
            # ANSI code works with screen readers, so lets not rewrite the man page link by
            # default. My fear is that both the link url and the link text gets read out.
            m = MAN_URL_REWRITE.match(link_url)
            if m:
                page, section = m.groups()
                link_url = f"https://manpages.debian.org/{page}.{section}"
        return URL_START + f"{link_url}\a{link_text}" + URL_END


def _output_styling(
    parsed_args: argparse.Namespace,
    stream: IO[str],
) -> OutputStylingBase:
    output_format = getattr(parsed_args, "output_format", None)
    if output_format is None:
        output_format = "text"
    optimize_for_screen_reader = os.environ.get("OPTIMIZE_FOR_SCREEN_READER", "") != ""
    if not stream.isatty():
        return OutputStylingBase(
            stream, output_format, optimize_for_screen_reader=optimize_for_screen_reader
        )
    return ANSIOutputStylingBase(
        stream, output_format, optimize_for_screen_reader=optimize_for_screen_reader
    )


@contextlib.contextmanager
def _stream_to_pager(
    parsed_args: argparse.Namespace,
) -> Iterator[Tuple[IO[str], OutputStylingBase]]:
    fancy_output = _output_styling(parsed_args, sys.stdout)
    if (
        not parsed_args.pager
        or not sys.stdout.isatty()
        or fancy_output.output_format != "text"
    ):
        yield sys.stdout, fancy_output
        return

    pager = _pager()
    if pager is None:
        yield sys.stdout, fancy_output
        return

    env: Mapping[str, str] = os.environ
    if "LESS" not in env:
        env_copy = dict(os.environ)
        env_copy["LESS"] = "-FRSXMQ"
        env = env_copy

    cmd = subprocess.Popen(
        pager,
        stdin=subprocess.PIPE,
        encoding="utf-8",
        env=env,
    )
    stdin = assume_not_none(cmd.stdin)
    try:
        fancy_output.stream = stdin
        yield stdin, fancy_output
    except Exception:
        stdin.close()
        cmd.kill()
        cmd.wait()
        raise
    finally:
        fancy_output.stream = sys.stdout  # restore the non-pager output stream
        stdin.close()
        cmd.wait()


def _normalize_cell(cell: Union[str, Sequence[str]], times: int) -> Iterable[str]:
    if isinstance(cell, str):
        return itertools.chain([cell], itertools.repeat("", times=times - 1))
    if not cell:
        return itertools.repeat("", times=times)
    return itertools.chain(cell, itertools.repeat("", times=times - len(cell)))


def _expand_rows(
    rows: Sequence[Sequence[Union[str, Sequence[str]]]]
) -> Iterator[Sequence[str]]:
    for row in rows:
        if all(isinstance(c, str) for c in row):
            yield row
        else:
            longest = max(len(c) if isinstance(c, list) else 1 for c in row)
            cells = [_normalize_cell(c, times=longest) for c in row]
            yield from zip(*cells)
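_normalize_cell() and _expand_rows() together let one logical row carry a list-valued cell, which is rendered as extra physical rows padded with empty strings. A small worked example (the data is hypothetical):

rows = [
    ("pkg-a", "amd64"),
    ("pkg-b", ["amd64", "arm64"]),  # list cell expands into two physical rows
]
assert list(_expand_rows(rows)) == [
    ("pkg-a", "amd64"),
    ("pkg-b", "amd64"),
    ("", "arm64"),
]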
diff --git a/coverage-report/d_6e57078c9ef7177d_plugin_cmds_py.html b/coverage-report/d_6e57078c9ef7177d_plugin_cmds_py.html
deleted file mode 100644
index 2f6123b..0000000
--- a/coverage-report/d_6e57078c9ef7177d_plugin_cmds_py.html
+++ /dev/null
@@ -1,1295 +0,0 @@
Coverage for src/debputy/commands/debputy_cmd/plugin_cmds.py: 13% (541 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
import argparse
import itertools
import operator
import os
import sys
from itertools import chain
from typing import (
    Sequence,
    Union,
    Tuple,
    Iterable,
    Any,
    Optional,
    Type,
    Mapping,
    Callable,
)

from debputy import DEBPUTY_DOC_ROOT_DIR
from debputy.commands.debputy_cmd.context import (
    CommandContext,
    add_arg,
    ROOT_COMMAND,
)
from debputy.commands.debputy_cmd.dc_util import flatten_ppfs
from debputy.commands.debputy_cmd.output import (
    _stream_to_pager,
    _output_styling,
    OutputStylingBase,
)
from debputy.exceptions import DebputySubstitutionError
from debputy.filesystem_scan import build_virtual_fs
from debputy.manifest_parser.base_types import TypeMapping
from debputy.manifest_parser.declarative_parser import (
    DeclarativeMappingInputParser,
    DeclarativeNonMappingInputParser,
    BASIC_SIMPLE_TYPES,
)
from debputy.manifest_parser.parser_data import ParserContextData
from debputy.manifest_parser.parser_doc import render_rule
from debputy.manifest_parser.util import unpack_type, AttributePath
from debputy.packager_provided_files import detect_all_packager_provided_files
from debputy.plugin.api.example_processing import (
    process_discard_rule_example,
    DiscardVerdict,
)
from debputy.plugin.api.impl import plugin_metadata_for_debputys_own_plugin
from debputy.plugin.api.impl_types import (
    PackagerProvidedFileClassSpec,
    PluginProvidedManifestVariable,
    DispatchingParserBase,
    DeclarativeInputParser,
    DebputyPluginMetadata,
    DispatchingObjectParser,
    SUPPORTED_DISPATCHABLE_TABLE_PARSERS,
    OPARSER_MANIFEST_ROOT,
    PluginProvidedDiscardRule,
    AutomaticDiscardRuleExample,
    MetadataOrMaintscriptDetector,
    PluginProvidedTypeMapping,
)
from debputy.plugin.api.spec import (
    ParserDocumentation,
    reference_documentation,
    undocumented_attr,
    TypeMappingExample,
)
from debputy.substitution import Substitution
from debputy.util import _error, assume_not_none, _warn

plugin_dispatcher = ROOT_COMMAND.add_dispatching_subcommand(
    "plugin",
    "plugin_subcommand",
    default_subcommand="--help",
    help_description="Interact with debputy plugins",
    metavar="command",
)

plugin_list_cmds = plugin_dispatcher.add_dispatching_subcommand(
    "list",
    "plugin_subcommand_list",
    metavar="topic",
    default_subcommand="plugins",
    help_description="List plugins or things provided by plugins (unstable format)."
    " Pass `--help` *after* `list` to get a topic listing",
)

plugin_show_cmds = plugin_dispatcher.add_dispatching_subcommand(
    "show",
    "plugin_subcommand_show",
    metavar="topic",
    help_description="Show details about a plugin or things provided by plugins (unstable format)."
    " Pass `--help` *after* `show` to get a topic listing",
)
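The three dispatchers above produce the nested `debputy plugin <list|show> <topic>` command tree. A minimal sketch of how a topic handler would slot into it (the topic name is hypothetical, for illustration only):

@plugin_list_cmds.register_subcommand(
    "example-topic",  # hypothetical topic
    help_description="Illustrative only",
)
def _list_example_topic(context: CommandContext) -> None:
    # Reached via: debputy plugin list example-topic
    with _stream_to_pager(context.parsed_args) as (fd, fo):
        fo.print_list_table(["Name"], [("example",)])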

def format_output_arg(
    default_format: str,
    allowed_formats: Sequence[str],
    help_text: str,
) -> Callable[[argparse.ArgumentParser], None]:
    if default_format not in allowed_formats:  # coverage: branch never taken in the test run
        raise ValueError("The default format must be in the allowed_formats...")

    def _configurator(argparser: argparse.ArgumentParser) -> None:
        argparser.add_argument(
            "--output-format",
            dest="output_format",
            default=default_format,
            choices=allowed_formats,
            help=help_text,
        )

    return _configurator


# To let --output-format=... "always" work
TEXT_ONLY_FORMAT = format_output_arg(
    "text",
    ["text"],
    "Select a given output format (options and output are not stable between releases)",
)


TEXT_CSV_FORMAT_NO_STABILITY_PROMISE = format_output_arg(
    "text",
    ["text", "csv"],
    "Select a given output format (options and output are not stable between releases)",
)
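Because format_output_arg() returns a configurator, the same --output-format option can be attached to many subparsers. A minimal sketch applying one to a throwaway parser:

demo_parser = argparse.ArgumentParser(prog="demo")  # throwaway parser
TEXT_CSV_FORMAT_NO_STABILITY_PROMISE(demo_parser)   # adds --output-format {text,csv}
assert demo_parser.parse_args(["--output-format", "csv"]).output_format == "csv"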

@plugin_list_cmds.register_subcommand(
    "plugins",
    help_description="List known plugins with their versions",
    argparser=TEXT_CSV_FORMAT_NO_STABILITY_PROMISE,
)
def _plugin_cmd_list_plugins(context: CommandContext) -> None:
    plugin_metadata_entries = context.load_plugins().plugin_data.values()
    # Because the "plugins" part is optional, we are not guaranteed that TEXT_CSV_FORMAT applies
    output_format = getattr(context.parsed_args, "output_format", "text")
    assert output_format in {"text", "csv"}
    with _stream_to_pager(context.parsed_args) as (fd, fo):
        fo.print_list_table(
            ["Plugin Name", "Plugin Path"],
            [(p.plugin_name, p.plugin_path) for p in plugin_metadata_entries],
        )


def _path(path: str) -> str:
    if path.startswith("./"):
        return path[1:]
    return path


def _ppf_flags(ppf: PackagerProvidedFileClassSpec) -> str:
    flags = []
    if ppf.allow_name_segment:
        flags.append("named")
    if ppf.allow_architecture_segment:
        flags.append("arch")
    if ppf.supports_priority:
        flags.append(f"priority={ppf.default_priority}")
    if ppf.packageless_is_fallback_for_all_packages:
        flags.append("main-all-fallback")
    if ppf.post_formatting_rewrite:
        flags.append("post-format-hook")
    return ",".join(flags)


@plugin_list_cmds.register_subcommand(
    ["used-packager-provided-files", "uppf", "u-p-p-f"],
    help_description="List packager provided files used by this package (debian/pkg.foo)",
    argparser=TEXT_ONLY_FORMAT,
)
def _plugin_cmd_list_uppf(context: CommandContext) -> None:
    ppf_table = context.load_plugins().packager_provided_files
    all_ppfs = detect_all_packager_provided_files(
        ppf_table,
        context.debian_dir,
        context.binary_packages(),
    )
    requested_plugins = set(context.requested_plugins())
    requested_plugins.add("debputy")
    all_detected_ppfs = list(flatten_ppfs(all_ppfs))

    used_ppfs = [
        p
        for p in all_detected_ppfs
        if p.definition.debputy_plugin_metadata.plugin_name in requested_plugins
    ]
    inactive_ppfs = [
        p
        for p in all_detected_ppfs
        if p.definition.debputy_plugin_metadata.plugin_name not in requested_plugins
    ]

    if not used_ppfs and not inactive_ppfs:
        print("No packager provided files detected; not even a changelog... ?")
        return

    with _stream_to_pager(context.parsed_args) as (fd, fo):
        if used_ppfs:
            headers: Sequence[Union[str, Tuple[str, str]]] = [
                "File",
                "Matched Stem",
                "Installed Into",
                "Installed As",
            ]
            fo.print_list_table(
                headers,
                [
                    (
                        ppf.path.path,
                        ppf.definition.stem,
                        ppf.package_name,
                        "/".join(ppf.compute_dest()).lstrip("."),
                    )
                    for ppf in sorted(
                        used_ppfs, key=operator.attrgetter("package_name")
                    )
                ],
            )

        if inactive_ppfs:
            headers: Sequence[Union[str, Tuple[str, str]]] = [
                "UNUSED FILE",
                "Matched Stem",
                "Installed Into",
                "Could Be Installed As",
                "If B-D Had",
            ]
            fo.print_list_table(
                headers,
                [
                    (
                        f"~{ppf.path.path}~",
                        ppf.definition.stem,
                        f"~{ppf.package_name}~",
                        "/".join(ppf.compute_dest()).lstrip("."),
                        f"debputy-plugin-{ppf.definition.debputy_plugin_metadata.plugin_name}",
                    )
                    for ppf in sorted(
                        inactive_ppfs, key=operator.attrgetter("package_name")
                    )
                ],
            )


@plugin_list_cmds.register_subcommand(
    ["packager-provided-files", "ppf", "p-p-f"],
    help_description="List packager provided file definitions (debian/pkg.foo)",
    argparser=TEXT_CSV_FORMAT_NO_STABILITY_PROMISE,
)
def _plugin_cmd_list_ppf(context: CommandContext) -> None:
    ppfs: Iterable[PackagerProvidedFileClassSpec]
    ppfs = context.load_plugins().packager_provided_files.values()
    with _stream_to_pager(context.parsed_args) as (fd, fo):
        headers: Sequence[Union[str, Tuple[str, str]]] = [
            "Stem",
            "Installed As",
            ("Mode", ">"),
            "Features",
            "Provided by",
        ]
        fo.print_list_table(
            headers,
            [
                (
                    ppf.stem,
                    _path(ppf.installed_as_format),
                    "0" + oct(ppf.default_mode)[2:],
                    _ppf_flags(ppf),
                    ppf.debputy_plugin_metadata.plugin_name,
                )
                for ppf in sorted(ppfs, key=operator.attrgetter("stem"))
            ],
        )

        if os.path.isdir("debian/") and fo.output_format == "text":
            fo.print()
            fo.print(
                "Hint: You can use `debputy plugin list used-packager-provided-files` to have `debputy`",
            )
            fo.print("list all the files in debian/ that match these definitions.")


@plugin_list_cmds.register_subcommand(
    ["metadata-detectors"],
    help_description="List metadata detectors",
    argparser=TEXT_CSV_FORMAT_NO_STABILITY_PROMISE,
)
def _plugin_cmd_list_metadata_detectors(context: CommandContext) -> None:
    mds = list(
        chain.from_iterable(
            context.load_plugins().metadata_maintscript_detectors.values()
        )
    )

    def _sort_key(md: "MetadataOrMaintscriptDetector") -> Any:
        return md.plugin_metadata.plugin_name, md.detector_id

    with _stream_to_pager(context.parsed_args) as (fd, fo):
        fo.print_list_table(
            ["Provided by", "Detector Id"],
            [
                (md.plugin_metadata.plugin_name, md.detector_id)
                for md in sorted(mds, key=_sort_key)
            ],
        )


def _resolve_variable_for_list(
    substitution: Substitution,
    variable: PluginProvidedManifestVariable,
) -> str:
    var = "{{" + variable.variable_name + "}}"
    try:
        value = substitution.substitute(var, "CLI request")
    except DebputySubstitutionError:
        value = None
    return _render_manifest_variable_value(value)


def _render_manifest_variable_flag(variable: PluginProvidedManifestVariable) -> str:
    flags = []
    if variable.is_for_special_case:
        flags.append("special-use-case")
    if variable.is_internal:
        flags.append("internal")
    return ",".join(flags)


def _render_list_filter(v: Optional[bool]) -> str:
    if v is None:
        return "N/A"
    return "shown" if v else "hidden"


@plugin_list_cmds.register_subcommand(
    ["manifest-variables"],
    help_description="List plugin provided manifest variables (such as `{{path:FOO}}`)",
)
def plugin_cmd_list_manifest_variables(context: CommandContext) -> None:
    variables = context.load_plugins().manifest_variables
    substitution = context.substitution.with_extra_substitutions(
        PACKAGE="<package-name>"
    )
    parsed_args = context.parsed_args
    show_special_case_vars = parsed_args.show_special_use_variables
    show_token_vars = parsed_args.show_token_variables
    show_all_vars = parsed_args.show_all_variables

    def _include_var(var: PluginProvidedManifestVariable) -> bool:
        if show_all_vars:
            return True
        if var.is_internal:
            return False
        if var.is_for_special_case and not show_special_case_vars:
            return False
        if var.is_token and not show_token_vars:
            return False
        return True

    with _stream_to_pager(context.parsed_args) as (fd, fo):
        fo.print_list_table(
            ["Variable (use via: `{{ NAME }}`)", "Value", "Flag", "Provided by"],
            [
                (
                    k,
                    _resolve_variable_for_list(substitution, var),
                    _render_manifest_variable_flag(var),
                    var.plugin_metadata.plugin_name,
                )
                for k, var in sorted(variables.items())
                if _include_var(var)
            ],
        )

        fo.print()

        filters = [
            (
                "Token variables",
                show_token_vars if not show_all_vars else None,
                "--show-token-variables",
            ),
            (
                "Special use variables",
                show_special_case_vars if not show_all_vars else None,
                "--show-special-case-variables",
            ),
        ]

        fo.print_list_table(
            ["Variable type", "Value", "Option"],
            [
                (
                    fname,
                    _render_list_filter(value or show_all_vars),
                    f"{option} OR --show-all-variables",
                )
                for fname, value, option in filters
            ],
        )


@plugin_cmd_list_manifest_variables.configure_handler
def list_manifest_variable_arg_parser(
    plugin_list_manifest_variables_parser: argparse.ArgumentParser,
) -> None:
    plugin_list_manifest_variables_parser.add_argument(
        "--show-special-case-variables",
        dest="show_special_use_variables",
        default=False,
        action="store_true",
        help="Show variables that are only used in special / niche cases",
    )
    plugin_list_manifest_variables_parser.add_argument(
        "--show-token-variables",
        dest="show_token_variables",
        default=False,
        action="store_true",
        help="Show token (syntactical) variables like {{token:TAB}}",
    )
    plugin_list_manifest_variables_parser.add_argument(
        "--show-all-variables",
        dest="show_all_variables",
        default=False,
        action="store_true",
        help="Show all variables regardless of type/kind (overrules other filter settings)",
    )
    TEXT_ONLY_FORMAT(plugin_list_manifest_variables_parser)


def _parser_type_name(v: Union[str, Type[Any]]) -> str:
    if isinstance(v, str):
        return v if v != "<ROOT>" else ""
    return v.__name__


@plugin_list_cmds.register_subcommand(
    ["pluggable-manifest-rules", "p-m-r", "pmr"],
    help_description="Pluggable manifest rules (such as install rules)",
    argparser=TEXT_CSV_FORMAT_NO_STABILITY_PROMISE,
)
def _plugin_cmd_list_manifest_rules(context: CommandContext) -> None:
    feature_set = context.load_plugins()

    # Type hint to make the chain call easier for the type checker, which does not seem
    # to derive to this common base type on its own.
    base_type = Iterable[Tuple[Union[str, Type[Any]], DispatchingParserBase[Any]]]

    parser_generator = feature_set.manifest_parser_generator
    table_parsers: base_type = parser_generator.dispatchable_table_parsers.items()
    object_parsers: base_type = parser_generator.dispatchable_object_parsers.items()

    parsers = chain(
        table_parsers,
        object_parsers,
    )

    with _stream_to_pager(context.parsed_args) as (fd, fo):
        fo.print_list_table(
            ["Rule Name", "Rule Type", "Provided By"],
            [
                (
                    rn,
                    _parser_type_name(rt),
                    pt.parser_for(rn).plugin_metadata.plugin_name,
                )
                for rt, pt in parsers
                for rn in pt.registered_keywords()
            ],
        )


@plugin_list_cmds.register_subcommand(
    ["automatic-discard-rules", "a-d-r"],
    help_description="List automatic discard rules",
    argparser=TEXT_CSV_FORMAT_NO_STABILITY_PROMISE,
)
def _plugin_cmd_list_automatic_discard_rules(context: CommandContext) -> None:
    auto_discard_rules = context.load_plugins().auto_discard_rules

    with _stream_to_pager(context.parsed_args) as (fd, fo):
        fo.print_list_table(
            ["Name", "Provided By"],
            [
                (
                    name,
                    ppdr.plugin_metadata.plugin_name,
                )
                for name, ppdr in auto_discard_rules.items()
            ],
        )


def _render_manifest_variable_value(v: Optional[str]) -> str:
    if v is None:
        return "(N/A: Cannot resolve the variable)"
    v = v.replace("\n", "\\n").replace("\t", "\\t")
    return v


def _render_multiline_documentation(
    documentation: str,
    *,
    first_line_prefix: str = "Documentation: ",
    following_line_prefix: str = " ",
) -> None:
    current_prefix = first_line_prefix
    for line in documentation.splitlines(keepends=False):
        if line.isspace():
            if not current_prefix.isspace():
                print(current_prefix.rstrip())
                current_prefix = following_line_prefix
            else:
                print()
            continue
        print(f"{current_prefix}{line}")
        current_prefix = following_line_prefix
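_render_multiline_documentation() prefixes only the first line and indents continuation lines. A short illustration (the input string is hypothetical):

_render_multiline_documentation("First line\nSecond line")
# Prints:
# Documentation: First line
#  Second line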

@plugin_show_cmds.register_subcommand(
    ["manifest-variables"],
    help_description="Plugin provided manifest variables (such as `{{path:FOO}}`)",
    argparser=add_arg(
        "manifest_variable",
        metavar="manifest-variable",
        help="Name of the variable (such as `path:FOO` or `{{path:FOO}}`) to display details about",
    ),
)
def _plugin_cmd_show_manifest_variables(context: CommandContext) -> None:
    plugin_feature_set = context.load_plugins()
    variables = plugin_feature_set.manifest_variables
    substitution = context.substitution
    parsed_args = context.parsed_args
    variable_name = parsed_args.manifest_variable
    fo = _output_styling(context.parsed_args, sys.stdout)
    if variable_name.startswith("{{") and variable_name.endswith("}}"):
        variable_name = variable_name[2:-2]
    variable: Optional[PluginProvidedManifestVariable]
    if variable_name.startswith("env:") and len(variable_name) > 4:
        env_var = variable_name[4:]
        variable = PluginProvidedManifestVariable(
            plugin_feature_set.plugin_data["debputy"],
            variable_name,
            variable_value=None,
            is_context_specific_variable=False,
            is_documentation_placeholder=True,
            variable_reference_documentation=f'Environment variable "{env_var}"',
        )
    else:
        variable = variables.get(variable_name)
    if variable is None:
        _error(
            f'Cannot resolve "{variable_name}" as a known variable from any of the available'
            f" plugins. Please use `debputy plugin list manifest-variables` to list all known"
            f" provided variables."
        )

    var_with_braces = "{{" + variable_name + "}}"
    try:
        source_value = substitution.substitute(var_with_braces, "CLI request")
    except DebputySubstitutionError:
        source_value = None
    binary_value = source_value
    print(f"Variable: {variable_name}")
    fo.print_visual_formatting(f"=========={'=' * len(variable_name)}")
    print()

    if variable.is_context_specific_variable:
        try:
            binary_value = substitution.with_extra_substitutions(
                PACKAGE="<package-name>",
            ).substitute(var_with_braces, "CLI request")
        except DebputySubstitutionError:
            binary_value = None

    doc = variable.variable_reference_documentation or "No documentation provided"
    _render_multiline_documentation(doc)

    if source_value == binary_value:
        print(f"Resolved: {_render_manifest_variable_value(source_value)}")
    else:
        print("Resolved:")
        print(f"    [source context]: {_render_manifest_variable_value(source_value)}")
        print(f"    [binary context]: {_render_manifest_variable_value(binary_value)}")

    if variable.is_for_special_case:
        print(
            'Special-case: The variable has been marked as a "special-case"-only variable.'
        )

    if not variable.is_documentation_placeholder:
        print(f"Plugin: {variable.plugin_metadata.plugin_name}")

    if variable.is_internal:
        print()
        # I knew everything I felt was showing on my face, and I hate that. I grated out,
        print("That was private.")


def _determine_ppf(
    context: CommandContext,
) -> Tuple[PackagerProvidedFileClassSpec, bool]:
    feature_set = context.load_plugins()
    ppf_name = context.parsed_args.ppf_name
    try:
        return feature_set.packager_provided_files[ppf_name], False
    except KeyError:
        pass

    orig_ppf_name = ppf_name
    if (
        ppf_name.startswith("d/")
        and not os.path.lexists(ppf_name)
        and os.path.lexists("debian/" + ppf_name[2:])
    ):
        ppf_name = "debian/" + ppf_name[2:]

    if ppf_name in ("debian/control", "debian/debputy.manifest", "debian/rules"):
        if ppf_name == "debian/debputy.manifest":
            doc = f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md"
        else:
            doc = "Debian Policy Manual or a packaging tutorial"
        _error(
            f"Sorry. While {orig_ppf_name} is a well-defined packaging file, it does not match the definition of"
            f" a packager provided file. Please see {doc} for more information about this file"
        )

    if context.has_dctrl_file and os.path.lexists(ppf_name):
        basename = ppf_name[7:]
        if "/" not in basename:
            debian_dir = build_virtual_fs([basename])
            all_ppfs = detect_all_packager_provided_files(
                feature_set.packager_provided_files,
                debian_dir,
                context.binary_packages(),
            )
            if all_ppfs:
                matched = next(iter(all_ppfs.values()))
                if len(matched.auto_installable) == 1 and not matched.reserved_only:
                    return matched.auto_installable[0].definition, True
                if not matched.auto_installable and len(matched.reserved_only) == 1:
                    reserved = next(iter(matched.reserved_only.values()))
                    if len(reserved) == 1:
                        return reserved[0].definition, True

    _error(
        f'Unknown packager provided file "{orig_ppf_name}". Please use'
        f" `debputy plugin list packager-provided-files` to see them all."
    )


@plugin_show_cmds.register_subcommand(
    ["packager-provided-files", "ppf", "p-p-f"],
    help_description="Show details about a given packager provided file (debian/pkg.foo)",
    argparser=add_arg(
        "ppf_name",
        metavar="name",
        help="Name of the packager provided file (such as `changelog`) to display details about",
    ),
)
def _plugin_cmd_show_ppf(context: CommandContext) -> None:
    ppf, matched_file = _determine_ppf(context)

    fo = _output_styling(context.parsed_args, sys.stdout)

    fo.print(f"Packager Provided File: {ppf.stem}")
    fo.print_visual_formatting(f"========================{'=' * len(ppf.stem)}")
    fo.print()
    ref_doc = ppf.reference_documentation
    description = ref_doc.description if ref_doc else None
    doc_uris = ref_doc.format_documentation_uris if ref_doc else tuple()
    if description is None:
        fo.print(
            f"Sorry, no description provided by the plugin {ppf.debputy_plugin_metadata.plugin_name}."
        )
    else:
        for line in description.splitlines(keepends=False):
            fo.print(line)

    fo.print()
    fo.print("Features:")
    if ppf.packageless_is_fallback_for_all_packages:
        fo.print(f"  * debian/{ppf.stem} is used for *ALL* packages")
    else:
        fo.print(f'  * debian/{ppf.stem} is used only for the "main" package')
    if ppf.allow_name_segment:
        fo.print("  * Supports naming segment (multiple files and custom naming).")
    else:
        fo.print(
            "  * No naming support; at most one per package and it is named after the package."
        )
    if ppf.allow_architecture_segment:
        fo.print("  * Supports architecture specific variants.")
    else:
        fo.print("  * No architecture specific variants.")
    if ppf.supports_priority:
        fo.print(
            f"  * Has a priority system (default priority: {ppf.default_priority})."
        )

    fo.print()
    fo.print("Example matches:")

    if context.has_dctrl_file:
        first_pkg = next(iter(context.binary_packages()))
    else:
        first_pkg = "example-package"
    example_files = [
        (f"debian/{ppf.stem}", first_pkg),
        (f"debian/{first_pkg}.{ppf.stem}", first_pkg),
    ]
    if ppf.allow_name_segment:
        example_files.append(
            (f"debian/{first_pkg}.my.custom.name.{ppf.stem}", "my.custom.name")
        )
    if ppf.allow_architecture_segment:
        example_files.append((f"debian/{first_pkg}.{ppf.stem}.amd64", first_pkg))
        if ppf.allow_name_segment:
            example_files.append(
                (
                    f"debian/{first_pkg}.my.custom.name.{ppf.stem}.amd64",
                    "my.custom.name",
                )
            )
    fs_root = build_virtual_fs([x for x, _ in example_files])
    priority = ppf.default_priority if ppf.supports_priority else None
    rendered_examples = []
    for example_file, assigned_name in example_files:
        example_path = fs_root.lookup(example_file)
        assert example_path is not None and example_path.is_file
        dest = ppf.compute_dest(
            assigned_name,
            owning_package=first_pkg,
            assigned_priority=priority,
            path=example_path,
        )
        dest_path = "/".join(dest).lstrip(".")
        rendered_examples.append((example_file, dest_path))

    fo.print_list_table(["Source file", "Installed As"], rendered_examples)

    if doc_uris:
        fo.print()
        fo.print("Documentation URIs:")
        for uri in doc_uris:
            fo.print(f"  * {fo.render_url(uri)}")

    plugin_name = ppf.debputy_plugin_metadata.plugin_name
    fo.print()
    fo.print(f"Install Mode: 0{oct(ppf.default_mode)[2:]}")
    fo.print(f"Provided by plugin: {plugin_name}")
    if (
        matched_file
        and plugin_name != "debputy"
        and plugin_name not in context.requested_plugins()
    ):
        fo.print()
        _warn(
            f"The file might *NOT* be used due to missing Build-Depends on debputy-plugin-{plugin_name}"
        )


@plugin_show_cmds.register_subcommand(
    ["pluggable-manifest-rules", "p-m-r", "pmr"],
    help_description="Pluggable manifest rules (such as install rules)",
    argparser=add_arg(
        "pmr_rule_name",
        metavar="rule-name",
        help="Name of the rule (such as `install`) to display details about",
    ),
)
def _plugin_cmd_show_manifest_rule(context: CommandContext) -> None:
    feature_set = context.load_plugins()
    parsed_args = context.parsed_args
    req_rule_type = None
    rule_name = parsed_args.pmr_rule_name
    if "::" in rule_name and rule_name != "::":
        req_rule_type, rule_name = rule_name.split("::", 1)

    matched = []

    base_type = Iterable[Tuple[Union[str, Type[Any]], DispatchingParserBase[Any]]]
    parser_generator = feature_set.manifest_parser_generator
    table_parsers: base_type = parser_generator.dispatchable_table_parsers.items()
    object_parsers: base_type = parser_generator.dispatchable_object_parsers.items()

    parsers = chain(
        table_parsers,
        object_parsers,
    )

    for rule_type, dispatching_parser in parsers:
        if req_rule_type is not None and req_rule_type not in _parser_type_name(
            rule_type
        ):
            continue
        if dispatching_parser.is_known_keyword(rule_name):
            matched.append((rule_type, dispatching_parser))

    if len(matched) != 1 and (matched or rule_name != "::"):
        if not matched:
            _error(
                f"Could not find any pluggable manifest rule related to {parsed_args.pmr_rule_name}."
                f" Please use `debputy plugin list pluggable-manifest-rules` to see the list of rules."
            )
        match_a = matched[0][0]
        match_b = matched[1][0]
        _error(
            f"The name {rule_name} was ambiguous and matched multiple rule types. Please use"
            f" <rule-type>::{rule_name} to clarify which rule to use"
            f" (such as {_parser_type_name(match_a)}::{rule_name} or {_parser_type_name(match_b)}::{rule_name})."
            f" Please use `debputy plugin list pluggable-manifest-rules` to see the list of rules."
        )

    if matched:
        rule_type, matched_dispatching_parser = matched[0]
        plugin_provided_parser = matched_dispatching_parser.parser_for(rule_name)
        if isinstance(rule_type, str):
            manifest_attribute_path = rule_type
        else:
            manifest_attribute_path = SUPPORTED_DISPATCHABLE_TABLE_PARSERS[rule_type]
        parser_type_name = _parser_type_name(rule_type)
        parser = plugin_provided_parser.parser
        plugin_metadata = plugin_provided_parser.plugin_metadata
    else:
        rule_name = "::"
        parser = parser_generator.dispatchable_object_parsers[OPARSER_MANIFEST_ROOT]
        parser_type_name = ""
        plugin_metadata = plugin_metadata_for_debputys_own_plugin()
        manifest_attribute_path = ""

    is_root_rule = rule_name == "::"
    print(
        render_rule(
            rule_name,
            parser,
            plugin_metadata,
            is_root_rule=is_root_rule,
        )
    )

    if not is_root_rule:
        print(
            f"Used in: {manifest_attribute_path if manifest_attribute_path != '<ROOT>' else 'The manifest root'}"
        )
        print(f"Rule reference: {parser_type_name}::{rule_name}")
        print(f"Plugin: {plugin_metadata.plugin_name}")
    else:
        print(f"Rule reference: {rule_name}")

    print()
    print(
        "PS: If you want to know more about a non-trivial type of an attribute such as `FileSystemMatchRule`,"

-

858 ) 

-

859 print( 

-

860 "you can use `debputy plugin show type-mappings FileSystemMatchRule` to look it up " 

-

861 ) 

-

862 

-

863 

-

864def _render_discard_rule_example( 

-

865 fo: OutputStylingBase, 

-

866 discard_rule: PluginProvidedDiscardRule, 

-

867 example: AutomaticDiscardRuleExample, 

-

868) -> None: 

-

869 processed = process_discard_rule_example(discard_rule, example) 

-

870 

-

871 if processed.inconsistent_paths: 

-

872 plugin_name = discard_rule.plugin_metadata.plugin_name 

-

873 _warn( 

-

874 f"This example is inconsistent with what the code actually does." 

-

875 f" Please consider filing a bug against the plugin {plugin_name}" 

-

876 ) 

-

877 

-

878 doc = example.description 

-

879 if doc: 

-

880 print(doc) 

-

881 

-

882 print("Consider the following source paths matched by a glob or directory match:") 

-

883 print() 

-

884 if fo.optimize_for_screen_reader: 

-

885 for p, _ in processed.rendered_paths: 

-

886 path_name = p.absolute 

-

887 print( 

-

888 f"The path {path_name} is a {'directory' if p.is_dir else 'file or symlink.'}" 

-

889 ) 

-

890 

-

891 print() 

-

892 if any(v.is_consistent and v.is_discarded for _, v in processed.rendered_paths): 

-

893 print("The following paths will be discarded by this rule:") 

-

894 for p, verdict in processed.rendered_paths: 

-

895 path_name = p.absolute 

-

896 if verdict.is_consistent and verdict.is_discarded: 

-

897 print() 

-

898 if p.is_dir: 

-

899 print(f"{path_name} along with anything beneath it") 

-

900 else: 

-

901 print(path_name) 

-

902 else: 

-

903 print("No paths will be discarded in this example.") 

-

904 

-

905 print() 

-

906 if any(v.is_consistent and v.is_kept for _, v in processed.rendered_paths): 

-

907 print("The following paths will be not be discarded by this rule:") 

-

908 for p, verdict in processed.rendered_paths: 

-

909 path_name = p.absolute 

-

910 if verdict.is_consistent and verdict.is_kept: 

-

911 print() 

-

912 print(path_name) 

-

913 

-

914 if any(not v.is_consistent for _, v in processed.rendered_paths): 

-

915 print() 

-

916 print( 

-

917 "The example was inconsistent with the code. These are the paths where the code disagrees with" 

-

918 " the provided example:" 

-

919 ) 

-

920 for p, verdict in processed.rendered_paths: 

-

921 path_name = p.absolute 

-

922 if not verdict.is_consistent: 

-

923 print() 

-

924 if verdict == DiscardVerdict.DISCARDED_BY_CODE: 

-

925 print( 

-

926 f"The path {path_name} was discarded by the code, but the example said it should" 

-

927 f" have been installed." 

-

928 ) 

-

929 else: 

-

930 print( 

-

931 f"The path {path_name} was not discarded by the code, but the example said it should" 

-

932 f" have been discarded." 

-

933 ) 

-

934 return 

-

935 

-

936 # Add +1 for dirs because we want trailing slashes in the output 

-

937 max_len = max( 

-

938 (len(p.absolute) + (1 if p.is_dir else 0)) for p, _ in processed.rendered_paths 

-

939 ) 

-

940 for p, verdict in processed.rendered_paths: 

-

941 path_name = p.absolute 

-

942 if p.is_dir: 

-

943 path_name += "/" 

-

944 

-

945 if not verdict.is_consistent: 

-

946 print(f" {path_name:<{max_len}} !! {verdict.message}") 

-

947 elif verdict.is_discarded: 

-

948 print(f" {path_name:<{max_len}} << {verdict.message}") 

-

949 else: 

-

950 print(f" {path_name:<{max_len}}") 

-

951 

-

952 

-

953def _render_discard_rule( 

-

954 context: CommandContext, 

-

955 discard_rule: PluginProvidedDiscardRule, 

-

956) -> None: 

-

957 fo = _output_styling(context.parsed_args, sys.stdout) 

-

958 print(fo.colored(f"Automatic Discard Rule: {discard_rule.name}", style="bold")) 

-

959 fo.print_visual_formatting( 

-

960 f"========================{'=' * len(discard_rule.name)}" 

-

961 ) 

-

962 print() 

-

963 doc = discard_rule.reference_documentation or "No documentation provided" 

-

964 _render_multiline_documentation(doc, first_line_prefix="", following_line_prefix="") 

-

965 

-

966 if len(discard_rule.examples) > 1: 

-

967 print() 

-

968 fo.print_visual_formatting("Examples") 

-

969 fo.print_visual_formatting("--------") 

-

970 print() 

-

971 for no, example in enumerate(discard_rule.examples, start=1): 

-

972 print( 

-

973 fo.colored( 

-

974 f"Example {no} of {len(discard_rule.examples)}", style="bold" 

-

975 ) 

-

976 ) 

-

977 fo.print_visual_formatting(f"........{'.' * len(str(no))}") 

-

978 _render_discard_rule_example(fo, discard_rule, example) 

-

979 elif discard_rule.examples: 

-

980 print() 

-

981 print(fo.colored("Example", style="bold")) 

-

982 fo.print_visual_formatting("-------") 

-

983 print() 

-

984 _render_discard_rule_example(fo, discard_rule, discard_rule.examples[0]) 

-

985 

-

986 

-

987@plugin_show_cmds.register_subcommand( 

-

988 ["automatic-discard-rules", "a-d-r"], 

-

989 help_description="Pluggable manifest rules (such as install rules)", 

-

990 argparser=add_arg( 

-

991 "discard_rule", 

-

992 metavar="automatic-discard-rule", 

-

993 help="Name of the automatic discard rule (such as `backup-files`)", 

-

994 ), 

-

995) 

-

996def _plugin_cmd_show_automatic_discard_rules(context: CommandContext) -> None: 

-

997 auto_discard_rules = context.load_plugins().auto_discard_rules 

-

998 name = context.parsed_args.discard_rule 

-

999 discard_rule = auto_discard_rules.get(name) 

-

1000 if discard_rule is None: 

-

1001 _error( 

-

1002 f'No automatic discard rule with the name "{name}". Please use' 

-

1003 f" `debputy plugin list automatic-discard-rules` to see the list of automatic discard rules" 

-

1004 ) 

-

1005 

-

1006 _render_discard_rule(context, discard_rule) 

-

1007 

-

1008 

-

1009def _render_source_type(t: Any) -> str: 

-

1010 _, origin_type, args = unpack_type(t, False) 

-

1011 if origin_type == Union: 

-

1012 at = ", ".join(_render_source_type(st) for st in args) 

-

1013 return f"One of: {at}" 

-

1014 name = BASIC_SIMPLE_TYPES.get(t) 

-

1015 if name is not None: 

-

1016 return name 

-

1017 try: 

-

1018 return t.__name__ 

-

1019 except AttributeError: 

-

1020 return str(t) 

-

1021 

-

1022 

-

1023@plugin_list_cmds.register_subcommand( 

-

1024 "type-mappings", 

-

1025 help_description="Registered type mappings/descriptions", 

-

1026) 

-

1027def _plugin_cmd_list_type_mappings(context: CommandContext) -> None: 

-

1028 type_mappings = context.load_plugins().mapped_types 

-

1029 

-

1030 with _stream_to_pager(context.parsed_args) as (fd, fo): 

-

1031 fo.print_list_table( 

-

1032 ["Type", "Base Type", "Provided By"], 

-

1033 [ 

-

1034 ( 

-

1035 target_type.__name__, 

-

1036 _render_source_type(type_mapping.mapped_type.source_type), 

-

1037 type_mapping.plugin_metadata.plugin_name, 

-

1038 ) 

-

1039 for target_type, type_mapping in type_mappings.items() 

-

1040 ], 

-

1041 ) 

-

1042 

-

1043 

-

1044@plugin_show_cmds.register_subcommand( 

-

1045 "type-mappings", 

-

1046 help_description="Register type mappings/descriptions", 

-

1047 argparser=add_arg( 

-

1048 "type_mapping", 

-

1049 metavar="type-mapping", 

-

1050 help="Name of the type", 

-

1051 ), 

-

1052) 

-

1053def _plugin_cmd_show_type_mappings(context: CommandContext) -> None: 

-

1054 type_mapping_name = context.parsed_args.type_mapping 

-

1055 type_mappings = context.load_plugins().mapped_types 

-

1056 

-

1057 matches = [] 

-

1058 for type_ in type_mappings: 

-

1059 if type_.__name__ == type_mapping_name: 

-

1060 matches.append(type_) 

-

1061 

-

1062 if not matches: 

-

1063 simple_types = set(BASIC_SIMPLE_TYPES.values()) 

-

1064 simple_types.update(t.__name__ for t in BASIC_SIMPLE_TYPES) 

-

1065 

-

1066 if type_mapping_name in simple_types: 

-

1067 print(f"The type {type_mapping_name} is a YAML scalar.") 

-

1068 return 

-

1069 if type_mapping_name == "Any": 

-

1070 print( 

-

1071 "The Any type is a placeholder for when no typing information is provided. Often this implies" 

-

1072 " custom parse logic." 

-

1073 ) 

-

1074 return 

-

1075 

-

1076 if type_mapping_name in ("List", "list"): 

-

1077 print( 

-

1078 f"The {type_mapping_name} is a YAML Sequence. Please see the YAML documentation for examples." 

-

1079 ) 

-

1080 return 

-

1081 

-

1082 if type_mapping_name in ("Mapping", "dict"): 

-

1083 print( 

-

1084 f"The {type_mapping_name} is a YAML mapping. Please see the YAML documentation for examples." 

-

1085 ) 

-

1086 return 

-

1087 

-

1088 if "[" in type_mapping_name: 

-

1089 _error( 

-

1090 f"No known matches for {type_mapping_name}. Note: It looks like a composite type. Try searching" 

-

1091 " for its component parts. As an example, replace List[FileSystemMatchRule] with FileSystemMatchRule." 

-

1092 ) 

-

1093 

-

1094 _error(f"Sorry, no known matches for {type_mapping_name}") 

-

1095 

-

1096 if len(matches) > 1: 

-

1097 _error( 

-

1098 f"Too many matches for {type_mapping_name}... Sorry, there is no way to avoid this right now :'(" 

-

1099 ) 

-

1100 

-

1101 match = matches[0] 

-

1102 _render_type(context, type_mappings[match]) 

-

1103 

-

1104 

-

1105def _render_type_example( 

-

1106 context: CommandContext, 

-

1107 fo: OutputStylingBase, 

-

1108 parser_context: ParserContextData, 

-

1109 type_mapping: TypeMapping[Any, Any], 

-

1110 example: TypeMappingExample, 

-

1111) -> Tuple[str, bool]: 

-

1112 attr_path = AttributePath.builtin_path()["CLI Request"] 

-

1113 v = _render_value(example.source_input) 

-

1114 try: 

-

1115 type_mapping.mapper( 

-

1116 example.source_input, 

-

1117 attr_path, 

-

1118 parser_context, 

-

1119 ) 

-

1120 except RuntimeError: 

-

1121 if context.parsed_args.debug_mode: 

-

1122 raise 

-

1123 fo.print( 

-

1124 fo.colored("Broken example: ", fg="red") 

-

1125 + f"Provided example input ({v})" 

-

1126 + " caused an exception when parsed. Please file a bug against the plugin." 

-

1127 + " Use --debug to see the stack trace" 

-

1128 ) 

-

1129 return fo.colored(v, fg="red") + " [Example value could not be parsed]", True 

-

1130 return fo.colored(v, fg="green"), False 

-

1131 

-

1132 

-

1133def _render_type( 

-

1134 context: CommandContext, 

-

1135 pptm: PluginProvidedTypeMapping, 

-

1136) -> None: 

-

1137 fo = _output_styling(context.parsed_args, sys.stdout) 

-

1138 type_mapping = pptm.mapped_type 

-

1139 target_type = type_mapping.target_type 

-

1140 ref_doc = pptm.reference_documentation 

-

1141 desc = ref_doc.description if ref_doc is not None else None 

-

1142 examples = ref_doc.examples if ref_doc is not None else tuple() 

-

1143 

-

1144 fo.print(fo.colored(f"# Type Mapping: {target_type.__name__}", style="bold")) 

-

1145 fo.print() 

-

1146 if desc is not None: 

-

1147 _render_multiline_documentation( 

-

1148 desc, first_line_prefix="", following_line_prefix="" 

-

1149 ) 

-

1150 else: 

-

1151 fo.print("No documentation provided.") 

-

1152 

-

1153 context.parse_manifest() 

-

1154 

-

1155 manifest_parser = context.manifest_parser() 

-

1156 

-

1157 if examples: 

-

1158 had_issues = False 

-

1159 fo.print() 

-

1160 fo.print(fo.colored("## Example values", style="bold")) 

-

1161 fo.print() 

-

1162 for no, example in enumerate(examples, start=1): 

-

1163 v, i = _render_type_example( 

-

1164 context, fo, manifest_parser, type_mapping, example 

-

1165 ) 

-

1166 fo.print(f" * {v}") 

-

1167 if i: 

-

1168 had_issues = True 

-

1169 else: 

-

1170 had_issues = False 

-

1171 

-

1172 fo.print() 

-

1173 fo.print(f"Provided by plugin: {pptm.plugin_metadata.plugin_name}") 

-

1174 

-

1175 if had_issues: 

-

1176 fo.print() 

-

1177 fo.print( 

-

1178 fo.colored( 

-

1179 "Examples had issues. Please file a bug against the plugin", fg="red" 

-

1180 ) 

-

1181 ) 

-

1182 fo.print() 

-

1183 fo.print("Use --debug to see the stacktrace") 

-

1184 

-

1185 

-

1186def _render_value(v: Any) -> str: 

-

1187 if isinstance(v, str) and '"' not in v: 

-

1188 return f'"{v}"' 

-

1189 return str(v) 

-

1190 

-

1191 

-

1192def ensure_plugin_commands_are_loaded(): 

-

1193 # Loading the module does the heavy lifting 

-

1194 # However, having this function means that we do not have an "unused" import that some tool 

-

1195 # gets tempted to remove 

-

1196 assert ROOT_COMMAND.has_command("plugin") 

-
diff --git a/coverage-report/d_7764373ba25ba45b___init___py.html b/coverage-report/d_7764373ba25ba45b___init___py.html
deleted file mode 100644
index 531f4cd..0000000
--- a/coverage-report/d_7764373ba25ba45b___init___py.html
+++ /dev/null
@@ -1,99 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for src/debputy/linting/__init__.py: 100% of 0 statements; navigation boilerplate only]
diff --git a/coverage-report/d_7764373ba25ba45b_lint_impl_py.html b/coverage-report/d_7764373ba25ba45b_lint_impl_py.html
deleted file mode 100644
index 6214efe..0000000
--- a/coverage-report/d_7764373ba25ba45b_lint_impl_py.html
+++ /dev/null
@@ -1,448 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for src/debputy/linting/lint_impl.py: 12% of 152 statements covered. The report embeds the full source listing (lines 1-349): the LINTER_FORMATS dispatch table, perform_linting(), _exit_with_lint_code(), perform_linting_of_file(), the overlapping-edit auto-fix loop in _auto_fix_run(), _diagnostics_run() and resolve_auto_fixer()]
diff --git a/coverage-report/d_7764373ba25ba45b_lint_util_py.html b/coverage-report/d_7764373ba25ba45b_lint_util_py.html
deleted file mode 100644
index 7bcc47e..0000000
--- a/coverage-report/d_7764373ba25ba45b_lint_util_py.html
+++ /dev/null
@@ -1,318 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for src/debputy/linting/lint_util.py: 42% of 117 statements covered. The report embeds the full source listing (lines 1-219): LintState/LintStateImpl, LintReport, LinterPositionCodec, the severity-to-tag colour table, _lines_to_print(), _highlight_range() and report_diagnostic()]
diff --git a/coverage-report/d_9ae9c81fc31f2694_gnome_py.html b/coverage-report/d_9ae9c81fc31f2694_gnome_py.html
deleted file mode 100644
index 90f704d..0000000
--- a/coverage-report/d_9ae9c81fc31f2694_gnome_py.html
+++ /dev/null
@@ -1,170 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for debputy/plugins/gnome.py: 96% of 38 statements covered. The report embeds the full source listing (lines 1-71): the GNOME_VERSION1_RE/GNOME_VERSION2_RE version regexes and the gnome_versions() metadata detector that fills the gnome:Version family of substvars]
diff --git a/coverage-report/d_9ae9c81fc31f2694_numpy3_py.html b/coverage-report/d_9ae9c81fc31f2694_numpy3_py.html
deleted file mode 100644
index 03e18e1..0000000
--- a/coverage-report/d_9ae9c81fc31f2694_numpy3_py.html
+++ /dev/null
@@ -1,161 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for debputy/plugins/numpy3.py: 86% of 34 statements covered. The report embeds the full source listing (lines 1-62): _parse_numpy3_versions() and the numpy3_versions() detector that adds the python3-numpy dependency to python3:Depends]
diff --git a/coverage-report/d_9ae9c81fc31f2694_perl_openssl_py.html b/coverage-report/d_9ae9c81fc31f2694_perl_openssl_py.html
deleted file mode 100644
index 715454b..0000000
--- a/coverage-report/d_9ae9c81fc31f2694_perl_openssl_py.html
+++ /dev/null
@@ -1,142 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for debputy/plugins/perl_openssl.py: 75% of 20 statements covered. The report embeds the full source listing (lines 1-43): _resolve_libssl_abi() and the detect_perl_openssl_abi() detector that adds the perl-openssl-abi dependency to perl:Depends]
diff --git a/coverage-report/d_d5d6843b45eec01e___init___py.html b/coverage-report/d_d5d6843b45eec01e___init___py.html
deleted file mode 100644
index 498a761..0000000
--- a/coverage-report/d_d5d6843b45eec01e___init___py.html
+++ /dev/null
@@ -1,99 +0,0 @@
-[generated coverage.py v7.2.7 HTML report for src/debputy/plugin/debputy/__init__.py: 100% of 0 statements; navigation boilerplate only]
- - - diff --git a/coverage-report/d_d5d6843b45eec01e_binary_package_rules_py.html b/coverage-report/d_d5d6843b45eec01e_binary_package_rules_py.html deleted file mode 100644 index c98b3fb..0000000 --- a/coverage-report/d_d5d6843b45eec01e_binary_package_rules_py.html +++ /dev/null @@ -1,817 +0,0 @@ - - - - - Coverage for src/debputy/plugin/debputy/binary_package_rules.py: 82% - - - - - -
-
-

- Coverage for src/debputy/plugin/debputy/binary_package_rules.py: - 82% -

- -

- 173 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1import dataclasses 

-

2import os 

-

3import textwrap 

-

4from typing import ( 

-

5 Any, 

-

6 List, 

-

7 NotRequired, 

-

8 Union, 

-

9 Literal, 

-

10 TypedDict, 

-

11 Annotated, 

-

12 Optional, 

-

13 FrozenSet, 

-

14 Self, 

-

15 cast, 

-

16) 

-

17 

-

18from debputy import DEBPUTY_DOC_ROOT_DIR 

-

19from debputy.maintscript_snippet import DpkgMaintscriptHelperCommand, MaintscriptSnippet 

-

20from debputy.manifest_parser.base_types import ( 

-

21 DebputyParsedContent, 

-

22 FileSystemExactMatchRule, 

-

23) 

-

24from debputy.manifest_parser.declarative_parser import ( 

-

25 DebputyParseHint, 

-

26 ParserGenerator, 

-

27) 

-

28from debputy.manifest_parser.exceptions import ManifestParseException 

-

29from debputy.manifest_parser.parser_data import ParserContextData 

-

30from debputy.manifest_parser.util import AttributePath 

-

31from debputy.path_matcher import MatchRule, MATCH_ANYTHING, ExactFileSystemPath 

-

32from debputy.plugin.api import reference_documentation 

-

33from debputy.plugin.api.impl import ( 

-

34 DebputyPluginInitializerProvider, 

-

35 ServiceDefinitionImpl, 

-

36) 

-

37from debputy.plugin.api.impl_types import OPARSER_PACKAGES 

-

38from debputy.plugin.api.spec import ( 

-

39 ServiceUpgradeRule, 

-

40 ServiceDefinition, 

-

41 DSD, 

-

42 documented_attr, 

-

43) 

-

44from debputy.transformation_rules import TransformationRule 

-

45from debputy.util import _error 

-

46 

-

47ACCEPTABLE_CLEAN_ON_REMOVAL_FOR_GLOBS_AND_EXACT_MATCHES = frozenset( 

-

48 [ 

-

49 "./var/log", 

-

50 ] 

-

51) 

-

52 

-

53 

-

54ACCEPTABLE_CLEAN_ON_REMOVAL_IF_EXACT_MATCH_OR_SUBDIR_OF = frozenset( 

-

55 [ 

-

56 "./etc", 

-

57 "./run", 

-

58 "./var/lib", 

-

59 "./var/cache", 

-

60 "./var/backups", 

-

61 "./var/spool", 

-

62 # linux-image uses these paths with some `rm -f` 

-

63 "./usr/lib/modules", 

-

64 "./lib/modules", 

-

65 # udev special case 

-

66 "./lib/udev", 

-

67 "./usr/lib/udev", 

-

68 # pciutils deletes /usr/share/misc/pci.ids.<ext> 

-

69 "./usr/share/misc", 

-

70 ] 

-

71) 

-

72 

-

73 

-

74def register_binary_package_rules(api: DebputyPluginInitializerProvider) -> None: 

-

75 api.pluggable_manifest_rule( 

-

76 OPARSER_PACKAGES, 

-

77 "binary-version", 

-

78 BinaryVersionParsedFormat, 

-

79 _parse_binary_version, 

-

80 source_format=str, 

-

81 inline_reference_documentation=reference_documentation( 

-

82 title="Custom binary version (`binary-version`)", 

-

83 description=textwrap.dedent( 

-

84 """\ 

-

85 In the *rare* case that you need a binary package to have a custom version, you can use 

-

86 the `binary-version:` key to describe the desired package version. An example being: 

-

87 

-

88 packages: 

-

89 foo: 

-

90 # The foo package needs a different epoch because we took it over from a different 

-

91 # source package with higher epoch version 

-

92 binary-version: '1:{{DEB_VERSION_UPSTREAM_REVISION}}' 

-

93 

-

94 Use this feature sparingly as it is generally not possible to undo as each version must be 

-

95 monotonously higher than the previous one. This feature translates into `-v` option for 

-

96 `dpkg-gencontrol`. 

-

97 

-

98 The value for the `binary-version` key is a string that defines the binary version. Generally, 

-

99 you will want it to contain one of the versioned related substitution variables such as 

-

100 `{{DEB_VERSION_UPSTREAM_REVISION}}`. Otherwise, you will have to remember to bump the version 

-

101 manually with each upload as versions cannot be reused and the package would not support binNMUs 

-

102 either. 

-

103 """ 

-

104 ), 

-

105 reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#custom-binary-version-binary-version", 

-

106 ), 

-

107 ) 

-

108 

-

109 api.pluggable_manifest_rule( 

-

110 OPARSER_PACKAGES, 

-

111 "transformations", 

-

112 List[TransformationRule], 

-

113 _unpack_list, 

-

114 inline_reference_documentation=reference_documentation( 

-

115 title="Transformations (`transformations`)", 

-

116 description=textwrap.dedent( 

-

117 """\ 

-

118 You can define a `transformations` under the package definition, which is a list a transformation 

-

119 rules. An example: 

-

120 

-

121 packages: 

-

122 foo: 

-

123 transformations: 

-

124 - remove: 'usr/share/doc/{{PACKAGE}}/INSTALL.md' 

-

125 - move: 

-

126 source: bar/* 

-

127 target: foo/ 

-

128 

-

129 

-

130 Transformations are ordered and are applied in the listed order. A path can be matched by multiple 

-

131 transformations; how that plays out depends on which transformations are applied and in which order. 

-

132 A quick summary: 

-

133 

-

134 - Transformations that modify the file system layout affect how path matches in later transformations. 

-

135 As an example, `move` and `remove` transformations affects what globs and path matches expand to in 

-

136 later transformation rules. 

-

137 

-

138 - For other transformations generally the latter transformation overrules the earlier one, when they 

-

139 overlap or conflict. 

-

140 """ 

-

141 ), 

-

142 reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#transformations-transformations", 

-

143 ), 

-

144 ) 

-

145 

-

146 api.pluggable_manifest_rule( 

-

147 OPARSER_PACKAGES, 

-

148 "conffile-management", 

-

149 List[DpkgMaintscriptHelperCommand], 

-

150 _unpack_list, 

-

151 ) 

-

152 

-

153 api.pluggable_manifest_rule( 

-

154 OPARSER_PACKAGES, 

-

155 "services", 

-

156 List[ServiceRuleParsedFormat], 

-

157 _process_service_rules, 

-

158 source_format=List[ServiceRuleSourceFormat], 

-

159 inline_reference_documentation=reference_documentation( 

-

160 title="Define how services in the package will be handled (`services`)", 

-

161 description=textwrap.dedent( 

-

162 """\ 

-

163 If you have non-standard requirements for certain services in the package, you can define those via 

-

164 the `services` attribute. The `services` attribute is a list of service rules. Example: 

-

165 

-

166 packages: 

-

167 foo: 

-

168 services: 

-

169 - service: "foo" 

-

170 enable-on-install: false 

-

171 - service: "bar" 

-

172 on-upgrade: stop-then-start 

-

173 """ 

-

174 ), 

-

175 attributes=[ 

-

176 documented_attr( 

-

177 "service", 

-

178 textwrap.dedent( 

-

179 f"""\ 

-

180 Name of the service to match. The name is usually the basename of the service file. 

-

181 However, aliases can also be used for relevant system managers. When aliases **and** 

-

182 multiple service managers are involved, then the rule will apply to all matches. 

-

183 For details on aliases, please see 

-

184 {DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#service-managers-and-aliases. 

-

185 

-

186 - Note: For systemd, the `.service` suffix can be omitted from name, but other 

-

187 suffixes such as `.timer` cannot. 

-

188 """ 

-

189 ), 

-

190 ), 

-

191 documented_attr( 

-

192 "type_of_service", 

-

193 textwrap.dedent( 

-

194 """\ 

-

195 The type of service this rule applies to. To act on a `systemd` timer, you would 

-

196 set this to `timer` (etc.). Each service manager defines its own set of types 

-

197 of services. 

-

198 """ 

-

199 ), 

-

200 ), 

-

201 documented_attr( 

-

202 "service_scope", 

-

203 textwrap.dedent( 

-

204 """\ 

-

205 The scope of the service. It must be either `system` and `user`. 

-

206 - Note: The keyword is defined to support `user`, but `debputy` does not support `user` 

-

207 services at the moment (the detection logic is missing). 

-

208 """ 

-

209 ), 

-

210 ), 

-

211 documented_attr( 

-

212 ["service_manager", "service_managers"], 

-

213 textwrap.dedent( 

-

214 """\ 

-

215 Which service managers this rule is for. When omitted, all service managers with this 

-

216 service will be affected. This can be used to specify separate rules for the same 

-

217 service under different service managers. 

-

218 - When this attribute is explicitly given, then all the listed service managers must 

-

219 provide at least one service matching the definition. In contract, when it is omitted, 

-

220 then all service manager integrations are consulted but as long as at least one 

-

221 service is match from any service manager, the rule is accepted. 

-

222 """ 

-

223 ), 

-

224 ), 

-

225 documented_attr( 

-

226 "enable_on_install", 

-

227 textwrap.dedent( 

-

228 """\ 

-

229 Whether to automatically enable the service on installation. Note: This does 

-

230 **not** affect whether the service will be started nor how restarts during 

-

231 upgrades will happen. 

-

232 - If omitted, the plugin detecting the service decides the default. 

-

233 """ 

-

234 ), 

-

235 ), 

-

236 documented_attr( 

-

237 "start_on_install", 

-

238 textwrap.dedent( 

-

239 """\ 

-

240 Whether to automatically start the service on installation. Whether it is 

-

241 enabled or how upgrades are handled have separate attributes. 

-

242 - If omitted, the plugin detecting the service decides the default. 

-

243 """ 

-

244 ), 

-

245 ), 

-

246 documented_attr( 

-

247 "on_upgrade", 

-

248 textwrap.dedent( 

-

249 """\ 

-

250 How `debputy` should handle the service during upgrades. The default depends on the 

-

251 plugin detecting the service. Valid values are: 

-

252 

-

253 - `do-nothing`: During an upgrade, the package should not attempt to stop, reload or 

-

254 restart the service. 

-

255 - `reload`: During an upgrade, prefer reloading the service rather than restarting 

-

256 if possible. Note that the result may become `restart` instead if the service 

-

257 manager integration determines that `reload` is not supported. 

-

258 - `restart`: During an upgrade, `restart` the service post upgrade. The service 

-

259 will be left running during the upgrade process. 

-

260 - `stop-then-start`: Stop the service before the upgrade, perform the upgrade and 

-

261 then start the service. 

-

262 """ 

-

263 ), 

-

264 ), 

-

265 ], 

-

266 reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#service-management-services", 

-

267 ), 

-

268 ) 

-

269 

-

-    api.pluggable_manifest_rule(
-        OPARSER_PACKAGES,
-        "clean-after-removal",
-        ListParsedFormat,
-        _parse_clean_after_removal,
-        source_format=List[Any],
-        # FIXME: debputy won't see the attributes for this one :'(
-        inline_reference_documentation=reference_documentation(
-            title="Remove runtime created paths on purge or post removal (`clean-after-removal`)",
-            description=textwrap.dedent(
-                """\
-                For some packages, it is necessary to clean up some run-time created paths. Typical use cases are
-                deleting log files, cache files, or persistent state. This can be done via the `clean-after-removal`
-                key. An example being:
-
-                    packages:
-                        foo:
-                            clean-after-removal:
-                            - /var/log/foo/*.log
-                            - /var/log/foo/*.log.gz
-                            - path: /var/log/foo/
-                              ignore-non-empty-dir: true
-                            - /etc/non-conffile-configuration.conf
-                            - path: /var/cache/foo
-                              recursive: true
-
-                The `clean-after-removal` key accepts a list, where each element is either a mapping, a string or a list
-                of strings. When an element is a mapping, then the following key/value pairs are applicable:
-
-                 * `path` or `paths` (required): A path match (`path`) or a list of path matches (`paths`) defining the
-                   path(s) that should be removed after clean. The path match(es) can use globs and manifest variables.
-                   Every path matched will by default be removed via `rm -f` or `rmdir` depending on whether the path
-                   provided ends with a *literal* `/`. Special rules for matches:
-                   - Globs are interpreted by the shell, so shell (`/bin/sh`) rules apply to globs rather than
-                     `debputy`'s glob rules. As an example, `foo/*` will **not** match `foo/.hidden-file`.
-                   - `debputy` cannot evaluate whether these paths/globs will match the desired paths (or anything at
-                     all). Be sure to test the resulting package.
-                   - When a symlink is matched, it is not followed.
-                   - Directory handling depends on the `recursive` attribute and whether the pattern ends with a literal
-                     "/".
-                   - `debputy` has restrictions on the globs being used to prevent rules that could cause massive damage
-                     to the system.
-
-                 * `recursive` (optional): When `true`, the removal rule will use `rm -fr` rather than `rm -f` or `rmdir`,
-                   meaning any directory matched will be deleted along with all of its contents.
-
-                 * `ignore-non-empty-dir` (optional): When `true`, each path must be or match a directory (and as a
-                   consequence each path must end with a literal `/`). The affected directories will be deleted only if
-                   they are empty. Non-empty directories will be skipped. This option is mutually exclusive with
-                   `recursive`.
-
-                 * `delete-on` (optional, defaults to `purge`): This attribute defines when the removal happens. It can
-                   be set to one of the following values:
-                   - `purge`: The removal happens when the package is being purged. This is the default. At a technical
-                     level, the removal occurs at `postrm purge`.
-                   - `removal`: The removal happens immediately after the package has been removed. At a technical level,
-                     the removal occurs at `postrm remove`.
-
-                This feature resembles the concept of `rpm`'s `%ghost` files.
-                """
-            ),
-            reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#remove-runtime-created-paths-on-purge-or-post-removal-clean-after-removal",
-        ),
-    )
-
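For orientation, the `_parse_clean_after_removal` function later in this file turns each such rule into a `postrm` shell fragment. A hand-worked sketch (editorial illustration, not generated output) of what the `delete-on: purge` case with the pattern `/var/log/foo/*.log` builds up, following the `rm -f` branch of the snippet generation:

    condition = '[ "$1" = "purge" ]'
    content_lines = [
        f"if {condition}; then\n",
        '    rm -f "${DPKG_ROOT}"/var/log/foo/*.log\n',
        "fi\n",
    ]
    # "".join(content_lines) is what ends up in the postrm maintscript snippet.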

-    api.pluggable_manifest_rule(
-        OPARSER_PACKAGES,
-        "installation-search-dirs",
-        InstallationSearchDirsParsedFormat,
-        _parse_installation_search_dirs,
-        source_format=List[FileSystemExactMatchRule],
-        inline_reference_documentation=reference_documentation(
-            title="Custom installation time search directories (`installation-search-dirs`)",
-            description=textwrap.dedent(
-                """\
-                For source packages that do multiple builds, it can be an advantage to provide a custom list of
-                installation-time search directories. This can be done via the `installation-search-dirs` key. A common
-                example is building the source twice with different optimization and feature settings where the second
-                build is for the `debian-installer` (in the form of a `udeb` package). A sample manifest snippet could
-                look something like:
-
-                    installations:
-                    - install:
-                        # Because of the search order (see below), `foo` installs `debian/tmp/usr/bin/tool`,
-                        # while `foo-udeb` installs `debian/tmp-udeb/usr/bin/tool` (assuming both paths are
-                        # available). Note the rule can be split into two with the same effect if that aids
-                        # readability or understanding.
-                        source: usr/bin/tool
-                        into:
-                          - foo
-                          - foo-udeb
-                    packages:
-                        foo-udeb:
-                            installation-search-dirs:
-                            - debian/tmp-udeb
-
-
-                The `installation-search-dirs` key accepts a list, where each element is a path (str) relative to the
-                source root pointing to a directory that should be used as a search directory (absolute paths are still
-                interpreted as relative to the source root). This list should contain all search directories that should
-                be applicable for this package (except the source root itself, which is always appended after the
-                provided list). If the key is omitted, then `debputy` will provide a default search order (in the `dh`
-                integration, the default is the directory `debian/tmp`).
-
-                If a non-existing or non-directory path is listed, then it will be skipped (with an info-level note).
-                If the path exists and is a directory, it will also be checked for "not-installed" paths.
-                """
-            ),
-            reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#custom-installation-time-search-directories-installation-search-dirs",
-        ),
-    )
-
-
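A worked illustration of the documented search order (editorial sketch mirroring the sample manifest above, with "." standing in for the source root): for `foo-udeb`, the effective search path is the listed directories followed by the source root, which is always appended last.

    listed_dirs = ["debian/tmp-udeb"]
    effective_search_order = [*listed_dirs, "."]
    assert effective_search_order == ["debian/tmp-udeb", "."]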

-class ServiceRuleSourceFormat(TypedDict):
-    service: str
-    type_of_service: NotRequired[str]
-    service_scope: NotRequired[Literal["system", "user"]]
-    enable_on_install: NotRequired[bool]
-    start_on_install: NotRequired[bool]
-    on_upgrade: NotRequired[ServiceUpgradeRule]
-    service_manager: NotRequired[
-        Annotated[str, DebputyParseHint.target_attribute("service_managers")]
-    ]
-    service_managers: NotRequired[List[str]]
-
-
-class ServiceRuleParsedFormat(DebputyParsedContent):
-    service: str
-    type_of_service: NotRequired[str]
-    service_scope: NotRequired[Literal["system", "user"]]
-    enable_on_install: NotRequired[bool]
-    start_on_install: NotRequired[bool]
-    on_upgrade: NotRequired[ServiceUpgradeRule]
-    service_managers: NotRequired[List[str]]
-
-
-@dataclasses.dataclass(slots=True, frozen=True)
-class ServiceRule:
-    definition_source: str
-    service: str
-    type_of_service: str
-    service_scope: Literal["system", "user"]
-    enable_on_install: Optional[bool]
-    start_on_install: Optional[bool]
-    on_upgrade: Optional[ServiceUpgradeRule]
-    service_managers: Optional[FrozenSet[str]]
-
-    @classmethod
-    def from_service_rule_parsed_format(
-        cls,
-        data: ServiceRuleParsedFormat,
-        attribute_path: AttributePath,
-    ) -> "Self":
-        service_managers = data.get("service_managers")
-        return cls(
-            attribute_path.path,
-            data["service"],
-            data.get("type_of_service", "service"),
-            cast("Literal['system', 'user']", data.get("service_scope", "system")),
-            data.get("enable_on_install"),
-            data.get("start_on_install"),
-            data.get("on_upgrade"),
-            frozenset(service_managers) if service_managers else service_managers,
-        )
-
-    def applies_to_service_manager(self, service_manager: str) -> bool:
-        return self.service_managers is None or service_manager in self.service_managers
-
-    def apply_to_service_definition(
-        self,
-        service_definition: ServiceDefinition[DSD],
-    ) -> ServiceDefinition[DSD]:
-        assert isinstance(service_definition, ServiceDefinitionImpl)
-        if not service_definition.is_plugin_provided_definition:
-            _error(
-                f"Conflicting definitions related to {self.service} (type: {self.type_of_service},"
-                f" scope: {self.service_scope}). First definition at {service_definition.definition_source},"
-                f" the second at {self.definition_source}. If they are for different service managers,"
-                " you can often avoid this problem by explicitly defining which service managers are applicable"
-                ' to each rule via the "service-managers" keyword.'
-            )
-        changes = {
-            "definition_source": self.definition_source,
-            "is_plugin_provided_definition": False,
-        }
-        if (
-            self.service != service_definition.name
-            and self.service in service_definition.names
-        ):
-            changes["name"] = self.service
-        if self.enable_on_install is not None:
-            changes["auto_enable_on_install"] = self.enable_on_install
-        if self.start_on_install is not None:
-            changes["auto_start_on_install"] = self.start_on_install
-        if self.on_upgrade is not None:
-            changes["on_upgrade"] = self.on_upgrade
-
-        return service_definition.replace(**changes)
-
-
-class BinaryVersionParsedFormat(DebputyParsedContent):
-    binary_version: str
-
-
-class ListParsedFormat(DebputyParsedContent):
-    elements: List[Any]
-
-
-class ListOfTransformationRulesFormat(DebputyParsedContent):
-    elements: List[TransformationRule]
-
-
-class ListOfDpkgMaintscriptHelperCommandFormat(DebputyParsedContent):
-    elements: List[DpkgMaintscriptHelperCommand]
-
-
-class InstallationSearchDirsParsedFormat(DebputyParsedContent):
-    installation_search_dirs: List[FileSystemExactMatchRule]
-
-
-def _parse_binary_version(
-    _name: str,
-    parsed_data: BinaryVersionParsedFormat,
-    _attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> str:
-    return parsed_data["binary_version"]
-
-
-def _parse_installation_search_dirs(
-    _name: str,
-    parsed_data: InstallationSearchDirsParsedFormat,
-    _attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> List[FileSystemExactMatchRule]:
-    return parsed_data["installation_search_dirs"]
-
-
-def _process_service_rules(
-    _name: str,
-    parsed_data: List[ServiceRuleParsedFormat],
-    attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> List[ServiceRule]:
-    return [
-        ServiceRule.from_service_rule_parsed_format(x, attribute_path[i])
-        for i, x in enumerate(parsed_data)
-    ]
-
-
-def _unpack_list(
-    _name: str,
-    parsed_data: List[Any],
-    _attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> List[Any]:
-    return parsed_data
-
-
-class CleanAfterRemovalRuleSourceFormat(TypedDict):
-    path: NotRequired[Annotated[str, DebputyParseHint.target_attribute("paths")]]
-    paths: NotRequired[List[str]]
-    delete_on: NotRequired[Literal["purge", "removal"]]
-    recursive: NotRequired[bool]
-    ignore_non_empty_dir: NotRequired[bool]
-
-
-class CleanAfterRemovalRule(DebputyParsedContent):
-    paths: List[str]
-    delete_on: NotRequired[Literal["purge", "removal"]]
-    recursive: NotRequired[bool]
-    ignore_non_empty_dir: NotRequired[bool]
-
-
-# FIXME: Not optimal that we are doing an initialization of ParserGenerator here. But the rule does not depend
-# on any complex types registered by plugins, so it will work for now.
-_CLEAN_AFTER_REMOVAL_RULE_PARSER = ParserGenerator().generate_parser(
-    CleanAfterRemovalRule,
-    source_content=Union[CleanAfterRemovalRuleSourceFormat, str, List[str]],
-    inline_reference_documentation=reference_documentation(
-        reference_documentation_url=f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#remove-runtime-created-paths-on-purge-or-post-removal-clean-after-removal",
-    ),
-)
-
-
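The `Union` source format above accepts a mapping, a plain string, or a list of strings. A rough sketch of the equivalent normalized content (hand-normalized for illustration only; the real normalization is done by the generated parser, including the `path` to `paths` mapping implied by the `DebputyParseHint.target_attribute` hint):

    as_string = "/var/log/foo/*.log"
    as_list = ["/var/log/foo/*.log", "/var/log/foo/*.log.gz"]
    as_mapping = {"path": "/var/cache/foo", "recursive": True}
    normalized = [
        {"paths": ["/var/log/foo/*.log"]},
        {"paths": ["/var/log/foo/*.log", "/var/log/foo/*.log.gz"]},
        {"paths": ["/var/cache/foo"], "recursive": True},
    ]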

-# The order between clean_on_removal and conffile_management is
-# important. We want the dpkg conffile management rules to happen before the
-# clean_on_removal rules. Since the latter only affects `postrm`
-# and the order is reversed for `postrm` scripts (among others), we need to do
-# clean_on_removal first to account for the reversal of order.
-#
-# FIXME: All of this is currently not really possible to do, but it should be.
-# (I think it is the correct order by "mistake" rather than by "design", which is
-# what this note is about.)
-def _parse_clean_after_removal(
-    _name: str,
-    parsed_data: ListParsedFormat,
-    attribute_path: AttributePath,
-    parser_context: ParserContextData,
-) -> None:  # TODO: Return and pass to a maintscript helper
-    raw_clean_after_removal = parsed_data["elements"]
-    package_state = parser_context.current_binary_package_state
-
-    for no, raw_transformation in enumerate(raw_clean_after_removal):
-        definition_source = attribute_path[no]
-        clean_after_removal_rules = _CLEAN_AFTER_REMOVAL_RULE_PARSER.parse_input(
-            raw_transformation,
-            definition_source,
-            parser_context=parser_context,
-        )
-        patterns = clean_after_removal_rules["paths"]
-        if patterns:
-            definition_source.path_hint = patterns[0]
-        delete_on = clean_after_removal_rules.get("delete_on") or "purge"
-        recurse = clean_after_removal_rules.get("recursive") or False
-        ignore_non_empty_dir = (
-            clean_after_removal_rules.get("ignore_non_empty_dir") or False
-        )
-        if delete_on == "purge":
-            condition = '[ "$1" = "purge" ]'
-        else:
-            condition = '[ "$1" = "remove" ]'
-
-        if ignore_non_empty_dir:
-            if recurse:
-                raise ManifestParseException(
-                    'The "recursive" and "ignore-non-empty-dir" options are mutually exclusive.'
-                    f" Both were enabled at the same time at {definition_source.path}"
-                )
-            for pattern in patterns:
-                if not pattern.endswith("/"):
-                    raise ManifestParseException(
-                        'When ignore-non-empty-dir is True, then all patterns must end with a literal "/"'
-                        f' to ensure they only apply to directories. The pattern "{pattern}" at'
-                        f" {definition_source.path} did not."
-                    )
-
-        substitution = parser_context.substitution
-        match_rules = [
-            MatchRule.from_path_or_glob(
-                p, definition_source.path, substitution=substitution
-            )
-            for p in patterns
-        ]
-        content_lines = [
-            f"if {condition}; then\n",
-        ]
-        for idx, match_rule in enumerate(match_rules):
-            original_pattern = patterns[idx]
-            if match_rule is MATCH_ANYTHING:
-                raise ManifestParseException(
-                    f'Using "{original_pattern}" in a clean rule would trash the system.'
-                    f" Please restrict this pattern at {definition_source.path} considerably."
-                )
-            is_subdir_match = False
-            matched_directory: Optional[str]
-            if isinstance(match_rule, ExactFileSystemPath):
-                matched_directory = (
-                    os.path.dirname(match_rule.path)
-                    if match_rule.path not in ("/", ".", "./")
-                    else match_rule.path
-                )
-                is_subdir_match = True
-            else:
-                matched_directory = getattr(match_rule, "directory", None)
-
-            if matched_directory is None:
-                raise ManifestParseException(
-                    f'The pattern "{original_pattern}" defined at {definition_source.path} is not'
-                    f" trivially anchored in a specific directory. Cowardly refusing to use it"
-                    f" in a clean rule as it may trash the system if the pattern is overreaching."
-                    f" Please avoid glob characters in the top level directories."
-                )
-            assert matched_directory.startswith("./") or matched_directory in (
-                ".",
-                "./",
-                "",
-            )
-            acceptable_directory = False
-            would_have_allowed_direct_match = False
-            while matched_directory not in (".", "./", ""):
-                # Our acceptable paths set includes "/var/lib" or "/etc". We require that the
-                # pattern is either an exact match, in which case it may match directly inside
-                # the acceptable directory, OR it is a pattern against a subdirectory of the
-                # acceptable path. As an example:
-                #
-                #  /etc/inputrc <-- OK, exact match
-                #  /etc/foo/*   <-- OK, subdir match
-                #  /etc/*       <-- ERROR, glob directly in the accepted directory.
-                if is_subdir_match and (
-                    matched_directory
-                    in ACCEPTABLE_CLEAN_ON_REMOVAL_IF_EXACT_MATCH_OR_SUBDIR_OF
-                ):
-                    acceptable_directory = True
-                    break
-                if (
-                    matched_directory
-                    in ACCEPTABLE_CLEAN_ON_REMOVAL_FOR_GLOBS_AND_EXACT_MATCHES
-                ):
-                    # Special-case: In some directories (such as /var/log), we allow globs directly.
-                    # Notably, X11's log files are /var/log/Xorg.*.log
-                    acceptable_directory = True
-                    break
-                if (
-                    matched_directory
-                    in ACCEPTABLE_CLEAN_ON_REMOVAL_IF_EXACT_MATCH_OR_SUBDIR_OF
-                ):
-                    would_have_allowed_direct_match = True
-                    break
-                matched_directory = os.path.dirname(matched_directory)
-                is_subdir_match = True
-
-            if would_have_allowed_direct_match and not acceptable_directory:
-                raise ManifestParseException(
-                    f'The pattern "{original_pattern}" defined at {definition_source.path} seems to'
-                    " be overreaching. If it had been a path (and not used a glob), the rule would"
-                    " have been permitted."
-                )
-            elif not acceptable_directory:
-                raise ManifestParseException(
-                    f'The pattern or path "{original_pattern}" defined at {definition_source.path} seems to'
-                    f' be overreaching or not limited to the set of "known acceptable" directories.'
-                )
-
-            try:
-                shell_escaped_pattern = match_rule.shell_escape_pattern()
-            except TypeError:
-                raise ManifestParseException(
-                    f'Sorry, the pattern "{original_pattern}" defined at {definition_source.path}'
-                    f" is unfortunately not supported by `debputy` for clean-after-removal rules."
-                    f" Please rewrite the rule to something like `/var/log/foo/*.log` or a"
-                    f' similar "trivial" pattern. You may have to split the rule into multiple'
-                    f" patterns to achieve this. This restriction is there to enable `debputy` to"
-                    f' ensure the pattern is correctly executed plus catch "obvious system'
-                    f' trashing" patterns. Apologies for the inconvenience.'
-                )
-
-            if ignore_non_empty_dir:
-                cmd = f'    rmdir --ignore-fail-on-non-empty "${{DPKG_ROOT}}"{shell_escaped_pattern}\n'
-            elif recurse:
-                cmd = f'    rm -fr "${{DPKG_ROOT}}"{shell_escaped_pattern}\n'
-            elif original_pattern.endswith("/"):
-                cmd = f'    rmdir "${{DPKG_ROOT}}"{shell_escaped_pattern}\n'
-            else:
-                cmd = f'    rm -f "${{DPKG_ROOT}}"{shell_escaped_pattern}\n'
-            content_lines.append(cmd)
-        content_lines.append("fi\n")
-
-        snippet = MaintscriptSnippet(definition_source.path, "".join(content_lines))
-        package_state.maintscript_snippets["postrm"].append(snippet)
diff --git a/coverage-report/d_d5d6843b45eec01e_debputy_plugin_py.html b/coverage-report/d_d5d6843b45eec01e_debputy_plugin_py.html
deleted file mode 100644
index 5a81ca5..0000000
--- a/coverage-report/d_d5d6843b45eec01e_debputy_plugin_py.html
+++ /dev/null
@@ -1,499 +0,0 @@
-Coverage for src/debputy/plugin/debputy/debputy_plugin.py: 100% (78 statements)
-coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
-import textwrap
-
-from debputy.plugin.api import (
-    DebputyPluginInitializer,
-    packager_provided_file_reference_documentation,
-)
-from debputy.plugin.debputy.metadata_detectors import (
-    detect_systemd_tmpfiles,
-    detect_kernel_modules,
-    detect_icons,
-    detect_gsettings_dependencies,
-    detect_xfonts,
-    detect_initramfs_hooks,
-    detect_systemd_sysusers,
-    detect_pycompile_files,
-    translate_capabilities,
-    pam_auth_update,
-    auto_depends_arch_any_solink,
-)
-from debputy.plugin.debputy.paths import (
-    SYSTEMD_TMPFILES_DIR,
-    INITRAMFS_HOOK_DIR,
-    GSETTINGS_SCHEMA_DIR,
-    SYSTEMD_SYSUSERS_DIR,
-)
-from debputy.plugin.debputy.private_api import initialize_via_private_api
-
-
-def initialize_debputy_features(api: DebputyPluginInitializer) -> None:
-    initialize_via_private_api(api)
-    declare_manifest_variables(api)
-    register_packager_provided_files(api)
-    register_package_metadata_detectors(api)
-
-
-def declare_manifest_variables(api: DebputyPluginInitializer) -> None:
-    api.manifest_variable(
-        "path:BASH_COMPLETION_DIR",
-        "/usr/share/bash-completion/completions",
-        variable_reference_documentation="Directory to install bash completions into",
-    )
-    api.manifest_variable(
-        "path:GNU_INFO_DIR",
-        "/usr/share/info",
-        variable_reference_documentation="Directory to install GNU INFO files into",
-    )
-
-    api.manifest_variable(
-        "token:NL",
-        "\n",
-        variable_reference_documentation="Literal newline (linefeed) character",
-    )
-    api.manifest_variable(
-        "token:NEWLINE",
-        "\n",
-        variable_reference_documentation="Literal newline (linefeed) character",
-    )
-    api.manifest_variable(
-        "token:TAB",
-        "\t",
-        variable_reference_documentation="Literal tab character",
-    )
-    api.manifest_variable(
-        "token:OPEN_CURLY_BRACE",
-        "{",
-        variable_reference_documentation='Literal "{" character',
-    )
-    api.manifest_variable(
-        "token:CLOSE_CURLY_BRACE",
-        "}",
-        variable_reference_documentation='Literal "}" character',
-    )
-    api.manifest_variable(
-        "token:DOUBLE_OPEN_CURLY_BRACE",
-        "{{",
-        variable_reference_documentation='Literal "{{" string - useful to avoid triggering a substitution',
-    )
-    api.manifest_variable(
-        "token:DOUBLE_CLOSE_CURLY_BRACE",
-        "}}",
-        variable_reference_documentation='Literal "}}" string - useful to avoid triggering a substitution',
-    )
-
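A minimal stand-in showing what the token variables above are for (an assumption about the engine's behaviour: the real manifest substitution resolves `{{NAME}}` occurrences against registered variables; this sketch is not debputy's implementation):

    tokens = {"token:NL": "\n", "token:TAB": "\t"}

    def subst(text: str) -> str:
        for name, value in tokens.items():
            text = text.replace("{{" + name + "}}", value)
        return text

    assert subst("line1{{token:NL}}line2") == "line1\nline2"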

-def register_package_metadata_detectors(api: DebputyPluginInitializer) -> None:
-    api.metadata_or_maintscript_detector("systemd-tmpfiles", detect_systemd_tmpfiles)
-    api.metadata_or_maintscript_detector("systemd-sysusers", detect_systemd_sysusers)
-    api.metadata_or_maintscript_detector("kernel-modules", detect_kernel_modules)
-    api.metadata_or_maintscript_detector("icon-cache", detect_icons)
-    api.metadata_or_maintscript_detector(
-        "gsettings-dependencies",
-        detect_gsettings_dependencies,
-    )
-    api.metadata_or_maintscript_detector("xfonts", detect_xfonts)
-    api.metadata_or_maintscript_detector("initramfs-hooks", detect_initramfs_hooks)
-    api.metadata_or_maintscript_detector("pycompile-files", detect_pycompile_files)
-    api.metadata_or_maintscript_detector(
-        "translate-capabilities",
-        translate_capabilities,
-    )
-    api.metadata_or_maintscript_detector("pam-auth-update", pam_auth_update)
-    api.metadata_or_maintscript_detector(
-        "auto-depends-arch-any-solink",
-        auto_depends_arch_any_solink,
-    )
-
-
-def register_packager_provided_files(api: DebputyPluginInitializer) -> None:
-    api.packager_provided_file(
-        "tmpfiles",
-        f"{SYSTEMD_TMPFILES_DIR}/{{name}}.conf",
-        reference_documentation=packager_provided_file_reference_documentation(
-            format_documentation_uris=["man:tmpfiles.d(5)"]
-        ),
-    )
-    api.packager_provided_file(
-        "sysusers",
-        f"{SYSTEMD_SYSUSERS_DIR}/{{name}}.conf",
-        reference_documentation=packager_provided_file_reference_documentation(
-            format_documentation_uris=["man:sysusers.d(5)"]
-        ),
-    )
-    api.packager_provided_file(
-        "bash-completion", "/usr/share/bash-completion/completions/{name}"
-    )
-    api.packager_provided_file(
-        "bug-script",
-        "./usr/share/bug/{name}/script",
-        default_mode=0o0755,
-        allow_name_segment=False,
-    )
-    api.packager_provided_file(
-        "bug-control",
-        "/usr/share/bug/{name}/control",
-        allow_name_segment=False,
-    )
-
-    api.packager_provided_file(
-        "bug-presubj",
-        "/usr/share/bug/{name}/presubj",
-        allow_name_segment=False,
-    )
-
-    api.packager_provided_file("pam", "/usr/lib/pam.d/{name}")
-    api.packager_provided_file(
-        "ppp.ip-up",
-        "/etc/ppp/ip-up.d/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "ppp.ip-down",
-        "/etc/ppp/ip-down.d/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "lintian-overrides",
-        "/usr/share/lintian/overrides/{name}",
-        allow_name_segment=False,
-    )
-    api.packager_provided_file("logrotate", "/etc/logrotate.d/{name}")
-    api.packager_provided_file(
-        "logcheck.cracking",
-        "/etc/logcheck/cracking.d/{name}",
-        post_formatting_rewrite=_replace_dot_with_underscore,
-    )
-    api.packager_provided_file(
-        "logcheck.violations",
-        "/etc/logcheck/violations.d/{name}",
-        post_formatting_rewrite=_replace_dot_with_underscore,
-    )
-    api.packager_provided_file(
-        "logcheck.violations.ignore",
-        "/etc/logcheck/violations.ignore.d/{name}",
-        post_formatting_rewrite=_replace_dot_with_underscore,
-    )
-    api.packager_provided_file(
-        "logcheck.ignore.workstation",
-        "/etc/logcheck/ignore.d.workstation/{name}",
-        post_formatting_rewrite=_replace_dot_with_underscore,
-    )
-    api.packager_provided_file(
-        "logcheck.ignore.server",
-        "/etc/logcheck/ignore.d.server/{name}",
-        post_formatting_rewrite=_replace_dot_with_underscore,
-    )
-    api.packager_provided_file(
-        "logcheck.ignore.paranoid",
-        "/etc/logcheck/ignore.d.paranoid/{name}",
-        post_formatting_rewrite=_replace_dot_with_underscore,
-    )
-
-    api.packager_provided_file("mime", "/usr/lib/mime/packages/{name}")
-    api.packager_provided_file("sharedmimeinfo", "/usr/share/mime/packages/{name}.xml")
-
-    api.packager_provided_file(
-        "if-pre-up",
-        "/etc/network/if-pre-up.d/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "if-up",
-        "/etc/network/if-up.d/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "if-down",
-        "/etc/network/if-down.d/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "if-post-down",
-        "/etc/network/if-post-down.d/{name}",
-        default_mode=0o0755,
-    )
-
-    api.packager_provided_file(
-        "cron.hourly",
-        "/etc/cron.hourly/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "cron.daily",
-        "/etc/cron.daily/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "cron.weekly",
-        "/etc/cron.weekly/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "cron.monthly",
-        "./etc/cron.monthly/{name}",
-        default_mode=0o0755,
-    )
-    api.packager_provided_file(
-        "cron.yearly",
-        "/etc/cron.yearly/{name}",
-        default_mode=0o0755,
-    )
-    # cron.d uses 0644 unlike the others
-    api.packager_provided_file(
-        "cron.d",
-        "/etc/cron.d/{name}",
-        reference_documentation=packager_provided_file_reference_documentation(
-            format_documentation_uris=["man:crontab(5)"]
-        ),
-    )
-
-    api.packager_provided_file(
-        "initramfs-hook", f"{INITRAMFS_HOOK_DIR}/{{name}}", default_mode=0o0755
-    )
-
-    api.packager_provided_file("modprobe", "/etc/modprobe.d/{name}.conf")
-
-    api.packager_provided_file(
-        "init",
-        "/etc/init.d/{name}",
-        default_mode=0o755,
-    )
-    api.packager_provided_file("default", "/etc/default/{name}")
-
-    for stem in [
-        "mount",
-        "path",
-        "service",
-        "socket",
-        "target",
-        "timer",
-    ]:
-        api.packager_provided_file(
-            stem,
-            f"/usr/lib/systemd/system/{{name}}.{stem}",
-            reference_documentation=packager_provided_file_reference_documentation(
-                format_documentation_uris=[f"man:systemd.{stem}(5)"]
-            ),
-        )
-
-    for stem in [
-        "path",
-        "service",
-        "socket",
-        "target",
-        "timer",
-    ]:
-        api.packager_provided_file(
-            f"@{stem}", f"/usr/lib/systemd/system/{{name}}@.{stem}"
-        )
-
-    # api.packager_provided_file(
-    #     "udev",
-    #     "./lib/udev/rules.d/{priority:02}-{name}.rules",
-    #     default_priority=60,
-    # )
-
-    api.packager_provided_file(
-        "gsettings-override",
-        f"{GSETTINGS_SCHEMA_DIR}/{{priority:02}}_{{name}}.gschema.override",
-        default_priority=10,
-    )
-
-    # Special-cases that will probably not be a good example for other plugins
-    api.packager_provided_file(
-        "changelog",
-        # The "changelog.Debian" gets renamed to "changelog" for native packages elsewhere.
-        # Also, the changelog trimming is done elsewhere.
-        "/usr/share/doc/{name}/changelog.Debian",
-        allow_name_segment=False,
-        packageless_is_fallback_for_all_packages=True,
-        reference_documentation=packager_provided_file_reference_documentation(
-            description=textwrap.dedent(
-                """\
-                This file is the changelog of the package and is mandatory.
-
-                The changelog contains the version of the source package and is mandatory for all
-                packages.
-
-                Use `dch --create` to create the changelog.
-
-                In theory, the binary package can have a different changelog than the source
-                package (by having `debian/binary-package.changelog`). However, it is generally
-                not useful and leads to double administration. It has not been used in practice.
-                """
-            ),
-            format_documentation_uris=[
-                "man:deb-changelog(5)",
-                "https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog",
-                "man:dch(1)",
-            ],
-        ),
-    )
-    api.packager_provided_file(
-        "copyright",
-        "/usr/share/doc/{name}/copyright",
-        allow_name_segment=False,
-        packageless_is_fallback_for_all_packages=True,
-        reference_documentation=packager_provided_file_reference_documentation(
-            description=textwrap.dedent(
-                """\
-                This file documents the license and copyright information of the binary package.
-                Packages aimed at the Debian archive (and most derivatives thereof) must have this file.
-
-                For packages not aimed at Debian, the file can still be useful to convey the license
-                terms of the package (which is often a requirement in many licenses). However, it is
-                not a strict *technical* requirement. Whether it is a legal requirement depends on the
-                license.
-
-                Often, the same file can be used for all packages. In the extremely rare case where
-                one binary package has a "vastly different" license than the other packages, you can
-                provide a package-specific version for that package.
-                """
-            ),
-            format_documentation_uris=[
-                "https://www.debian.org/doc/debian-policy/ch-source.html#copyright-debian-copyright",
-                "https://www.debian.org/doc/debian-policy/ch-docs.html#s-copyrightfile",
-                "https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/",
-            ],
-        ),
-    )
-    api.packager_provided_file(
-        "NEWS",
-        "/usr/share/doc/{name}/NEWS.Debian",
-        allow_name_segment=False,
-        packageless_is_fallback_for_all_packages=True,
-        reference_documentation=packager_provided_file_reference_documentation(
-            description=textwrap.dedent(
-                """\
-                Important news that should be shown to the user/admin when upgrading. If a system has
-                apt-listchanges installed, then the contents of this file will be shown prior to upgrading
-                the package.
-
-                Uses a similar format to that of debian/changelog (create with `dch --news --create`).
-                """
-            ),
-            format_documentation_uris=[
-                "https://www.debian.org/doc/manuals/developers-reference/best-pkging-practices.en.html#supplementing-changelogs-with-news-debian-files",
-                "man:dch(1)",
-            ],
-        ),
-    )
-    api.packager_provided_file(
-        "README.Debian",
-        "/usr/share/doc/{name}/README.Debian",
-        allow_name_segment=False,
-    )
-    api.packager_provided_file(
-        "TODO",
-        "/usr/share/doc/{name}/TODO.Debian",
-        allow_name_segment=False,
-    )
-    # From dh-python / dh_python3
-    # api.packager_provided_file(
-    #     "bcep",
-    #     "/usr/share/python3/bcep/{name}",
-    #     allow_name_segment=False,
-    # )
-
-
-def _replace_dot_with_underscore(x: str) -> str:
-    return x.replace(".", "_")
-
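A quick worked example of the `post_formatting_rewrite` hook used by the logcheck registrations above (runnable against `_replace_dot_with_underscore` as defined; the file stem is hypothetical, and the motivation — logcheck not handling dots in rule file names — is an assumption carried over from the equivalent debhelper behaviour):

    assert _replace_dot_with_underscore("libfoo2.0") == "libfoo2_0"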
diff --git a/coverage-report/d_d5d6843b45eec01e_discard_rules_py.html b/coverage-report/d_d5d6843b45eec01e_discard_rules_py.html
deleted file mode 100644
index 23adeb8..0000000
--- a/coverage-report/d_d5d6843b45eec01e_discard_rules_py.html
+++ /dev/null
@@ -1,196 +0,0 @@
-Coverage for src/debputy/plugin/debputy/discard_rules.py: 96% (34 statements)
-coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
-import re
-
-from debputy.plugin.api import VirtualPath
-
-_VCS_PATHS = {
-    ".arch-inventory",
-    ".arch-ids",
-    ".be",
-    ".bzrbackup",
-    ".bzrignore",
-    ".bzrtags",
-    ".cvsignore",
-    ".hg",
-    ".hgignore",
-    ".hgtags",
-    ".hgsigs",
-    ".git",
-    ".gitignore",
-    ".gitattributes",
-    ".gitmodules",
-    ".gitreview",
-    ".mailmap",
-    ".mtn-ignore",
-    ".svn",
-    "{arch}",
-    "CVS",
-    "RCS",
-    "_MTN",
-    "_darcs",
-}
-
-_BACKUP_FILES_RE = re.compile(
-    "|".join(
-        [
-            # Common backup files
-            r".*~",
-            r".*[.](?:bak|orig|rej)",
-            # Editor backup/swap files
-            r"[.]#.*",
-            r"[.].*[.]sw.",
-            # Other known stuff
-            r"[.]shelf",
-            r",,.*",  # "baz-style junk" (according to dpkg (Dpkg::Source::Package))
-            r"DEADJOE",  # Joe's one line of immortality that just gets cargo-cult'ed around ... just in case.
-        ]
-    )
-)
-
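A few spot-checks of `_BACKUP_FILES_RE` as defined above (runnable as-is; the file names are illustrative):

    assert _BACKUP_FILES_RE.match("notes.txt~")       # trailing-tilde backup
    assert _BACKUP_FILES_RE.match("config.orig")      # .bak/.orig/.rej family
    assert _BACKUP_FILES_RE.match(".#lockfile")       # editor lock/backup file
    assert not _BACKUP_FILES_RE.match("orig.config")  # no alternative matches from the start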

-_DOXYGEN_DIR_TEST_FILES = ["doxygen.css", "doxygen.svg", "index.html"]
-
-
-def _debputy_discard_pyc_files(path: "VirtualPath") -> bool:
-    if path.name == "__pycache__" and path.is_dir:
-        return True
-    return path.name.endswith((".pyc", ".pyo")) and path.is_file
-
-
-def _debputy_prune_la_files(path: "VirtualPath") -> bool:
-    return (
-        path.name.endswith(".la")
-        and path.is_file
-        and path.absolute.startswith("/usr/lib")
-    )
-
-
-def _debputy_prune_backup_files(path: VirtualPath) -> bool:
-    return bool(_BACKUP_FILES_RE.match(path.name))
-
-
-def _debputy_prune_vcs_paths(path: VirtualPath) -> bool:
-    return path.name in _VCS_PATHS
-
-
-def _debputy_prune_info_dir_file(path: VirtualPath) -> bool:
-    return path.absolute == "/usr/share/info/dir"
-
-
-def _debputy_prune_binary_debian_dir(path: VirtualPath) -> bool:
-    return path.absolute == "/DEBIAN"
-
-
-def _debputy_prune_doxygen_cruft(path: VirtualPath) -> bool:
-    if not path.name.endswith((".md5", ".map")) or not path.is_file:
-        return False
-    parent_dir = path.parent_dir
-    while parent_dir:
-        is_doxygen_dir = True
-        for name in _DOXYGEN_DIR_TEST_FILES:
-            test_file = parent_dir.get(name)
-            if test_file is None or not test_file.is_file:
-                is_doxygen_dir = False
-                break
-
-        if is_doxygen_dir:
-            return True
-        parent_dir = parent_dir.parent_dir
-    return False
diff --git a/coverage-report/d_d5d6843b45eec01e_manifest_root_rules_py.html b/coverage-report/d_d5d6843b45eec01e_manifest_root_rules_py.html
deleted file mode 100644
index e0b43a6..0000000
--- a/coverage-report/d_d5d6843b45eec01e_manifest_root_rules_py.html
+++ /dev/null
@@ -1,349 +0,0 @@
-Coverage for src/debputy/plugin/debputy/manifest_root_rules.py: 79% (57 statements)
-coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
-import textwrap
-from typing import List, Any, Dict, Tuple, TYPE_CHECKING, cast
-
-from debputy._manifest_constants import (
-    ManifestVersion,
-    MK_MANIFEST_VERSION,
-    MK_INSTALLATIONS,
-    SUPPORTED_MANIFEST_VERSIONS,
-    MK_MANIFEST_DEFINITIONS,
-    MK_PACKAGES,
-    MK_MANIFEST_VARIABLES,
-)
-from debputy.exceptions import DebputySubstitutionError
-from debputy.installations import InstallRule
-from debputy.manifest_parser.base_types import DebputyParsedContent
-from debputy.manifest_parser.exceptions import ManifestParseException
-from debputy.manifest_parser.parser_data import ParserContextData
-from debputy.manifest_parser.util import AttributePath
-from debputy.plugin.api import reference_documentation
-from debputy.plugin.api.impl import DebputyPluginInitializerProvider
-from debputy.plugin.api.impl_types import (
-    OPARSER_MANIFEST_ROOT,
-    OPARSER_MANIFEST_DEFINITIONS,
-    SUPPORTED_DISPATCHABLE_OBJECT_PARSERS,
-    OPARSER_PACKAGES,
-)
-from debputy.substitution import VariableNameState, SUBST_VAR_RE
-
-if TYPE_CHECKING:
-    from debputy.highlevel_manifest_parser import YAMLManifestParser
-
-
-def register_manifest_root_rules(api: DebputyPluginInitializerProvider) -> None:
-    # Registration order matters. Notably, definitions must come before anything that can
-    # use definitions (variables), which is why it is second only to the manifest version.
-    api.pluggable_manifest_rule(
-        OPARSER_MANIFEST_ROOT,
-        MK_MANIFEST_VERSION,
-        ManifestVersionFormat,
-        _handle_version,
-        source_format=ManifestVersion,
-        inline_reference_documentation=reference_documentation(
-            title="Manifest version",
-            description=textwrap.dedent(
-                """\
-                All `debputy` manifests must include a `debputy` manifest version, which will enable the
-                format to change over time. For now, there is only one version (`"0.1"`) and you have
-                to include the line:
-
-                    manifest-version: "0.1"
-
-                On its own, a manifest containing only `manifest-version: "..."` will not do anything. So if you
-                end up only having the `manifest-version` key in the manifest, you can just remove the manifest and
-                rely entirely on the built-in rules.
-                """
-            ),
-        ),
-    )
-    api.pluggable_object_parser(
-        OPARSER_MANIFEST_ROOT,
-        MK_MANIFEST_DEFINITIONS,
-        object_parser_key=OPARSER_MANIFEST_DEFINITIONS,
-        on_end_parse_step=lambda _a, _b, _c, mp: mp._ensure_package_states_is_initialized(),
-    )
-    api.pluggable_manifest_rule(
-        OPARSER_MANIFEST_DEFINITIONS,
-        MK_MANIFEST_VARIABLES,
-        ManifestVariablesParsedFormat,
-        _handle_manifest_variables,
-        source_format=Dict[str, str],
-        inline_reference_documentation=reference_documentation(
-            title="Manifest Variables (`variables`)",
-            description=textwrap.dedent(
-                """\
-                It is possible to provide custom manifest variables via the `variables` attribute. An example:
-
-                    manifest-version: '0.1'
-                    definitions:
-                      variables:
-                        LIBPATH: "/usr/lib/{{DEB_HOST_MULTIARCH}}"
-                        SONAME: "1"
-                    installations:
-                    - install:
-                        source: build/libfoo.so.{{SONAME}}*
-                        # The quotes here are for the YAML parser's sake.
-                        dest-dir: "{{LIBPATH}}"
-                        into: libfoo{{SONAME}}
-
-                The value of the `variables` key must be a mapping, where each key is a new variable name and
-                the related value is the value of said key. The keys must be valid variable names and must not
-                shadow existing variables (that is, variables such as `PACKAGE` and `DEB_HOST_MULTIARCH` *cannot*
-                be redefined). The value for each variable *can* refer to *existing* variables, as seen in the
-                example above.
-
-                As usual, `debputy` will insist that all declared variables must be used.
-
-                Limitations:
-                 * When declaring variables that depend on another variable declared in the manifest, the
-                   order is important. The variables are resolved from top to bottom.
-                 * When a manifest variable depends on another manifest variable, the existing variable is
-                   currently always resolved in source context. As a consequence, some variables such as
-                   `{{PACKAGE}}` cannot be used when defining a variable. This restriction may be
-                   lifted in the future.
-                """
-            ),
-        ),
-    )
-
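The top-to-bottom resolution described above can be illustrated with a small stand-in resolver (editorial sketch, not debputy's implementation):

    variables = {"SONAME": "1", "LIB": "libfoo{{SONAME}}"}
    resolved = {}
    for name, raw in variables.items():
        for known, value in resolved.items():
            raw = raw.replace("{{" + known + "}}", value)
        resolved[name] = raw
    assert resolved["LIB"] == "libfoo1"

Had the two keys been declared in the opposite order, the stand-in would leave the `{{SONAME}}` placeholder unresolved in `LIB`, which is why the declaration order matters.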

-    api.pluggable_manifest_rule(
-        OPARSER_MANIFEST_ROOT,
-        MK_INSTALLATIONS,
-        List[InstallRule],
-        _handle_installation_rules,
-        inline_reference_documentation=reference_documentation(
-            title="Installations",
-            description=textwrap.dedent(
-                """\
-                For source packages building a single binary, the `dh_auto_install` call from debhelper will
-                default to providing everything from upstream's install in the binary package. The `debputy`
-                tool matches this behaviour and, accordingly, the `installations` feature is only relevant in
-                this case when you need to manually specify something upstream's install did not cover.
-
-                For sources that build multiple binaries, where `dh_auto_install` does not detect anything to install,
-                or when `dh_auto_install --destdir debian/tmp` is used, the `installations` section of the manifest is
-                used to declare what goes into which binary package. An example:
-
-                    installations:
-                    - install:
-                        sources: "usr/bin/foo"
-                        into: foo
-                    - install:
-                        sources: "usr/*"
-                        into: foo-extra
-
-                All installation rules are processed in order (top to bottom). Once a path has been matched, it can
-                no longer be matched by future rules. In the above example, `usr/bin/foo` would be in the `foo`
-                package while everything in `usr` *except* `usr/bin/foo` would be in `foo-extra`. If these had been
-                ordered in reverse, the `usr/bin/foo` rule would not have matched anything, which would have caused
-                `debputy` to reject the input as an error on that basis. This behaviour is similar to "DEP-5"
-                copyright files, except the order is reversed ("DEP-5" uses "last match wins", whereas here we are
-                doing "first match wins").
-
-                In the rare case that some path needs to be installed into two packages at the same time, this is
-                generally done by changing `into` into a list of packages.
-
-                All installations are currently run in *source* package context. This implies that:
-
-                1) No package-specific substitutions are available. Notably, `{{PACKAGE}}` cannot be resolved.
-                2) All conditions are evaluated in source context. For 99.9% of users, this makes no difference,
-                   but there is a cross-build feature that changes the "per package" architecture, which is affected.
-
-                This is a limitation that should be fixed in `debputy`.
-
-                **Attention debhelper users**: Note the difference between `dh_install` (etc.) vs. `debputy` on
-                overlapping matches for installation.
-                """
-            ),
-        ),
-    )
-
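A compact model of the documented first-match-wins ordering (a simplified matcher for illustration; real install rules use debputy's glob semantics):

    rules = [("usr/bin/foo", "foo"), ("usr/*", "foo-extra")]

    def assign(path):
        for pattern, pkg in rules:
            if path == pattern or (pattern.endswith("*") and path.startswith(pattern[:-1])):
                return pkg
        return None

    assert assign("usr/bin/foo") == "foo"        # exact rule wins first
    assert assign("usr/share/doc") == "foo-extra"  # falls through to the glob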

-    api.pluggable_object_parser(
-        OPARSER_MANIFEST_ROOT,
-        MK_PACKAGES,
-        object_parser_key=OPARSER_PACKAGES,
-        on_end_parse_step=lambda _a, _b, _c, mp: mp._ensure_package_states_is_initialized(),
-        nested_in_package_context=True,
-    )
-
-
-class ManifestVersionFormat(DebputyParsedContent):
-    manifest_version: ManifestVersion
-
-
-class ListOfInstallRulesFormat(DebputyParsedContent):
-    elements: List[InstallRule]
-
-
-class DictFormat(DebputyParsedContent):
-    mapping: Dict[str, Any]
-
-
-class ManifestVariablesParsedFormat(DebputyParsedContent):
-    variables: Dict[str, str]
-
-
-def _handle_version(
-    _name: str,
-    parsed_data: ManifestVersionFormat,
-    _attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> str:
-    manifest_version = parsed_data["manifest_version"]
-    if manifest_version not in SUPPORTED_MANIFEST_VERSIONS:
-        raise ManifestParseException(
-            "Unsupported manifest-version. This implementation supports the following versions:"
-            f' {", ".join(repr(v) for v in SUPPORTED_MANIFEST_VERSIONS)}'
-        )
-    return manifest_version
-
-
-def _handle_manifest_variables(
-    _name: str,
-    parsed_data: ManifestVariablesParsedFormat,
-    variables_path: AttributePath,
-    parser_context: ParserContextData,
-) -> None:
-    variables = parsed_data.get("variables", {})
-    resolved_vars: Dict[str, Tuple[str, AttributePath]] = {}
-    manifest_parser: "YAMLManifestParser" = cast("YAMLManifestParser", parser_context)
-    substitution = manifest_parser.substitution
-    for key, value_raw in variables.items():
-        key_path = variables_path[key]
-        if not SUBST_VAR_RE.match("{{" + key + "}}"):
-            raise ManifestParseException(
-                f"The variable at {key_path.path} has an invalid name and therefore cannot"
-                " be used."
-            )
-        if substitution.variable_state(key) != VariableNameState.UNDEFINED:
-            raise ManifestParseException(
-                f'The variable "{key}" is already reserved/defined. Error triggered by'
-                f" {key_path.path}."
-            )
-        try:
-            value = substitution.substitute(value_raw, key_path.path)
-        except DebputySubstitutionError:
-            if not resolved_vars:
-                raise
-            # See if flushing the resolved variables makes the substitution work
-            substitution = manifest_parser.add_extra_substitution_variables(
-                **resolved_vars
-            )
-            resolved_vars = {}
-            value = substitution.substitute(value_raw, key_path.path)
-        resolved_vars[key] = (value, key_path)
-    substitution = manifest_parser.add_extra_substitution_variables(**resolved_vars)
-
-
-def _handle_installation_rules(
-    _name: str,
-    parsed_data: List[InstallRule],
-    _attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> List[Any]:
-    return parsed_data
-
-
-def _handle_opaque_dict(
-    _name: str,
-    parsed_data: DictFormat,
-    _attribute_path: AttributePath,
-    _parser_context: ParserContextData,
-) -> Dict[str, Any]:
-    return parsed_data["mapping"]
-
diff --git a/coverage-report/d_d5d6843b45eec01e_metadata_detectors_py.html b/coverage-report/d_d5d6843b45eec01e_metadata_detectors_py.html
deleted file mode 100644
index 2c3f260..0000000
--- a/coverage-report/d_d5d6843b45eec01e_metadata_detectors_py.html
+++ /dev/null
@@ -1,649 +0,0 @@
-Coverage for src/debputy/plugin/debputy/metadata_detectors.py: 96% (228 statements)
-coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
-import itertools
-import os
-import re
-import textwrap
-from typing import Iterable, Iterator
-
-from debputy.plugin.api import (
-    VirtualPath,
-    BinaryCtrlAccessor,
-    PackageProcessingContext,
-)
-from debputy.plugin.debputy.paths import (
-    INITRAMFS_HOOK_DIR,
-    SYSTEMD_TMPFILES_DIR,
-    GSETTINGS_SCHEMA_DIR,
-    SYSTEMD_SYSUSERS_DIR,
-)
-from debputy.plugin.debputy.types import DebputyCapability
-from debputy.util import assume_not_none, _warn
-
-DPKG_ROOT = '"${DPKG_ROOT}"'
-DPKG_ROOT_UNQUOTED = "${DPKG_ROOT}"
-
-KERNEL_MODULE_EXTENSIONS = tuple(
-    f"{ext}{comp_ext}"
-    for ext, comp_ext in itertools.product(
-        (".o", ".ko"),
-        ("", ".gz", ".bz2", ".xz"),
-    )
-)
-
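The `itertools.product` above expands to eight suffixes; a runnable spot-check:

    assert ".o" in KERNEL_MODULE_EXTENSIONS
    assert ".ko.xz" in KERNEL_MODULE_EXTENSIONS
    assert len(KERNEL_MODULE_EXTENSIONS) == 8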

-def detect_initramfs_hooks(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    _unused: PackageProcessingContext,
-) -> None:
-    hook_dir = fs_root.lookup(INITRAMFS_HOOK_DIR)
-    if not hook_dir:
-        return
-    for _ in hook_dir.iterdir:
-        # Only add the trigger if the directory is non-empty. It is unlikely to matter a lot,
-        # but we do this to match debhelper.
-        break
-    else:
-        return
-
-    ctrl.dpkg_trigger("activate-noawait", "update-initramfs")
-
-
-def _all_tmpfiles_conf(fs_root: VirtualPath) -> Iterable[VirtualPath]:
-    seen_tmpfiles = set()
-    tmpfiles_dirs = [
-        SYSTEMD_TMPFILES_DIR,
-        "./etc/tmpfiles.d",
-    ]
-    for tmpfiles_dir_path in tmpfiles_dirs:
-        tmpfiles_dir = fs_root.lookup(tmpfiles_dir_path)
-        if not tmpfiles_dir:
-            continue
-        for path in tmpfiles_dir.iterdir:
-            if (
-                not path.is_file
-                or not path.name.endswith(".conf")
-                or path.name in seen_tmpfiles
-            ):
-                continue
-            seen_tmpfiles.add(path.name)
-            yield path
-
-
-def detect_systemd_tmpfiles(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    _unused: PackageProcessingContext,
-) -> None:
-    tmpfiles_confs = [
-        x.name for x in sorted(_all_tmpfiles_conf(fs_root), key=lambda x: x.name)
-    ]
-    if not tmpfiles_confs:
-        return
-
-    tmpfiles_escaped = ctrl.maintscript.escape_shell_words(*tmpfiles_confs)
-
-    snippet = textwrap.dedent(
-        f"""\
-        if [ -x "$(command -v systemd-tmpfiles)" ]; then
-            systemd-tmpfiles ${{DPKG_ROOT:+--root="$DPKG_ROOT"}} --create {tmpfiles_escaped} || true
-        fi
-        """
-    )
-
-    ctrl.maintscript.on_configure(snippet)
-
-
-def _all_sysusers_conf(fs_root: VirtualPath) -> Iterable[VirtualPath]:
-    sysusers_dir = fs_root.lookup(SYSTEMD_SYSUSERS_DIR)
-    if not sysusers_dir:
-        return
-    for child in sysusers_dir.iterdir:
-        if not child.name.endswith(".conf"):
-            continue
-        yield child
-
-
-def detect_systemd_sysusers(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    _unused: PackageProcessingContext,
-) -> None:
-    sysusers_confs = [p.name for p in _all_sysusers_conf(fs_root)]
-    if not sysusers_confs:
-        return
-
-    sysusers_escaped = ctrl.maintscript.escape_shell_words(*sysusers_confs)
-
-    snippet = textwrap.dedent(
-        f"""\
-        systemd-sysusers ${{DPKG_ROOT:+--root="$DPKG_ROOT"}} --create {sysusers_escaped} || true
-        """
-    )
-
-    ctrl.substvars.add_dependency(
-        "misc:Depends", "systemd | systemd-standalone-sysusers | systemd-sysusers"
-    )
-    ctrl.maintscript.on_configure(snippet)
-
-
-def detect_icons(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    _unused: PackageProcessingContext,
-) -> None:
-    icons_root_dir = fs_root.lookup("./usr/share/icons")
-    if not icons_root_dir:
-        return
-    icon_dirs = []
-    for subdir in icons_root_dir.iterdir:
-        if subdir.name in ("gnome", "hicolor"):
-            # dh_icons skips these for some reason.
-            continue
-        for p in subdir.all_paths():
-            if p.is_file and p.name.endswith((".png", ".svg", ".xpm", ".icon")):
-                icon_dirs.append(subdir.absolute)
-                break
-    if not icon_dirs:
-        return
-
-    icon_dir_list_escaped = ctrl.maintscript.escape_shell_words(*icon_dirs)
-
-    postinst_snippet = textwrap.dedent(
-        f"""\
-        if command -v update-icon-caches >/dev/null; then
-            update-icon-caches {icon_dir_list_escaped}
-        fi
-        """
-    )
-
-    postrm_snippet = textwrap.dedent(
-        f"""\
-        if command -v update-icon-caches >/dev/null; then
-            update-icon-caches {icon_dir_list_escaped}
-        fi
-        """
-    )
-
-    ctrl.maintscript.on_configure(postinst_snippet)
-    ctrl.maintscript.unconditionally_in_script("postrm", postrm_snippet)
-
-
-def detect_gsettings_dependencies(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    _unused: PackageProcessingContext,
-) -> None:
-    gsettings_schema_dir = fs_root.lookup(GSETTINGS_SCHEMA_DIR)
-    if not gsettings_schema_dir:
-        return
-
-    for path in gsettings_schema_dir.all_paths():
-        if path.is_file and path.name.endswith((".xml", ".override")):
-            ctrl.substvars.add_dependency(
-                "misc:Depends", "dconf-gsettings-backend | gsettings-backend"
-            )
-            break
-
-
-def detect_kernel_modules(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    _unused: PackageProcessingContext,
-) -> None:
-    for prefix in [".", "./usr"]:
-        module_root_dir = fs_root.lookup(f"{prefix}/lib/modules")
-
-        if not module_root_dir:
-            continue
-
-        module_version_dirs = []
-
-        for module_version_dir in module_root_dir.iterdir:
-            if not module_version_dir.is_dir:
-                continue
-
-            for fs_path in module_version_dir.all_paths():
-                if fs_path.name.endswith(KERNEL_MODULE_EXTENSIONS):
-                    module_version_dirs.append(module_version_dir.name)
-                    break
-
-        for module_version in module_version_dirs:
-            module_version_escaped = ctrl.maintscript.escape_shell_words(module_version)
-            postinst_snippet = textwrap.dedent(
-                f"""\
-                if [ -e /boot/System.map-{module_version_escaped} ]; then
-                    depmod -a -F /boot/System.map-{module_version_escaped} {module_version_escaped} || true
-                fi
-                """
-            )
-
-            postrm_snippet = textwrap.dedent(
-                f"""\
-                if [ -e /boot/System.map-{module_version_escaped} ]; then
-                    depmod -a -F /boot/System.map-{module_version_escaped} {module_version_escaped} || true
-                fi
-                """
-            )
-
-            ctrl.maintscript.on_configure(postinst_snippet)
-            # TODO: This should probably be on removal. However, this is what debhelper did and we should
-            # do the same until we are sure (not that it matters a lot).
-            ctrl.maintscript.unconditionally_in_script("postrm", postrm_snippet)
-
-
-def detect_xfonts(
-    fs_root: VirtualPath,
-    ctrl: BinaryCtrlAccessor,
-    context: PackageProcessingContext,
-) -> None:
-    xfonts_root_dir = fs_root.lookup("./usr/share/fonts/X11/")
-    if not xfonts_root_dir:
-        return
-
-    cmds = []
-    cmds_postinst = []
-    cmds_postrm = []
-    escape_shell_words = ctrl.maintscript.escape_shell_words
-    package_name = context.binary_package.name
-
-    for xfonts_dir in xfonts_root_dir.iterdir:
-        xfonts_dirname = xfonts_dir.name
-        if not xfonts_dir.is_dir or xfonts_dirname.startswith("."):
-            continue
-        if fs_root.lookup(f"./etc/X11/xfonts/{xfonts_dirname}/{package_name}.scale"):
-            cmds.append(escape_shell_words("update-fonts-scale", xfonts_dirname))
-        cmds.append(
-            escape_shell_words("update-fonts-dir", "--x11r7-layout", xfonts_dirname)
-        )
-        alias_file = fs_root.lookup(
-            f"./etc/X11/xfonts/{xfonts_dirname}/{package_name}.alias"
-        )
-        if alias_file:
-            cmds_postinst.append(
-                escape_shell_words(
-                    "update-fonts-alias",
-                    "--include",
-                    alias_file.absolute,
-                    xfonts_dirname,
-                )
-            )
-            cmds_postrm.append(
-                escape_shell_words(
-                    "update-fonts-alias",
-                    "--exclude",
-                    alias_file.absolute,
-                    xfonts_dirname,
-                )
-            )
-
-    if not cmds:
-        return
-
-    postinst_snippet = textwrap.dedent(
-        f"""\
-        if command -v update-fonts-dir >/dev/null; then
-            {';'.join(itertools.chain(cmds, cmds_postinst))}
-        fi
-        """
-    )
-
-    postrm_snippet = textwrap.dedent(
-        f"""\
-        if [ -x "`command -v update-fonts-dir`" ]; then
-            {';'.join(itertools.chain(cmds, cmds_postrm))}
-        fi
-        """
-    )
-
-    ctrl.maintscript.unconditionally_in_script("postinst", postinst_snippet)
-    ctrl.maintscript.unconditionally_in_script("postrm", postrm_snippet)
-    ctrl.substvars.add_dependency("misc:Depends", "xfonts-utils")
-

302 

-

303# debputy does not support python2, so we do not list python / python2. 

-

304_PYTHON_PUBLIC_DIST_DIR_NAMES = re.compile(r"(?:pypy|python)3(?:[.]\d+)?") 

-

305 

-

306 

-

307def _public_python_dist_dirs(fs_root: VirtualPath) -> Iterator[VirtualPath]: 

-

308 usr_lib = fs_root.lookup("./usr/lib") 

-

309 root_dirs = [] 

-

310 if usr_lib: 

-

311 root_dirs.append(usr_lib) 

-

312 

-

313 dbg_root = fs_root.lookup("./usr/lib/debug/usr/lib") 

-

314 if dbg_root: 314 ↛ 315line 314 didn't jump to line 315, because the condition on line 314 was never true

-

315 root_dirs.append(dbg_root) 

-

316 

-

317 for root_dir in root_dirs: 

-

318 python_dirs = ( 

-

319 path 

-

320 for path in root_dir.iterdir 

-

321 if path.is_dir and _PYTHON_PUBLIC_DIST_DIR_NAMES.match(path.name) 

-

322 ) 

-

323 for python_dir in python_dirs: 

-

324 dist_packages = python_dir.get("dist-packages") 

-

325 if not dist_packages: 

-

326 continue 

-

327 yield dist_packages 

-

328 

-

329 

-

330def _has_py_file_in_dir(d: VirtualPath) -> bool: 

-

331 return any(f.is_file and f.name.endswith(".py") for f in d.all_paths()) 331 ↛ exitline 331 didn't finish the generator expression on line 331

-

332 

-

333 

-

334def detect_pycompile_files( 

-

335 fs_root: VirtualPath, 

-

336 ctrl: BinaryCtrlAccessor, 

-

337 context: PackageProcessingContext, 

-

338) -> None: 

-

339 package = context.binary_package.name 

-

340 # TODO: Support configurable list of private dirs 

-

341 private_search_dirs = [ 

-

342 fs_root.lookup(os.path.join(d, package)) 

-

343 for d in [ 

-

344 "./usr/share", 

-

345 "./usr/share/games", 

-

346 "./usr/lib", 

-

347 f"./usr/lib/{context.binary_package.deb_multiarch}", 

-

348 "./usr/lib/games", 

-

349 ] 

-

350 ] 

-

351 private_search_dirs_with_py_files = [ 

-

352 p for p in private_search_dirs if p is not None and _has_py_file_in_dir(p) 

-

353 ] 

-

354 public_search_dirs_has_py_files = any( 

-

355 p is not None and _has_py_file_in_dir(p) 

-

356 for p in _public_python_dist_dirs(fs_root) 

-

357 ) 

-

358 

-

359 if not public_search_dirs_has_py_files and not private_search_dirs_with_py_files: 

-

360 return 

-

361 

-

362 # The dh_python3 helper also supports -V and -X. We do not use them. They can be 

-

363 # replaced by bcep support instead, which is how we will be supporting this kind 

-

364 # of configuration down the line. 

-

365 ctrl.maintscript.unconditionally_in_script( 

-

366 "prerm", 

-

367 textwrap.dedent( 

-

368 f"""\ 

-

369 if command -v py3clean >/dev/null 2>&1; then 

-

370 py3clean -p {package} 

-

371 else 

-

372 dpkg -L {package} | sed -En -e '/^(.*)\\/(.+)\\.py$/s,,rm "\\1/__pycache__/\\2".*,e' 

-

373 find /usr/lib/python3/dist-packages/ -type d -name __pycache__ -empty -print0 | xargs --null --no-run-if-empty rmdir 

-

374 fi 

-

375 """ 

-

376 ), 

-

377 ) 

-

378 if public_search_dirs_has_py_files: 

-

379 ctrl.maintscript.on_configure( 

-

380 textwrap.dedent( 

-

381 f"""\ 

-

382 if command -v py3compile >/dev/null 2>&1; then 

-

383 py3compile -p {package} 

-

384 fi 

-

385 if command -v pypy3compile >/dev/null 2>&1; then 

-

386 pypy3compile -p {package} || true 

-

387 fi 

-

388 """ 

-

389 ) 

-

390 ) 

-

391 for private_dir in private_search_dirs_with_py_files: 

-

392 escaped_dir = ctrl.maintscript.escape_shell_words(private_dir.absolute) 

-

393 ctrl.maintscript.on_configure( 

-

394 textwrap.dedent( 

-

395 f"""\ 

-

396 if command -v py3compile >/dev/null 2>&1; then 

-

397 py3compile -p {package} {escaped_dir} 

-

398 fi 

-

399 if command -v pypy3compile >/dev/null 2>&1; then 

-

400 pypy3compile -p {package} {escaped_dir} || true 

-

401 fi 

-

402 """ 

-

403 ) 

-

404 ) 

-

405 

-

406 

-

407def translate_capabilities( 

-

408 fs_root: VirtualPath, 

-

409 ctrl: BinaryCtrlAccessor, 

-

410 _context: PackageProcessingContext, 

-

411) -> None: 

-

412 caps = [] 

-

413 maintscript = ctrl.maintscript 

-

414 for p in fs_root.all_paths(): 

-

415 if not p.is_file: 

-

416 continue 

-

417 metadata_ref = p.metadata(DebputyCapability) 

-

418 capability = metadata_ref.value 

-

419 if capability is None: 

-

420 continue 

-

421 

-

422 abs_path = maintscript.escape_shell_words(p.absolute) 

-

423 

-

424 cap_script = "".join( 

-

425 [ 

-

426 " # Triggered by: {DEFINITION_SOURCE}\n" 

-

427 " _TPATH=$(dpkg-divert --truename {ABS_PATH})\n", 

-

428 ' if setcap {CAP} "{DPKG_ROOT_UNQUOTED}${{_TPATH}}"; then\n', 

-

429 ' chmod {MODE} "{DPKG_ROOT_UNQUOTED}${{_TPATH}}"\n', 

-

430 ' echo "Successfully applied capabilities {CAP} on ${{_TPATH}}"\n', 

-

431 " else\n", 

-

432 # We do not reset the mode here; generally a re-install or upgrade would re-store both mode, 

-

433 # and remove the capabilities. 

-

434 ' echo "The setcap failed to processes {CAP} on ${{_TPATH}}; falling back to no capability support" >&2\n', 

-

435 " fi\n", 

-

436 ] 

-

437 ).format( 

-

438 CAP=maintscript.escape_shell_words(capability.capabilities).replace( 

-

439 "\\+", "+" 

-

440 ), 

-

441 DPKG_ROOT_UNQUOTED=DPKG_ROOT_UNQUOTED, 

-

442 ABS_PATH=abs_path, 

-

443 MODE=maintscript.escape_shell_words(str(capability.capability_mode)), 

-

444 DEFINITION_SOURCE=capability.definition_source.replace("\n", "\\n"), 

-

445 ) 

-

446 assert cap_script.endswith("\n") 

-

447 caps.append(cap_script) 

-

448 

-

449 if not caps: 

-

450 return 

-

451 

-

452 maintscript.on_configure( 

-

453 textwrap.dedent( 

-

454 """\ 

-

455 if command -v setcap > /dev/null; then 

-

456 {SET_CAP_COMMANDS} 

-

457 unset _TPATH 

-

458 else 

-

459 echo "The setcap utility is not installed available; falling back to no capability support" >&2 

-

460 fi 

-

461 """ 

-

462 ).format( 

-

463 SET_CAP_COMMANDS="".join(caps).rstrip("\n"), 

-

464 ) 

-

465 ) 

-

466 

-

467 

-

468def pam_auth_update( 

-

469 fs_root: VirtualPath, 

-

470 ctrl: BinaryCtrlAccessor, 

-

471 _context: PackageProcessingContext, 

-

472) -> None: 

-

473 pam_configs = fs_root.lookup("/usr/share/pam-configs") 

-

474 if not pam_configs: 

-

475 return 

-

476 maintscript = ctrl.maintscript 

-

477 for pam_config in pam_configs.iterdir: 

-

478 if not pam_config.is_file: 478 ↛ 479line 478 didn't jump to line 479, because the condition on line 478 was never true

-

479 continue 

-

480 maintscript.on_configure("pam-auth-update --package\n") 

-

481 maintscript.on_before_removal( 

-

482 textwrap.dedent( 

-

483 f"""\ 

-

484 if [ "${{DPKG_MAINTSCRIPT_PACKAGE_REFCOUNT:-1}}" = 1 ]; then 

-

485 pam-auth-update --package --remove {maintscript.escape_shell_words(pam_config.name)} 

-

486 fi 

-

487 """ 

-

488 ) 

-

489 ) 

-

490 

-

491 

-

492def auto_depends_arch_any_solink( 

-

493 fs_foot: VirtualPath, 

-

494 ctrl: BinaryCtrlAccessor, 

-

495 context: PackageProcessingContext, 

-

496) -> None: 

-

497 package = context.binary_package 

-

498 if package.is_arch_all: 

-

499 return 

-

500 libbasedir = fs_foot.lookup("usr/lib") 

-

501 if not libbasedir: 

-

502 return 

-

503 libmadir = libbasedir.get(package.deb_multiarch) 

-

504 if libmadir: 504 ↛ 507line 504 didn't jump to line 507, because the condition on line 504 was never false

-

505 libdirs = [libmadir, libbasedir] 

-

506 else: 

-

507 libdirs = [libbasedir] 

-

508 targets = [] 

-

509 for libdir in libdirs: 

-

510 for path in libdir.iterdir: 

-

511 if not path.is_symlink or not path.name.endswith(".so"): 

-

512 continue 

-

513 target = path.readlink() 

-

514 resolved = assume_not_none(path.parent_dir).lookup(target) 

-

515 if resolved is not None: 515 ↛ 516line 515 didn't jump to line 516, because the condition on line 515 was never true

-

516 continue 

-

517 targets.append((libdir.path, target)) 

-

518 

-

519 roots = list(context.accessible_package_roots()) 

-

520 if not roots: 

-

521 return 

-

522 

-

523 for libdir, target in targets: 

-

524 final_path = os.path.join(libdir, target) 

-

525 matches = [] 

-

526 for opkg, ofs_root in roots: 

-

527 m = ofs_root.lookup(final_path) 

-

528 if not m: 528 ↛ 529line 528 didn't jump to line 529, because the condition on line 528 was never true

-

529 continue 

-

530 matches.append(opkg) 

-

531 if not matches or len(matches) > 1: 

-

532 if matches: 532 ↛ 539line 532 didn't jump to line 539, because the condition on line 532 was never false

-

533 all_matches = ", ".join(p.name for p in matches) 

-

534 _warn( 

-

535 f"auto-depends-solink: The {final_path} was found in multiple packages ({all_matches}):" 

-

536 f" Not generating a dependency." 

-

537 ) 

-

538 else: 

-

539 _warn( 

-

540 f"auto-depends-solink: The {final_path} was NOT found in any accessible package:" 

-

541 " Not generating a dependency. This detection only works when both packages are arch:any" 

-

542 " and they have the same build-profiles." 

-

543 ) 

-

544 continue 

-

545 pkg_dep = matches[0] 

-

546 # The debputy API should not allow this constraint to fail 

-

547 assert pkg_dep.is_arch_all == package.is_arch_all 

-

548 # If both packages are arch:all or both are arch:any, we can generate a tight dependency 

-

549 relation = f"{pkg_dep.name} (= ${{binary:Version}})" 

-

550 ctrl.substvars.add_dependency("misc:Depends", relation) 

-
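Every detector in the listing above follows the same shape: probe the
package's virtual file system and, when matching paths exist, attach a
maintainer-script snippet and/or a misc:Depends substvar. A minimal sketch of
that shape, assuming the debputy.plugin.api re-exports used by the listing;
the example.d directory and the example-tool dependency are hypothetical:

    import textwrap

    from debputy.plugin.api import (
        BinaryCtrlAccessor,
        PackageProcessingContext,
        VirtualPath,
    )

    def detect_example_confs(
        fs_root: VirtualPath,
        ctrl: BinaryCtrlAccessor,
        _unused: PackageProcessingContext,
    ) -> None:
        # Probe the (hypothetical) config directory inside the package tree.
        conf_dir = fs_root.lookup("/usr/lib/example.d")
        if not conf_dir:
            return
        confs = [p.name for p in conf_dir.iterdir if p.name.endswith(".conf")]
        if not confs:
            return
        # Shell-escape the file names, then register a postinst snippet plus
        # the dependency that provides the tool the snippet runs.
        escaped = ctrl.maintscript.escape_shell_words(*confs)
        ctrl.substvars.add_dependency("misc:Depends", "example-tool")
        ctrl.maintscript.on_configure(
            textwrap.dedent(
                f"""\
                example-tool --apply {escaped} || true
                """
            )
        )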
diff --git a/coverage-report/d_d5d6843b45eec01e_package_processors_py.html b/coverage-report/d_d5d6843b45eec01e_package_processors_py.html
deleted file mode 100644
index 9754ba3..0000000
--- a/coverage-report/d_d5d6843b45eec01e_package_processors_py.html
+++ /dev/null
@@ -1,419 +0,0 @@
-[report header: "Coverage for src/debputy/plugin/debputy/package_processors.py: 54%",
- 168 statements, coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

-[HTML-rendered coverage listing for source lines 1-320 of
- src/debputy/plugin/debputy/package_processors.py: process_manpages() replaces
- small (<= 1024 bytes) ".so"-style man pages with symlinks via
- _detect_so_link() and _replace_with_symlink(), then batch re-encodes the rest
- to UTF-8 with man-recode (guarded by _has_man_recode(), which runs
- "man-recode --help" to confirm the tool actually works); apply_compression()
- gzips (-9nc, reproducibly) eligible files under usr/share/info, usr/share/man
- and usr/share/doc (the latter with a 4096-byte threshold except for
- changelog/NEWS) plus X11 .pcf fonts, skipping a long list of
- already-compressed or web-asset extensions, and finishes with a fix-point
- pass that retargets symlinks to the new .gz names; clean_la_files() empties
- the dependency_libs line of /usr/lib/*.la files, porting the dh_gnome
- behaviour]
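The most self-contained step in the listing above is the per-file
compression: each eligible path is re-created as a ".gz" sibling and the
original removed. A sketch of that core step, following the
apply_compression() body rendered in the report (the free-standing function
wrapper is mine):

    import subprocess

    from debputy.plugin.api import VirtualPath

    def compress_to_gz(path: VirtualPath, parent_dir: VirtualPath) -> None:
        # "gzip -9nc": maximum compression, no embedded name/timestamp (keeps
        # the output reproducible), write to stdout so the destination is the
        # new file's backing fs path. Drop the uncompressed original after.
        with parent_dir.add_file(f"{path.name}.gz", mtime=path.mtime) as new_file, open(
            new_file.fs_path, "wb"
        ) as fd:
            subprocess.check_call(["gzip", "-9nc", path.fs_path], stdout=fd)
        del parent_dir[path.name]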
diff --git a/coverage-report/d_d5d6843b45eec01e_paths_py.html b/coverage-report/d_d5d6843b45eec01e_paths_py.html
deleted file mode 100644
index 1806c21..0000000
--- a/coverage-report/d_d5d6843b45eec01e_paths_py.html
+++ /dev/null
@@ -1,103 +0,0 @@
-[report header: "Coverage for src/debputy/plugin/debputy/paths.py: 100%",
- 4 statements, coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

-[rendered module source:]
-GSETTINGS_SCHEMA_DIR = "/usr/share/glib-2.0/schemas"
-INITRAMFS_HOOK_DIR = "/usr/share/initramfs-tools/hooks"
-SYSTEMD_TMPFILES_DIR = "/usr/lib/tmpfiles.d"
-SYSTEMD_SYSUSERS_DIR = "/usr/lib/sysusers.d"
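These constants are the lookup keys used by the detectors earlier in this
patch; for instance, the sysusers detector resolves SYSTEMD_SYSUSERS_DIR
against the package root. A small usage sketch (the helper name is mine):

    from debputy.plugin.api import VirtualPath
    from debputy.plugin.debputy.paths import SYSTEMD_SYSUSERS_DIR

    def has_sysusers_conf(fs_root: VirtualPath) -> bool:
        # Mirrors _all_sysusers_conf() from the metadata-detectors listing.
        sysusers_dir = fs_root.lookup(SYSTEMD_SYSUSERS_DIR)
        return sysusers_dir is not None and any(
            child.name.endswith(".conf") for child in sysusers_dir.iterdir
        )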
diff --git a/coverage-report/d_d5d6843b45eec01e_private_api_py.html b/coverage-report/d_d5d6843b45eec01e_private_api_py.html
deleted file mode 100644
index 3843e05..0000000
--- a/coverage-report/d_d5d6843b45eec01e_private_api_py.html
+++ /dev/null
@@ -1,3030 +0,0 @@
-[report header: "Coverage for src/debputy/plugin/debputy/private_api.py: 82%",
- 541 statements, coverage.py v7.2.7, created at 2024-04-07 12:14 +0200]

1import ctypes 

-

2import ctypes.util 

-

3import functools 

-

4import itertools 

-

5import textwrap 

-

6import time 

-

7from datetime import datetime 

-

8from typing import ( 

-

9 cast, 

-

10 NotRequired, 

-

11 Optional, 

-

12 Tuple, 

-

13 Union, 

-

14 Type, 

-

15 TypedDict, 

-

16 List, 

-

17 Annotated, 

-

18 Any, 

-

19 Dict, 

-

20 Callable, 

-

21) 

-

22 

-

23from debian.changelog import Changelog 

-

24from debian.deb822 import Deb822 

-

25 

-

26from debputy import DEBPUTY_DOC_ROOT_DIR 

-

27from debputy._manifest_constants import ( 

-

28 MK_CONFFILE_MANAGEMENT_X_OWNING_PACKAGE, 

-

29 MK_CONFFILE_MANAGEMENT_X_PRIOR_TO_VERSION, 

-

30 MK_INSTALLATIONS_INSTALL_EXAMPLES, 

-

31 MK_INSTALLATIONS_INSTALL, 

-

32 MK_INSTALLATIONS_INSTALL_DOCS, 

-

33 MK_INSTALLATIONS_INSTALL_MAN, 

-

34 MK_INSTALLATIONS_DISCARD, 

-

35 MK_INSTALLATIONS_MULTI_DEST_INSTALL, 

-

36) 

-

37from debputy.exceptions import DebputyManifestVariableRequiresDebianDirError 

-

38from debputy.installations import InstallRule 

-

39from debputy.maintscript_snippet import DpkgMaintscriptHelperCommand 

-

40from debputy.manifest_conditions import ( 

-

41 ManifestCondition, 

-

42 BinaryPackageContextArchMatchManifestCondition, 

-

43 BuildProfileMatch, 

-

44 SourceContextArchMatchManifestCondition, 

-

45) 

-

46from debputy.manifest_parser.base_types import ( 

-

47 DebputyParsedContent, 

-

48 DebputyParsedContentStandardConditional, 

-

49 FileSystemMode, 

-

50 StaticFileSystemOwner, 

-

51 StaticFileSystemGroup, 

-

52 SymlinkTarget, 

-

53 FileSystemExactMatchRule, 

-

54 FileSystemMatchRule, 

-

55 SymbolicMode, 

-

56 TypeMapping, 

-

57 OctalMode, 

-

58 FileSystemExactNonDirMatchRule, 

-

59) 

-

60from debputy.manifest_parser.declarative_parser import DebputyParseHint 

-

61from debputy.manifest_parser.exceptions import ManifestParseException 

-

62from debputy.manifest_parser.mapper_code import type_mapper_str2package 

-

63from debputy.manifest_parser.parser_data import ParserContextData 

-

64from debputy.manifest_parser.util import AttributePath 

-

65from debputy.packages import BinaryPackage 

-

66from debputy.path_matcher import ExactFileSystemPath 

-

67from debputy.plugin.api import ( 

-

68 DebputyPluginInitializer, 

-

69 documented_attr, 

-

70 reference_documentation, 

-

71 VirtualPath, 

-

72 packager_provided_file_reference_documentation, 

-

73) 

-

74from debputy.plugin.api.impl import DebputyPluginInitializerProvider 

-

75from debputy.plugin.api.impl_types import automatic_discard_rule_example, PPFFormatParam 

-

76from debputy.plugin.api.spec import ( 

-

77 type_mapping_reference_documentation, 

-

78 type_mapping_example, 

-

79) 

-

80from debputy.plugin.debputy.binary_package_rules import register_binary_package_rules 

-

81from debputy.plugin.debputy.discard_rules import ( 

-

82 _debputy_discard_pyc_files, 

-

83 _debputy_prune_la_files, 

-

84 _debputy_prune_doxygen_cruft, 

-

85 _debputy_prune_binary_debian_dir, 

-

86 _debputy_prune_info_dir_file, 

-

87 _debputy_prune_backup_files, 

-

88 _debputy_prune_vcs_paths, 

-

89) 

-

90from debputy.plugin.debputy.manifest_root_rules import register_manifest_root_rules 

-

91from debputy.plugin.debputy.package_processors import ( 

-

92 process_manpages, 

-

93 apply_compression, 

-

94 clean_la_files, 

-

95) 

-

96from debputy.plugin.debputy.service_management import ( 

-

97 detect_systemd_service_files, 

-

98 generate_snippets_for_systemd_units, 

-

99 detect_sysv_init_service_files, 

-

100 generate_snippets_for_init_scripts, 

-

101) 

-

102from debputy.plugin.debputy.shlib_metadata_detectors import detect_shlibdeps 

-

103from debputy.plugin.debputy.strip_non_determinism import strip_non_determinism 

-

104from debputy.substitution import VariableContext 

-

105from debputy.transformation_rules import ( 

-

106 CreateSymlinkReplacementRule, 

-

107 TransformationRule, 

-

108 CreateDirectoryTransformationRule, 

-

109 RemoveTransformationRule, 

-

110 MoveTransformationRule, 

-

111 PathMetadataTransformationRule, 

-

112 CreateSymlinkPathTransformationRule, 

-

113) 

-

114from debputy.util import ( 

-

115 _normalize_path, 

-

116 PKGNAME_REGEX, 

-

117 PKGVERSION_REGEX, 

-

118 debian_policy_normalize_symlink_target, 

-

119 active_profiles_match, 

-

120 _error, 

-

121 _warn, 

-

122 _info, 

-

123 assume_not_none, 

-

124) 

-

125 

-

126_DOCUMENTED_DPKG_ARCH_TYPES = { 

-

127 "HOST": ( 

-

128 "installed on", 

-

129 "The package will be **installed** on this type of machine / system", 

-

130 ), 

-

131 "BUILD": ( 

-

132 "compiled on", 

-

133 "The compilation of this package will be performed **on** this kind of machine / system", 

-

134 ), 

-

135 "TARGET": ( 

-

136 "cross-compiler output", 

-

137 "When building a cross-compiler, it will produce output for this kind of machine/system", 

-

138 ), 

-

139} 

-

140 

-

141_DOCUMENTED_DPKG_ARCH_VARS = { 

-

142 "ARCH": "Debian's name for the architecture", 

-

143 "ARCH_ABI": "Debian's name for the architecture ABI", 

-

144 "ARCH_BITS": "Number of bits in the pointer size", 

-

145 "ARCH_CPU": "Debian's name for the CPU type", 

-

146 "ARCH_ENDIAN": "Endianness of the architecture (little/big)", 

-

147 "ARCH_LIBC": "Debian's name for the libc implementation", 

-

148 "ARCH_OS": "Debian name for the OS/kernel", 

-

149 "GNU_CPU": "GNU's name for the CPU", 

-

150 "GNU_SYSTEM": "GNU's name for the system", 

-

151 "GNU_TYPE": "GNU system type (GNU_CPU and GNU_SYSTEM combined)", 

-

152 "MULTIARCH": "Multi-arch tuple", 

-

153} 

-

154 

-

155 

-

156def _manifest_format_doc(anchor: str) -> str: 

-

157 return f"{DEBPUTY_DOC_ROOT_DIR}/MANIFEST-FORMAT.md#{anchor}" 

-

158 

-

159 

-

160@functools.lru_cache 

-

161def load_libcap() -> Tuple[bool, Optional[str], Callable[[str], bool]]: 

-

162 cap_library_path = ctypes.util.find_library("cap.so") 

-

163 has_libcap = False 

-

164 libcap = None 

-

165 if cap_library_path: 165 ↛ 172line 165 didn't jump to line 172, because the condition on line 165 was never false

-

166 try: 

-

167 libcap = ctypes.cdll.LoadLibrary(cap_library_path) 

-

168 has_libcap = True 

-

169 except OSError: 

-

170 pass 

-

171 

-

172 if libcap is None: 172 ↛ 173line 172 didn't jump to line 173, because the condition on line 172 was never true

-

173 warned = False 

-

174 

-

175 def _is_valid_cap(cap: str) -> bool: 

-

176 nonlocal warned 

-

177 if not warned: 

-

178 _info( 

-

179 "Could not load libcap.so; will not validate capabilities. Use `apt install libcap2` to provide" 

-

180 " checking of capabilities." 

-

181 ) 

-

182 warned = True 

-

183 return True 

-

184 

-

185 else: 

-

186 # cap_t cap_from_text(const char *path_p) 

-

187 libcap.cap_from_text.argtypes = [ctypes.c_char_p] 

-

188 libcap.cap_from_text.restype = ctypes.c_char_p 

-

189 

-

190 libcap.cap_free.argtypes = [ctypes.c_void_p] 

-

191 libcap.cap_free.restype = None 

-

192 

-

193 def _is_valid_cap(cap: str) -> bool: 

-

194 cap_t = libcap.cap_from_text(cap.encode("utf-8")) 

-

195 ok = cap_t is not None 

-

196 libcap.cap_free(cap_t) 

-

197 return ok 

-

198 

-

199 return has_libcap, cap_library_path, _is_valid_cap 

-

200 

-

201 

-

202def check_cap_checker() -> Callable[[str, str], None]: 

-

203 _, libcap_path, is_valid_cap = load_libcap() 

-

204 

-

205 seen_cap = set() 

-

206 

-

207 def _check_cap(cap: str, definition_source: str) -> None: 

-

208 if cap not in seen_cap and not is_valid_cap(cap): 

-

209 seen_cap.add(cap) 

-

210 cap_path = f" ({libcap_path})" if libcap_path is not None else "" 

-

211 _warn( 

-

212 f'The capabilities "{cap}" provided in {definition_source} were not understood by' 

-

213 f" libcap.so{cap_path}. Please verify you provided the correct capabilities." 

-

214 f" Note: This warning can be a false-positive if you are targeting a newer libcap.so" 

-

215 f" than the one installed on this system." 

-

216 ) 

-

217 

-

218 return _check_cap 

-

219 

-

220 

-

221def load_source_variables(variable_context: VariableContext) -> Dict[str, str]: 

-

222 try: 

-

223 changelog = variable_context.debian_dir.lookup("changelog") 

-

224 if changelog is None: 

-

225 raise DebputyManifestVariableRequiresDebianDirError( 

-

226 "The changelog was not present" 

-

227 ) 

-

228 with changelog.open() as fd: 

-

229 dch = Changelog(fd, max_blocks=2) 

-

230 except FileNotFoundError as e: 

-

231 raise DebputyManifestVariableRequiresDebianDirError( 

-

232 "The changelog was not present" 

-

233 ) from e 

-

234 first_entry = dch[0] 

-

235 first_non_binnmu_entry = dch[0] 

-

236 if first_non_binnmu_entry.other_pairs.get("binary-only", "no") == "yes": 

-

237 first_non_binnmu_entry = dch[1] 

-

238 assert first_non_binnmu_entry.other_pairs.get("binary-only", "no") == "no" 

-

239 source_version = first_entry.version 

-

240 epoch = source_version.epoch 

-

241 upstream_version = source_version.upstream_version 

-

242 debian_revision = source_version.debian_revision 

-

243 epoch_upstream = upstream_version 

-

244 upstream_debian_revision = upstream_version 

-

245 if epoch is not None and epoch != "": 245 ↛ 247line 245 didn't jump to line 247, because the condition on line 245 was never false

-

246 epoch_upstream = f"{epoch}:{upstream_version}" 

-

247 if debian_revision is not None and debian_revision != "": 247 ↛ 250line 247 didn't jump to line 250, because the condition on line 247 was never false

-

248 upstream_debian_revision = f"{upstream_version}-{debian_revision}" 

-

249 

-

250 package = first_entry.package 

-

251 if package is None: 251 ↛ 252line 251 didn't jump to line 252, because the condition on line 251 was never true

-

252 _error("Cannot determine the source package name from debian/changelog.") 

-

253 

-

254 date = first_entry.date 

-

255 if date is not None: 255 ↛ 259line 255 didn't jump to line 259, because the condition on line 255 was never false

-

256 local_time = datetime.strptime(date, "%a, %d %b %Y %H:%M:%S %z") 

-

257 source_date_epoch = str(int(local_time.timestamp())) 

-

258 else: 

-

259 _warn( 

-

260 "The latest changelog entry does not have a (parsable) date, using current time" 

-

261 " for SOURCE_DATE_EPOCH" 

-

262 ) 

-

263 source_date_epoch = str(int(time.time())) 

-

264 

-

265 if first_non_binnmu_entry is not first_entry: 

-

266 non_binnmu_date = first_non_binnmu_entry.date 

-

267 if non_binnmu_date is not None: 267 ↛ 271line 267 didn't jump to line 271, because the condition on line 267 was never false

-

268 local_time = datetime.strptime(non_binnmu_date, "%a, %d %b %Y %H:%M:%S %z") 

-

269 snd_source_date_epoch = str(int(local_time.timestamp())) 

-

270 else: 

-

271 _warn( 

-

272 "The latest (non-binNMU) changelog entry does not have a (parsable) date, using current time" 

-

273 " for SOURCE_DATE_EPOCH (for strip-nondeterminism)" 

-

274 ) 

-

275 snd_source_date_epoch = source_date_epoch = str(int(time.time())) 

-

276 else: 

-

277 snd_source_date_epoch = source_date_epoch 

-

278 return { 

-

279 "DEB_SOURCE": package, 

-

280 "DEB_VERSION": source_version.full_version, 

-

281 "DEB_VERSION_EPOCH_UPSTREAM": epoch_upstream, 

-

282 "DEB_VERSION_UPSTREAM_REVISION": upstream_debian_revision, 

-

283 "DEB_VERSION_UPSTREAM": upstream_version, 

-

284 "SOURCE_DATE_EPOCH": source_date_epoch, 

-

285 "_DEBPUTY_INTERNAL_NON_BINNMU_SOURCE": str(first_non_binnmu_entry.version), 

-

286 "_DEBPUTY_SND_SOURCE_DATE_EPOCH": snd_source_date_epoch, 

-

287 } 

-

288 

-

289 

-

290def initialize_via_private_api(public_api: DebputyPluginInitializer) -> None: 

-

291 api = cast("DebputyPluginInitializerProvider", public_api) 

-

292 

-

293 api.metadata_or_maintscript_detector( 

-

294 "dpkg-shlibdeps", 

-

295 # Private because detect_shlibdeps expects private API (hench this cast) 

-

296 cast("MetadataAutoDetector", detect_shlibdeps), 

-

297 package_type={"deb", "udeb"}, 

-

298 ) 

-

299 register_type_mappings(api) 

-

300 register_variables_via_private_api(api) 

-

301 document_builtin_variables(api) 

-

302 register_automatic_discard_rules(api) 

-

303 register_special_ppfs(api) 

-

304 register_install_rules(api) 

-

305 register_transformation_rules(api) 

-

306 register_manifest_condition_rules(api) 

-

307 register_dpkg_conffile_rules(api) 

-

308 register_processing_steps(api) 

-

309 register_service_managers(api) 

-

310 register_manifest_root_rules(api) 

-

311 register_binary_package_rules(api) 

-

312 

-

313 

-

314def register_type_mappings(api: DebputyPluginInitializerProvider) -> None: 

-

315 api.register_mapped_type( 

-

316 TypeMapping( 

-

317 FileSystemMatchRule, 

-

318 str, 

-

319 FileSystemMatchRule.parse_path_match, 

-

320 ), 

-

321 reference_documentation=type_mapping_reference_documentation( 

-

322 description=textwrap.dedent( 

-

323 """\ 

-

324 A generic file system path match with globs. 

-

325 

-

326 Manifest variable substitution will be applied and glob expansion will be performed. 

-

327 

-

328 The match will be read as one of the following cases: 

-

329 

-

330 - Exact path match if there is no globs characters like `usr/bin/debputy` 

-

331 - A basename glob like `*.txt` or `**/foo` 

-

332 - A generic path glob otherwise like `usr/lib/*.so*` 

-

333 

-

334 Except for basename globs, all matches are always relative to the root directory of 

-

335 the match, which is typically the package root directory or a search directory. 

-

336 

-

337 For basename globs, any path matching that basename beneath the package root directory 

-

338 or relevant search directories will match. 

-

339 

-

340 Please keep in mind that: 

-

341 

-

342 * glob patterns often have to be quoted as YAML interpret the glob metacharacter as 

-

343 an anchor reference. 

-

344 

-

345 * Directories can be matched via this type. Whether the rule using this type 

-

346 recurse into the directory depends on the usage and not this type. Related, if 

-

347 value for this rule ends with a literal "/", then the definition can *only* match 

-

348 directories (similar to the shell). 

-

349 

-

350 * path matches involving glob expansion are often subject to different rules than 

-

351 path matches without them. As an example, automatic discard rules does not apply 

-

352 to exact path matches, but they will filter out glob matches. 

-

353 """, 

-

354 ), 

-

355 examples=[ 

-

356 type_mapping_example("usr/bin/debputy"), 

-

357 type_mapping_example("*.txt"), 

-

358 type_mapping_example("**/foo"), 

-

359 type_mapping_example("usr/lib/*.so*"), 

-

360 type_mapping_example("usr/share/foo/data-*/"), 

-

361 ], 

-

362 ), 

-

363 ) 

-

364 

-

365 api.register_mapped_type( 

-

366 TypeMapping( 

-

367 FileSystemExactMatchRule, 

-

368 str, 

-

369 FileSystemExactMatchRule.parse_path_match, 

-

370 ), 

-

371 reference_documentation=type_mapping_reference_documentation( 

-

372 description=textwrap.dedent( 

-

373 """\ 

-

374 A file system match that does **not** expand globs. 

-

375 

-

376 Manifest variable substitution will be applied. However, globs will not be expanded. 

-

377 Any glob metacharacters will be interpreted as a literal part of path. 

-

378 

-

379 Note that a directory can be matched via this type. Whether the rule using this type 

-

380 recurse into the directory depends on the usage and is not defined by this type. 

-

381 Related, if value for this rule ends with a literal "/", then the definition can 

-

382 *only* match directories (similar to the shell). 

-

383 """, 

-

384 ), 

-

385 examples=[ 

-

386 type_mapping_example("usr/bin/dpkg"), 

-

387 type_mapping_example("usr/share/foo/"), 

-

388 type_mapping_example("usr/share/foo/data.txt"), 

-

389 ], 

-

390 ), 

-

391 ) 

-

392 

-

393 api.register_mapped_type( 

-

394 TypeMapping( 

-

395 FileSystemExactNonDirMatchRule, 

-

396 str, 

-

397 FileSystemExactNonDirMatchRule.parse_path_match, 

-

398 ), 

-

399 reference_documentation=type_mapping_reference_documentation( 

-

400 description=textwrap.dedent( 

-

401 f"""\ 

-

402 A file system match that does **not** expand globs and must not match a directory. 

-

403 

-

404 Manifest variable substitution will be applied. However, globs will not be expanded. 

-

405 Any glob metacharacters will be interpreted as a literal part of path. 

-

406 

-

407 This is like {FileSystemExactMatchRule.__name__} except that the match will fail if the 

-

408 provided path matches a directory. Since a directory cannot be matched, it is an error 

-

409 for any input to end with a "/" as only directories can be matched if the path ends 

-

410 with a "/". 

-

411 """, 

-

412 ), 

-

413 examples=[ 

-

414 type_mapping_example("usr/bin/dh_debputy"), 

-

415 type_mapping_example("usr/share/foo/data.txt"), 

-

416 ], 

-

417 ), 

-

418 ) 

-

419 

-

420 api.register_mapped_type( 

-

421 TypeMapping( 

-

422 SymlinkTarget, 

-

423 str, 

-

424 lambda v, ap, pc: SymlinkTarget.parse_symlink_target( 

-

425 v, ap, assume_not_none(pc).substitution 

-

426 ), 

-

427 ), 

-

428 reference_documentation=type_mapping_reference_documentation( 

-

429 description=textwrap.dedent( 

-

430 """\ 

-

431 A symlink target. 

-

432 

-

433 Manifest variable substitution will be applied. This is distinct from an exact file 

-

434 system match in that a symlink target is not relative to the package root by default 

-

435 (explicitly prefix for "/" for absolute path targets) 

-

436 

-

437 Note that `debputy` will policy normalize symlinks when assembling the deb, so 

-

438 use of relative or absolute symlinks comes down to preference. 

-

439 """, 

-

440 ), 

-

441 examples=[ 

-

442 type_mapping_example("../foo"), 

-

443 type_mapping_example("/usr/share/doc/bar"), 

-

444 ], 

-

445 ), 

-

446 ) 

-

447 

-

448 api.register_mapped_type( 

-

449 TypeMapping( 

-

450 StaticFileSystemOwner, 

-

451 Union[int, str], 

-

452 lambda v, ap, _: StaticFileSystemOwner.from_manifest_value(v, ap), 

-

453 ), 

-

454 reference_documentation=type_mapping_reference_documentation( 

-

455 description=textwrap.dedent( 

-

456 """\ 

-

457 File system owner reference that is part of the passwd base data (such as "root"). 

-

458 

-

459 The group can be provided in either of the following three forms: 

-

460 

-

461 * A name (recommended), such as "root" 

-

462 * The UID in the form of an integer (that is, no quoting), such as 0 (for "root") 

-

463 * The name and the UID separated by colon such as "root:0" (for "root"). 

-

464 

-

465 Note in the last case, the `debputy` will validate that the name and the UID match. 

-

466 

-

467 Some owners (such as "nobody") are deliberately disallowed. 

-

468 """ 

-

469 ), 

-

470 examples=[ 

-

471 type_mapping_example("root"), 

-

472 type_mapping_example(0), 

-

473 type_mapping_example("root:0"), 

-

474 type_mapping_example("bin"), 

-

475 ], 

-

476 ), 

-

477 ) 

-

478 api.register_mapped_type( 

-

479 TypeMapping( 

-

480 StaticFileSystemGroup, 

-

481 Union[int, str], 

-

482 lambda v, ap, _: StaticFileSystemGroup.from_manifest_value(v, ap), 

-

483 ), 

-

484 reference_documentation=type_mapping_reference_documentation( 

-

485 description=textwrap.dedent( 

-

486 """\ 

-

487 File system group reference that is part of the passwd base data (such as "root"). 

-

488 

-

489 The group can be provided in either of the following three forms: 

-

490 

-

491 * A name (recommended), such as "root" 

-

492 * The GID in the form of an integer (that is, no quoting), such as 0 (for "root") 

-

493 * The name and the GID separated by colon such as "root:0" (for "root"). 

-

494 

-

495 Note in the last case, the `debputy` will validate that the name and the GID match. 

-

496 

-

497 Some owners (such as "nobody") are deliberately disallowed. 

-

498 """ 

-

499 ), 

-

500 examples=[ 

-

501 type_mapping_example("root"), 

-

502 type_mapping_example(0), 

-

503 type_mapping_example("root:0"), 

-

504 type_mapping_example("tty"), 

-

505 ], 

-

506 ), 

-

507 ) 

-

508 

-

509 api.register_mapped_type( 

-

510 TypeMapping( 

-

511 BinaryPackage, 

-

512 str, 

-

513 type_mapper_str2package, 

-

514 ), 

-

515 reference_documentation=type_mapping_reference_documentation( 

-

516 description="Name of a package in debian/control", 

-

517 ), 

-

518 ) 

-

519 

-

520 api.register_mapped_type( 

-

521 TypeMapping( 

-

522 FileSystemMode, 

-

523 str, 

-

524 lambda v, ap, _: FileSystemMode.parse_filesystem_mode(v, ap), 

-

525 ), 

-

526 reference_documentation=type_mapping_reference_documentation( 

-

527 description="Either an octal mode or symbolic mode", 

-

528 examples=[ 

-

529 type_mapping_example("a+x"), 

-

530 type_mapping_example("u=rwX,go=rX"), 

-

531 type_mapping_example("0755"), 

-

532 ], 

-

533 ), 

-

534 ) 

-

535 api.register_mapped_type( 535 ↛ exitline 535 didn't jump to the function exit

-

536 TypeMapping( 

-

537 OctalMode, 

-

538 str, 

-

539 lambda v, ap, _: OctalMode.parse_filesystem_mode(v, ap), 

-

540 ), 

-

541 reference_documentation=type_mapping_reference_documentation( 

-

542 description="An octal mode. Must always be a string.", 

-

543 examples=[ 

-

544 type_mapping_example("0644"), 

-

545 type_mapping_example("0755"), 

-

546 ], 

-

547 ), 

-

548 ) 

-

549 

-

550 

-

551def register_service_managers( 

-

552 api: DebputyPluginInitializerProvider, 

-

553) -> None: 

-

554 api.service_provider( 

-

555 "systemd", 

-

556 detect_systemd_service_files, 

-

557 generate_snippets_for_systemd_units, 

-

558 ) 

-

559 api.service_provider( 

-

560 "sysvinit", 

-

561 detect_sysv_init_service_files, 

-

562 generate_snippets_for_init_scripts, 

-

563 ) 

-

564 

-

565 

-

566def register_automatic_discard_rules( 

-

567 api: DebputyPluginInitializerProvider, 

-

568) -> None: 

-

569 api.automatic_discard_rule( 

-

570 "python-cache-files", 

-

571 _debputy_discard_pyc_files, 

-

572 rule_reference_documentation="Discards any *.pyc, *.pyo files and any __pycache__ directories", 

-

573 examples=automatic_discard_rule_example( 

-

574 (".../foo.py", False), 

-

575 ".../__pycache__/", 

-

576 ".../__pycache__/...", 

-

577 ".../foo.pyc", 

-

578 ".../foo.pyo", 

-

579 ), 

-

580 ) 

-

581 api.automatic_discard_rule( 

-

582 "la-files", 

-

583 _debputy_prune_la_files, 

-

584 rule_reference_documentation="Discards any file with the extension .la beneath the directory /usr/lib", 

-

585 examples=automatic_discard_rule_example( 

-

586 "usr/lib/libfoo.la", 

-

587 ("usr/lib/libfoo.so.1.0.0", False), 

-

588 ), 

-

589 ) 

-

590 api.automatic_discard_rule( 

-

591 "backup-files", 

-

592 _debputy_prune_backup_files, 

-

593 rule_reference_documentation="Discards common back up files such as foo~, foo.bak or foo.orig", 

-

594 examples=( 

-

595 automatic_discard_rule_example( 

-

596 ".../foo~", 

-

597 ".../foo.orig", 

-

598 ".../foo.rej", 

-

599 ".../DEADJOE", 

-

600 ".../.foo.sw.", 

-

601 ), 

-

602 ), 

-

603 ) 

-

604 api.automatic_discard_rule( 

-

605 "version-control-paths", 

-

606 _debputy_prune_vcs_paths, 

-

607 rule_reference_documentation="Discards common version control paths such as .git, .gitignore, CVS, etc.", 

-

608 examples=automatic_discard_rule_example( 

-

609 ("tools/foo", False), 

-

610 ".../CVS/", 

-

611 ".../CVS/...", 

-

612 ".../.gitignore", 

-

613 ".../.gitattributes", 

-

614 ".../.git/", 

-

615 ".../.git/...", 

-

616 ), 

-

617 ) 

-

618 api.automatic_discard_rule( 

-

619 "gnu-info-dir-file", 

-

620 _debputy_prune_info_dir_file, 

-

621 rule_reference_documentation="Discards the /usr/share/info/dir file (causes package file conflicts)", 

-

622 examples=automatic_discard_rule_example( 

-

623 "usr/share/info/dir", 

-

624 ("usr/share/info/foo.info", False), 

-

625 ("usr/share/info/dir.info", False), 

-

626 ("usr/share/random/case/dir", False), 

-

627 ), 

-

628 ) 

-

629 api.automatic_discard_rule( 

-

630 "debian-dir", 

-

631 _debputy_prune_binary_debian_dir, 

-

632 rule_reference_documentation="(Implementation detail) Discards any DEBIAN directory to avoid it from appearing" 

-

633 " literally in the file listing", 

-

634 examples=( 

-

635 automatic_discard_rule_example( 

-

636 "DEBIAN/", 

-

637 "DEBIAN/control", 

-

638 ("usr/bin/foo", False), 

-

639 ("usr/share/DEBIAN/foo", False), 

-

640 ), 

-

641 ), 

-

642 ) 

-

643 api.automatic_discard_rule( 

-

644 "doxygen-cruft-files", 

-

645 _debputy_prune_doxygen_cruft, 

-

646 rule_reference_documentation="Discards cruft files generated by doxygen", 

-

647 examples=automatic_discard_rule_example( 

-

648 ("usr/share/doc/foo/api/doxygen.css", False), 

-

649 ("usr/share/doc/foo/api/doxygen.svg", False), 

-

650 ("usr/share/doc/foo/api/index.html", False), 

-

651 "usr/share/doc/foo/api/.../cruft.map", 

-

652 "usr/share/doc/foo/api/.../cruft.md5", 

-

653 ), 

-

654 ) 

-

655 

-

656 

-

657def register_processing_steps(api: DebputyPluginInitializerProvider) -> None: 

-

658 api.package_processor("manpages", process_manpages) 

-

659 api.package_processor("clean-la-files", clean_la_files) 

-

660 # strip-non-determinism makes assumptions about the PackageProcessingContext implementation 

-

661 api.package_processor( 

-

662 "strip-nondeterminism", 

-

663 cast("Any", strip_non_determinism), 

-

664 depends_on_processor=["manpages"], 

-

665 ) 

-

666 api.package_processor( 

-

667 "compression", 

-

668 apply_compression, 

-

669 depends_on_processor=["manpages", "strip-nondeterminism"], 

-

670 ) 

-

671 

-

672 

-

673def register_variables_via_private_api(api: DebputyPluginInitializerProvider) -> None: 

-

674 api.manifest_variable_provider( 

-

675 load_source_variables, 

-

676 { 

-

677 "DEB_SOURCE": "Name of the source package (`dpkg-parsechangelog -SSource`)", 

-

678 "DEB_VERSION": "Version from the top most changelog entry (`dpkg-parsechangelog -SVersion`)", 

-

679 "DEB_VERSION_EPOCH_UPSTREAM": "Version from the top most changelog entry *without* the Debian revision", 

-

680 "DEB_VERSION_UPSTREAM_REVISION": "Version from the top most changelog entry *without* the epoch", 

-

681 "DEB_VERSION_UPSTREAM": "Upstream version from the top most changelog entry (that is, *without* epoch and Debian revision)", 

-

682 "SOURCE_DATE_EPOCH": textwrap.dedent( 

-

683 """\ 

-

684 Timestamp from the top most changelog entry (`dpkg-parsechangelog -STimestamp`) 

-

685 Please see <https://reproducible-builds.org/docs/source-date-epoch/> for the full definition of 

-

686 this variable. 

-

687 """ 

-

688 ), 

-

689 "_DEBPUTY_INTERNAL_NON_BINNMU_SOURCE": None, 

-

690 "_DEBPUTY_SND_SOURCE_DATE_EPOCH": None, 

-

691 }, 

-

692 ) 

-

693 

-

694 

-

695def document_builtin_variables(api: DebputyPluginInitializerProvider) -> None: 

-

696 api.document_builtin_variable( 

-

697 "PACKAGE", 

-

698 "Name of the binary package (only available in binary context)", 

-

699 is_context_specific=True, 

-

700 ) 

-

701 

-

702 arch_types = _DOCUMENTED_DPKG_ARCH_TYPES 

-

703 

-

704 for arch_type, (arch_type_tag, arch_type_doc) in arch_types.items(): 

-

705 for arch_var, arch_var_doc in _DOCUMENTED_DPKG_ARCH_VARS.items(): 

-

706 full_var = f"DEB_{arch_type}_{arch_var}" 

-

707 documentation = textwrap.dedent( 

-

708 f"""\ 

-

709 {arch_var_doc} ({arch_type_tag}) 

-

710 This variable describes machine information used when the package is compiled and assembled. 

-

711 * Machine type: {arch_type_doc} 

-

712 * Value description: {arch_var_doc} 

-

713 

-

714 The value is the output of: `dpkg-architecture -q{full_var}` 

-

715 """ 

-

716 ) 

-

717 api.document_builtin_variable( 

-

718 full_var, 

-

719 documentation, 

-

720 is_for_special_case=arch_type != "HOST", 

-

721 ) 

-

722 

-

723 

-

724def _format_docbase_filename( 

-

725 path_format: str, 

-

726 format_param: PPFFormatParam, 

-

727 docbase_file: VirtualPath, 

-

728) -> str: 

-

729 with docbase_file.open() as fd: 

-

730 content = Deb822(fd) 

-

731 proper_name = content["Document"] 

-

732 if proper_name is not None: 732 ↛ 735line 732 didn't jump to line 735, because the condition on line 732 was never false

-

733 format_param["name"] = proper_name 

-

734 else: 

-

735 _warn( 

-

736 f"The docbase file {docbase_file.fs_path} is missing the Document field" 

-

737 ) 

-

738 return path_format.format(**format_param) 

-

739 

-

740 

-

741 def register_special_ppfs(api: DebputyPluginInitializerProvider) -> None:
742     api.packager_provided_file(
743         "doc-base",
744         "/usr/share/doc-base/{owning_package}.{name}",
745         format_callback=_format_docbase_filename,
746     )
747
748     api.packager_provided_file(
749         "shlibs",
750         "DEBIAN/shlibs",
751         allow_name_segment=False,
752         reservation_only=True,
753         reference_documentation=packager_provided_file_reference_documentation(
754             format_documentation_uris=["man:deb-shlibs(5)"],
755         ),
756     )
757     api.packager_provided_file(
758         "symbols",
759         "DEBIAN/symbols",
760         allow_name_segment=False,
761         allow_architecture_segment=True,
762         reservation_only=True,
763         reference_documentation=packager_provided_file_reference_documentation(
764             format_documentation_uris=["man:deb-symbols(5)"],
765         ),
766     )
767     api.packager_provided_file(
768         "templates",
769         "DEBIAN/templates",
770         allow_name_segment=False,
771         allow_architecture_segment=False,
772         reservation_only=True,
773     )
774     api.packager_provided_file(
775         "alternatives",
776         "DEBIAN/alternatives",
777         allow_name_segment=False,
778         allow_architecture_segment=True,
779         reservation_only=True,
780     )
781
782
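# Aside: the "doc-base" registration above uses a path template with two
# placeholders. A tiny sketch of how such a template expands; plain str.format
# is conceptually what the format callback works with (package and document
# names below are invented):
path_format = "/usr/share/doc-base/{owning_package}.{name}"
print(path_format.format(owning_package="foo", name="foo-manual"))
# -> /usr/share/doc-base/foo.foo-manual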
783 def register_install_rules(api: DebputyPluginInitializerProvider) -> None:
784     api.pluggable_manifest_rule(
785         InstallRule,
786         MK_INSTALLATIONS_INSTALL,
787         ParsedInstallRule,
788         _install_rule_handler,
789         source_format=_with_alt_form(ParsedInstallRuleSourceFormat),
790         inline_reference_documentation=reference_documentation(
791             title="Generic install (`install`)",
792             description=textwrap.dedent(
793                 """\
794                 The generic `install` rule can be used to install arbitrary paths into packages
795                 and is *similar* to how `dh_install` from debhelper works. It has two "primary" uses.
796
797                 1) The classic "install into directory" similar to the standard `dh_install`
798                 2) The "install as" similar to `dh-exec`'s `foo => bar` feature.
799
800                 The `install` rule installs a path exactly once into each package it acts on. In
801                 the rare case that you want to install the same source *multiple* times into the
802                 *same* packages, please have a look at `{MULTI_DEST_INSTALL}`.
803                 """.format(
804                     MULTI_DEST_INSTALL=MK_INSTALLATIONS_MULTI_DEST_INSTALL
805                 )
806             ),
807             non_mapping_description=textwrap.dedent(
808                 """\
809                 When the input is a string or a list of strings, then that value is used as shorthand
810                 for `source` or `sources` (respectively). This form can only be used when `into` is
811                 not required.
812                 """
813             ),
814             attributes=[
815                 documented_attr(
816                     ["source", "sources"],
817                     textwrap.dedent(
818                         """\
819                         A path match (`source`) or a list of path matches (`sources`) defining the
820                         source path(s) to be installed. The path match(es) can use globs. Each match
821                         is tried against default search directories.
822                         - When a symlink is matched, then the symlink (not its target) is installed
823                           as-is. When a directory is matched, then the directory is installed along
824                           with all the contents that have not already been installed somewhere.
825                         """
826                     ),
827                 ),
828                 documented_attr(
829                     "dest_dir",
830                     textwrap.dedent(
831                         """\
832                         A path defining the destination *directory*. The value *cannot* use globs, but can
833                         use substitution. If neither `as` nor `dest-dir` is given, then `dest-dir` defaults
834                         to the directory name of the `source`.
835                         """
836                     ),
837                 ),
838                 documented_attr(
839                     "into",
840                     textwrap.dedent(
841                         """\
842                         Either a package name or a list of package names for which these paths should be
843                         installed. This key is conditional on whether there are multiple binary packages listed
844                         in `debian/control`. When there is only one binary package, then that binary is the
845                         default for `into`. Otherwise, the key is required.
846                         """
847                     ),
848                 ),
849                 documented_attr(
850                     "install_as",
851                     textwrap.dedent(
852                         """\
853                         A path defining the path to install the source as. This is a full path. This option
854                         is mutually exclusive with `dest-dir` and `sources` (but not `source`). When `as` is
855                         given, then `source` must match exactly one "not yet matched" path.
856                         """
857                     ),
858                 ),
859                 documented_attr(
860                     "when",
861                     textwrap.dedent(
862                         """\
863                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
864                         """
865                     ),
866                 ),
867             ],
868             reference_documentation_url=_manifest_format_doc("generic-install-install"),
869         ),
870     )
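# Aside: a hypothetical manifest fragment exercising the `install` rule
# documented above, embedded as a string and parsed with PyYAML so the mapping
# form and the shorthand form can be seen side by side. Package and path names
# are invented for illustration.
import yaml

manifest = yaml.safe_load("""\
installations:
  - install:
      sources:
        - usr/bin/foo
        - usr/share/foo/
      into: foo
  - install: usr/bin/foo-helper   # shorthand; usable when `into` is not required
""")
print(manifest["installations"][0]["install"]["into"])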
871     api.pluggable_manifest_rule(
872         InstallRule,
873         [
874             MK_INSTALLATIONS_INSTALL_DOCS,
875             "install-doc",
876         ],
877         ParsedInstallRule,
878         _install_docs_rule_handler,
879         source_format=_with_alt_form(ParsedInstallDocRuleSourceFormat),
880         inline_reference_documentation=reference_documentation(
881             title="Install documentation (`install-docs`)",
882             description=textwrap.dedent(
883                 """\
884                 This install rule resembles that of `dh_installdocs`. It is a shorthand over the generic
885                 `install` rule with the following key features:
886
887                 1) The default `dest-dir` is to use the package's documentation directory (usually something
888                    like `/usr/share/doc/{{PACKAGE}}`, though it respects the "main documentation package"
889                    recommendation from Debian Policy). The `dest-dir` or `as` can be set in case the
890                    documentation in question goes into another directory or with a concrete path. In this
891                    case, it is still "better" than `install` due to the remaining benefits.
892                 2) The rule comes with pre-defined conditional logic for skipping the rule under
893                    `DEB_BUILD_OPTIONS=nodoc`, so you do not have to write that conditional yourself.
894                 3) The `into` parameter can be omitted as long as there is exactly one non-`udeb`
895                    package listed in `debian/control`.
896
897                 With these things in mind, it behaves just like the `install` rule.
898
899                 Note: It is often worth considering a more specialized version of the `install-docs`
900                 rule when one such is available. If you are looking to install an example or a man page,
901                 consider whether `install-examples` or `install-man` might be a better fit for your
902                 use-case.
903                 """
904             ),
905             non_mapping_description=textwrap.dedent(
906                 """\
907                 When the input is a string or a list of strings, then that value is used as shorthand
908                 for `source` or `sources` (respectively). This form can only be used when `into` is
909                 not required.
910                 """
911             ),
912             attributes=[
913                 documented_attr(
914                     ["source", "sources"],
915                     textwrap.dedent(
916                         """\
917                         A path match (`source`) or a list of path matches (`sources`) defining the
918                         source path(s) to be installed. The path match(es) can use globs. Each match
919                         is tried against default search directories.
920                         - When a symlink is matched, then the symlink (not its target) is installed
921                           as-is. When a directory is matched, then the directory is installed along
922                           with all the contents that have not already been installed somewhere.
923
924                         - **CAVEAT**: Specifying `source: examples` where `examples` resolves to a
925                           directory for `install-examples` will give you an `examples/examples`
926                           directory in the package, which is rarely what you want. Often, you
927                           can solve this by using `examples/*` instead. Similar for `install-docs`
928                           and a `doc` or `docs` directory.
929                         """
930                     ),
931                 ),
932                 documented_attr(
933                     "dest_dir",
934                     textwrap.dedent(
935                         """\
936                         A path defining the destination *directory*. The value *cannot* use globs, but can
937                         use substitution. If neither `as` nor `dest-dir` is given, then `dest-dir` defaults
938                         to the relevant package documentation directory (a la `/usr/share/doc/{{PACKAGE}}`).
939                         """
940                     ),
941                 ),
942                 documented_attr(
943                     "into",
944                     textwrap.dedent(
945                         """\
946                         Either a package name or a list of package names for which these paths should be
947                         installed as documentation. This key is conditional on whether there are multiple
948                         (non-`udeb`) binary packages listed in `debian/control`. When there is only one
949                         (non-`udeb`) binary package, then that binary is the default for `into`. Otherwise,
950                         the key is required.
951                         """
952                     ),
953                 ),
954                 documented_attr(
955                     "install_as",
956                     textwrap.dedent(
957                         """\
958                         A path defining the path to install the source as. This is a full path. This option
959                         is mutually exclusive with `dest-dir` and `sources` (but not `source`). When `as` is
960                         given, then `source` must match exactly one "not yet matched" path.
961                         """
962                     ),
963                 ),
964                 documented_attr(
965                     "when",
966                     textwrap.dedent(
967                         """\
968                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
969                         This condition will be combined with the built-in condition provided by these rules
970                         (rather than replacing it).
971                         """
972                     ),
973                 ),
974             ],
975             reference_documentation_url=_manifest_format_doc(
976                 "install-documentation-install-docs"
977             ),
978         ),
979     )
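# Aside: a hypothetical `install-docs` fragment matching the documentation
# above; note the `docs/*` glob, which avoids the `docs/docs` caveat described
# in the `source`/`sources` attribute. File names are invented.
import yaml

manifest = yaml.safe_load("""\
installations:
  - install-docs:
      sources:
        - README.upstream
        - docs/*
""")
print(manifest["installations"][0]["install-docs"]["sources"])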
980     api.pluggable_manifest_rule(
981         InstallRule,
982         [
983             MK_INSTALLATIONS_INSTALL_EXAMPLES,
984             "install-example",
985         ],
986         ParsedInstallExamplesRule,
987         _install_examples_rule_handler,
988         source_format=_with_alt_form(ParsedInstallExamplesRuleSourceFormat),
989         inline_reference_documentation=reference_documentation(
990             title="Install examples (`install-examples`)",
991             description=textwrap.dedent(
992                 """\
993                 This install rule resembles that of `dh_installexamples`. It is a shorthand over the
994                 generic `install` rule with the following key features:
995
996                 1) It pre-defines the `dest-dir` that respects the "main documentation package" recommendation from
997                    Debian Policy. The `install-examples` rule will use the `examples` subdir of the package
998                    documentation dir.
999                 2) The rule comes with pre-defined conditional logic for skipping the rule under
1000                    `DEB_BUILD_OPTIONS=nodoc`, so you do not have to write that conditional yourself.
1001                 3) The `into` parameter can be omitted as long as there is exactly one non-`udeb`
1002                    package listed in `debian/control`.
1003
1004                 With these things in mind, it behaves just like the `install` rule.
1005                 """
1006             ),
1007             non_mapping_description=textwrap.dedent(
1008                 """\
1009                 When the input is a string or a list of strings, then that value is used as shorthand
1010                 for `source` or `sources` (respectively). This form can only be used when `into` is
1011                 not required.
1012                 """
1013             ),
1014             attributes=[
1015                 documented_attr(
1016                     ["source", "sources"],
1017                     textwrap.dedent(
1018                         """\
1019                         A path match (`source`) or a list of path matches (`sources`) defining the
1020                         source path(s) to be installed. The path match(es) can use globs. Each match
1021                         is tried against default search directories.
1022                         - When a symlink is matched, then the symlink (not its target) is installed
1023                           as-is. When a directory is matched, then the directory is installed along
1024                           with all the contents that have not already been installed somewhere.
1025
1026                         - **CAVEAT**: Specifying `source: examples` where `examples` resolves to a
1027                           directory for `install-examples` will give you an `examples/examples`
1028                           directory in the package, which is rarely what you want. Often, you
1029                           can solve this by using `examples/*` instead. Similar for `install-docs`
1030                           and a `doc` or `docs` directory.
1031                         """
1032                     ),
1033                 ),
1034                 documented_attr(
1035                     "into",
1036                     textwrap.dedent(
1037                         """\
1038                         Either a package name or a list of package names for which these paths should be
1039                         installed as examples. This key is conditional on whether there are multiple
1040                         (non-`udeb`) binary packages listed in `debian/control`. When there is only one
1041                         (non-`udeb`) binary package, then that binary is the default for `into`.
1042                         Otherwise, the key is required.
1043                         """
1044                     ),
1045                 ),
1046                 documented_attr(
1047                     "when",
1048                     textwrap.dedent(
1049                         """\
1050                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1051                         This condition will be combined with the built-in condition provided by these rules
1052                         (rather than replacing it).
1053                         """
1054                     ),
1055                 ),
1056             ],
1057             reference_documentation_url=_manifest_format_doc(
1058                 "install-examples-install-examples"
1059             ),
1060         ),
1061     )
1062     api.pluggable_manifest_rule(
1063         InstallRule,
1064         MK_INSTALLATIONS_INSTALL_MAN,
1065         ParsedInstallManpageRule,
1066         _install_man_rule_handler,
1067         source_format=_with_alt_form(ParsedInstallManpageRuleSourceFormat),
1068         inline_reference_documentation=reference_documentation(
1069             title="Install man pages (`install-man`)",
1070             description=textwrap.dedent(
1071                 """\
1072                 Install rule for installing man pages similar to `dh_installman`. It is a shorthand
1073                 over the generic `install` rule with the following key features:
1074
1075                 1) The rule can only match files (notably, symlinks cannot be matched by this rule).
1076                 2) The `dest-dir` is computed per source file based on the man page's section and
1077                    language.
1078                 3) The `into` parameter can be omitted as long as there is exactly one non-`udeb`
1079                    package listed in `debian/control`.
1080                 4) The rule comes with man page specific attributes such as `language` and `section`
1081                    for when the auto-detection is insufficient.
1082                 5) The rule comes with pre-defined conditional logic for skipping the rule under
1083                    `DEB_BUILD_OPTIONS=nodoc`, so you do not have to write that conditional yourself.
1084
1085                 With these things in mind, the rule behaves similar to the `install` rule.
1086                 """
1087             ),
1088             non_mapping_description=textwrap.dedent(
1089                 """\
1090                 When the input is a string or a list of strings, then that value is used as shorthand
1091                 for `source` or `sources` (respectively). This form can only be used when `into` is
1092                 not required.
1093                 """
1094             ),
1095             attributes=[
1096                 documented_attr(
1097                     ["source", "sources"],
1098                     textwrap.dedent(
1099                         """\
1100                         A path match (`source`) or a list of path matches (`sources`) defining the
1101                         source path(s) to be installed. The path match(es) can use globs. Each match
1102                         is tried against default search directories.
1103                         - When a symlink is matched, then the symlink (not its target) is installed
1104                           as-is. When a directory is matched, then the directory is installed along
1105                           with all the contents that have not already been installed somewhere.
1106                         """
1107                     ),
1108                 ),
1109                 documented_attr(
1110                     "into",
1111                     textwrap.dedent(
1112                         """\
1113                         Either a package name or a list of package names for which these paths should be
1114                         installed as man pages. This key is conditional on whether there are multiple (non-`udeb`)
1115                         binary packages listed in `debian/control`. When there is only one (non-`udeb`) binary
1116                         package, then that binary is the default for `into`. Otherwise, the key is required.
1117                         """
1118                     ),
1119                 ),
1120                 documented_attr(
1121                     "section",
1122                     textwrap.dedent(
1123                         """\
1124                         If provided, it must be an integer between 1 and 9 (both inclusive), defining the
1125                         section the man pages belong to, overriding any auto-detection that `debputy` would
1126                         have performed.
1127                         """
1128                     ),
1129                 ),
1130                 documented_attr(
1131                     "language",
1132                     textwrap.dedent(
1133                         """\
1134                         If provided, it must be either a 2 letter language code (such as `de`), a 5 letter
1135                         language + dialect code (such as `pt_BR`), or one of the special keywords `C`,
1136                         `derive-from-path`, or `derive-from-basename`. The default is `derive-from-path`.
1137                         - When `language` is `C`, then the man pages are assumed to be "untranslated".
1138                         - When `language` is a language code (with or without dialect), then all man pages
1139                           matched will be assumed to be translated to that concrete language / dialect.
1140                         - When `language` is `derive-from-path`, then `debputy` attempts to derive the
1141                           language from the path (`man/<language>/man<section>`). This matches the
1142                           default of `dh_installman`. When no language can be found for a given source,
1143                           `debputy` behaves like language was `C`.
1144                         - When `language` is `derive-from-basename`, then `debputy` attempts to derive
1145                           the language from the basename (`foo.<language>.1`) similar to `dh_installman`'s
1146                           previous default. When no language can be found for a given source, `debputy`
1147                           behaves like language was `C`. Note this is prone to false positives where
1148                           `.pl`, `.so` or similar two-letter extensions get mistaken for a language code
1149                           (`.pl` can be both "Polish" and "Perl Script", `.so` can be both "Somali" and
1150                           "Shared Object" documentation). In this configuration, such extensions are
1151                           always assumed to be a language.
1152                         """
1153                     ),
1154                 ),
1155                 documented_attr(
1156                     "when",
1157                     textwrap.dedent(
1158                         """\
1159                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1160                         """
1161                     ),
1162                 ),
1163             ],
1164             reference_documentation_url=_manifest_format_doc(
1165                 "install-manpages-install-man"
1166             ),
1167         ),
1168     )
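# Aside: a hypothetical `install-man` fragment using the `language` attribute
# from the documentation above to pin translated man pages to Brazilian
# Portuguese instead of relying on `derive-from-path`.
import yaml

manifest = yaml.safe_load("""\
installations:
  - install-man:
      sources:
        - man/pt_BR/*.1
      language: pt_BR
""")
print(manifest["installations"][0]["install-man"]["language"])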
1169     api.pluggable_manifest_rule(
1170         InstallRule,
1171         MK_INSTALLATIONS_DISCARD,
1172         ParsedInstallDiscardRule,
1173         _install_discard_rule_handler,
1174         source_format=_with_alt_form(ParsedInstallDiscardRuleSourceFormat),
1175         inline_reference_documentation=reference_documentation(
1176             title="Discard (or exclude) upstream provided paths (`discard`)",
1177             description=textwrap.dedent(
1178                 """\
1179                 When installing paths from `debian/tmp` into packages, it might be useful to ignore
1180                 some paths that you never need installed. This can be done with the `discard` rule.
1181
1182                 Once a path is discarded, it cannot be matched by any other install rules. A path
1183                 that is discarded is considered handled when `debputy` checks for paths you might
1184                 have forgotten to install. The `discard` feature therefore *also* replaces the
1185                 `debian/not-installed` file used by `debhelper` and `cdbs`.
1186                 """
1187             ),
1188             non_mapping_description=textwrap.dedent(
1189                 """\
1190                 When the input is a string or a list of strings, then that value is used as shorthand
1191                 for `path` or `paths` (respectively).
1192                 """
1193             ),
1194             attributes=[
1195                 documented_attr(
1196                     ["path", "paths"],
1197                     textwrap.dedent(
1198                         """\
1199                         A path match (`path`) or a list of path matches (`paths`) defining the source
1200                         path(s) that should not be installed anywhere. The path match(es) can use globs.
1201                         - When a symlink is matched, then the symlink (not its target) is discarded as-is.
1202                           When a directory is matched, then the directory is discarded along with all the
1203                           contents that have not already been installed somewhere.
1204                         """
1205                     ),
1206                 ),
1207                 documented_attr(
1208                     ["search_dir", "search_dirs"],
1209                     textwrap.dedent(
1210                         """\
1211                         A path (`search-dir`) or a list of paths (`search-dirs`) defining
1212                         which search directories this rule applies to. This attribute is primarily useful
1213                         for source packages that use "per package search dirs", and you want
1214                         to restrict a discard rule to a subset of the relevant search dirs.
1215                         Note that all listed search directories must be either an explicit search
1216                         directory requested by the packager or a search directory that `debputy`
1217                         provided automatically (such as `debian/tmp`). Listing other paths
1218                         will make `debputy` report an error.
1219                         - Note that the `path` or `paths` must match at least one entry in
1220                           any of the search directories unless *none* of the search directories
1221                           exist (or the condition in `required-when` evaluates to false). When
1222                           none of the search directories exist, the discard rule is silently
1223                           skipped. This special case enables you to have discard rules only
1224                           applicable to certain builds that are only performed conditionally.
1225                         """
1226                     ),
1227                 ),
1228                 documented_attr(
1229                     "required_when",
1230                     textwrap.dedent(
1231                         """\
1232                         A condition as defined in [Conditional rules](#conditional-rules). The discard
1233                         rule is always applied. When the conditional is present and evaluates to false,
1234                         the discard rule can silently match nothing. When the condition is absent, *or*
1235                         it evaluates to true, then each pattern provided must match at least one path.
1236                         """
1237                     ),
1238                 ),
1239             ],
1240             reference_documentation_url=_manifest_format_doc(
1241                 "discard-or-exclude-upstream-provided-paths-discard"
1242             ),
1243         ),
1244     )
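# Aside: a hypothetical `discard` fragment per the documentation above. The
# shorthand form discards upstream files that should never be shipped,
# replacing a debhelper-style `debian/not-installed` file. Paths are invented.
import yaml

manifest = yaml.safe_load("""\
installations:
  - discard:
      - usr/lib/*/libfoo.la
      - usr/share/doc/foo/INSTALL
""")
print(manifest["installations"][0]["discard"])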
1245     api.pluggable_manifest_rule(
1246         InstallRule,
1247         MK_INSTALLATIONS_MULTI_DEST_INSTALL,
1248         ParsedMultiDestInstallRule,
1249         _multi_dest_install_rule_handler,
1250         source_format=ParsedMultiDestInstallRuleSourceFormat,
1251         inline_reference_documentation=reference_documentation(
1252             title=f"Multi destination install (`{MK_INSTALLATIONS_MULTI_DEST_INSTALL}`)",
1253             description=textwrap.dedent(
1254                 """\
1255                 The `{RULE_NAME}` rule is a variant of the generic `install` rule that installs sources
1256                 into multiple destination paths. This is needed for the rare case where you want a
1257                 path to be installed *twice* (or more) into the *same* package. The rule has two
1258                 "primary" uses.
1259
1260                 1) The classic "install into directory" similar to the standard `dh_install`,
1261                    except you list 2+ destination directories.
1262                 2) The "install as" similar to `dh-exec`'s `foo => bar` feature, except you list
1263                    2+ `as` names.
1264                 """.format(
1265                     RULE_NAME=MK_INSTALLATIONS_MULTI_DEST_INSTALL
1266                 )
1267             ),
1268             attributes=[
1269                 documented_attr(
1270                     ["source", "sources"],
1271                     textwrap.dedent(
1272                         """\
1273                         A path match (`source`) or a list of path matches (`sources`) defining the
1274                         source path(s) to be installed. The path match(es) can use globs. Each match
1275                         is tried against default search directories.
1276                         - When a symlink is matched, then the symlink (not its target) is installed
1277                           as-is. When a directory is matched, then the directory is installed along
1278                           with all the contents that have not already been installed somewhere.
1279                         """
1280                     ),
1281                 ),
1282                 documented_attr(
1283                     "dest_dirs",
1284                     textwrap.dedent(
1285                         """\
1286                         A list of paths defining the destination *directories*. The value *cannot* use
1287                         globs, but can use substitution. It is mutually exclusive with `as` but must be
1288                         provided if `as` is not provided. The attribute must contain at least two paths
1289                         (if you do not have two paths, you want `install`).
1290                         """
1291                     ),
1292                 ),
1293                 documented_attr(
1294                     "into",
1295                     textwrap.dedent(
1296                         """\
1297                         Either a package name or a list of package names for which these paths should be
1298                         installed. This key is conditional on whether there are multiple binary packages listed
1299                         in `debian/control`. When there is only one binary package, then that binary is the
1300                         default for `into`. Otherwise, the key is required.
1301                         """
1302                     ),
1303                 ),
1304                 documented_attr(
1305                     "install_as",
1306                     textwrap.dedent(
1307                         """\
1308                         A list of paths, which defines all the places the source will be installed.
1309                         Each path must be a full path without globs (but can use substitution).
1310                         This option is mutually exclusive with `dest-dirs` and `sources` (but not
1311                         `source`). When `as` is given, then `source` must match exactly one
1312                         "not yet matched" path. The attribute must contain at least two paths
1313                         (if you do not have two paths, you want `install`).
1314                         """
1315                     ),
1316                 ),
1317                 documented_attr(
1318                     "when",
1319                     textwrap.dedent(
1320                         """\
1321                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1322                         """
1323                     ),
1324                 ),
1325             ],
1326             reference_documentation_url=_manifest_format_doc("generic-install-install"),
1327         ),
1328     )
1329
1330
1331 def register_transformation_rules(api: DebputyPluginInitializerProvider) -> None:
1332     api.pluggable_manifest_rule(
1333         TransformationRule,
1334         "move",
1335         TransformationMoveRuleSpec,
1336         _transformation_move_handler,
1337         inline_reference_documentation=reference_documentation(
1338             title="Move transformation rule (`move`)",
1339             description=textwrap.dedent(
1340                 """\
1341                 The move transformation rule is mostly only useful for single binary source packages,
1342                 where everything from upstream's build system is installed automatically into the package.
1343                 In those cases, you might find yourself with some files that need to be renamed to match
1344                 Debian-specific requirements.
1345
1346                 This can be done with the `move` transformation rule, which is a rough emulation of the
1347                 `mv` command line tool.
1348                 """
1349             ),
1350             attributes=[
1351                 documented_attr(
1352                     "source",
1353                     textwrap.dedent(
1354                         """\
1355                         A path match defining the source path(s) to be renamed. The value can use globs
1356                         and substitutions.
1357                         """
1358                     ),
1359                 ),
1360                 documented_attr(
1361                     "target",
1362                     textwrap.dedent(
1363                         """\
1364                         A path defining the target path. The value *cannot* use globs, but can use
1365                         substitution. If the target ends with a literal `/` (prior to substitution),
1366                         the target will *always* be a directory.
1367                         """
1368                     ),
1369                 ),
1370                 documented_attr(
1371                     "when",
1372                     textwrap.dedent(
1373                         """\
1374                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1375                         """
1376                     ),
1377                 ),
1378             ],
1379             reference_documentation_url=_manifest_format_doc(
1380                 "move-transformation-rule-move"
1381             ),
1382         ),
1383     )
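# Aside: a hypothetical `move` transformation per the documentation above,
# renaming an upstream binary to its Debian name. Per the `target` attribute,
# a target of `usr/bin/` (trailing `/`) would instead move the source *into*
# that directory. Names are invented.
import yaml

manifest = yaml.safe_load("""\
packages:
  foo:
    transformations:
      - move:
          source: usr/bin/foo-tool
          target: usr/bin/foo
""")
print(manifest["packages"]["foo"]["transformations"][0]["move"]["target"])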
1384     api.pluggable_manifest_rule(
1385         TransformationRule,
1386         "remove",
1387         TransformationRemoveRuleSpec,
1388         _transformation_remove_handler,
1389         source_format=_with_alt_form(TransformationRemoveRuleInputFormat),
1390         inline_reference_documentation=reference_documentation(
1391             title="Remove transformation rule (`remove`)",
1392             description=textwrap.dedent(
1393                 """\
1394                 The remove transformation rule is mostly only useful for single binary source packages,
1395                 where everything from upstream's build system is installed automatically into the package.
1396                 In those cases, you might find yourself with some files that are _not_ relevant for the
1397                 Debian package (but would be relevant for other distros or for non-distro local builds).
1398                 Common examples include `INSTALL` files or `LICENSE` files (when they are just a subset
1399                 of `debian/copyright`).
1400
1401                 In the manifest, you can ask `debputy` to remove paths from the Debian package by using
1402                 the `remove` transformation rule.
1403
1404                 Note that `remove` removes paths from future glob matches and transformation rules.
1405                 """
1406             ),
1407             non_mapping_description=textwrap.dedent(
1408                 """\
1409                 When the input is a string or a list of strings, then that value is used as shorthand
1410                 for `path` or `paths` (respectively).
1411                 """
1412             ),
1413             attributes=[
1414                 documented_attr(
1415                     ["path", "paths"],
1416                     textwrap.dedent(
1417                         """\
1418                         A path match (`path`) or a list of path matches (`paths`) defining the
1419                         path(s) inside the package that should be removed. The path match(es)
1420                         can use globs.
1421                         - When a symlink is matched, then the symlink (not its target) is removed
1422                           as-is. When a directory is matched, then the directory is removed
1423                           along with all the contents.
1424                         """
1425                     ),
1426                 ),
1427                 documented_attr(
1428                     "keep_empty_parent_dirs",
1429                     textwrap.dedent(
1430                         """\
1431                         A boolean determining whether parent directories that become
1432                         empty as a consequence of this rule should be kept. When provided and `true`,
1433                         this rule will leave empty directories behind. Otherwise, if this rule
1434                         causes a directory to become empty, that directory will be removed.
1435                         """
1436                     ),
1437                 ),
1438                 documented_attr(
1439                     "when",
1440                     textwrap.dedent(
1441                         """\
1442                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1443                         This condition will be combined with the built-in condition provided by these rules
1444                         (rather than replacing it).
1445                         """
1446                     ),
1447                 ),
1448             ],
1449             reference_documentation_url=_manifest_format_doc(
1450                 "remove-transformation-rule-remove"
1451             ),
1452         ),
1453     )
1454     api.pluggable_manifest_rule(
1455         TransformationRule,
1456         "create-symlink",
1457         CreateSymlinkRule,
1458         _transformation_create_symlink,
1459         inline_reference_documentation=reference_documentation(
1460             title="Create symlinks transformation rule (`create-symlink`)",
1461             description=textwrap.dedent(
1462                 """\
1463                 Often, the upstream build system will provide the symlinks for you. However,
1464                 in some cases, it is useful for the packager to define distribution-specific
1465                 symlinks. This can be done via the `create-symlink` transformation rule.
1466                 """
1467             ),
1468             attributes=[
1469                 documented_attr(
1470                     "path",
1471                     textwrap.dedent(
1472                         """\
1473                         The path that should be a symlink. The path may contain substitution
1474                         variables such as `{{DEB_HOST_MULTIARCH}}` but _cannot_ use globs.
1475                         Parent directories are implicitly created as necessary.
1476                         * Note that if `path` already exists, the behaviour of this
1477                           transformation depends on the value of `replacement-rule`.
1478                         """
1479                     ),
1480                 ),
1481                 documented_attr(
1482                     "target",
1483                     textwrap.dedent(
1484                         """\
1485                         Where the symlink should point to. The target may contain substitution
1486                         variables such as `{{DEB_HOST_MULTIARCH}}` but _cannot_ use globs.
1487                         The link target is _not_ required to exist inside the package.
1488                         * The `debputy` tool will normalize the target according to the rules
1489                           of the Debian Policy. Use an absolute or relative target at your own
1490                           preference.
1491                         """
1492                     ),
1493                 ),
1494                 documented_attr(
1495                     "replacement_rule",
1496                     textwrap.dedent(
1497                         """\
1498                         This attribute defines how to handle if `path` already exists. It can
1499                         be set to one of the following values:
1500                         - `error-if-exists`: When `path` already exists, `debputy` will
1501                           stop with an error. This is similar to `ln -s` semantics.
1502                         - `error-if-directory`: When `path` already exists, **and** it is
1503                           a directory, `debputy` will stop with an error. Otherwise,
1504                           remove the `path` first and then create the symlink. This is
1505                           similar to `ln -sf` semantics.
1506                         - `abort-on-non-empty-directory` (default): When `path` already
1507                           exists, then it will be removed provided it is a non-directory
1508                           **or** an *empty* directory and the symlink will then be
1509                           created. If the path is a *non-empty* directory, `debputy`
1510                           will stop with an error.
1511                         - `discard-existing`: When `path` already exists, it will be
1512                           removed. If the `path` is a directory, all its contents will
1513                           be removed recursively along with the directory. Finally,
1514                           the symlink is created. This is similar to having an explicit
1515                           `remove` rule just prior to the `create-symlink` that is
1516                           conditional on `path` existing (plus the condition defined in
1517                           `when` if any).
1518
1519                         Keep in mind that `replacement-rule` only applies if `path` exists.
1520                         If the symlink cannot be created, because a part of `path` exists and
1521                         is *not* a directory, then `create-symlink` will fail regardless of
1522                         the value in `replacement-rule`.
1523                         """
1524                     ),
1525                 ),
1526                 documented_attr(
1527                     "when",
1528                     textwrap.dedent(
1529                         """\
1530                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1531                         """
1532                     ),
1533                 ),
1534             ],
1535             reference_documentation_url=_manifest_format_doc(
1536                 "create-symlinks-transformation-rule-create-symlink"
1537             ),
1538         ),
1539     )
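# Aside: a hypothetical `create-symlink` transformation per the documentation
# above, including an explicit `replacement-rule` for the case where `path`
# already exists as a (possibly non-empty) directory. Paths are invented.
import yaml

manifest = yaml.safe_load("""\
packages:
  foo:
    transformations:
      - create-symlink:
          path: usr/share/foo/data
          target: /var/lib/foo/data
          replacement-rule: discard-existing
""")
print(manifest["packages"]["foo"]["transformations"][0]["create-symlink"])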
1540     api.pluggable_manifest_rule(
1541         TransformationRule,
1542         "path-metadata",
1543         PathManifestRule,
1544         _transformation_path_metadata,
1545         source_format=PathManifestSourceDictFormat,
1546         inline_reference_documentation=reference_documentation(
1547             title="Change path owner/group or mode (`path-metadata`)",
1548             description=textwrap.dedent(
1549                 """\
1550                 The `debputy` command normalizes the path metadata (such as ownership and mode) similar
1551                 to `dh_fixperms`. For most packages, the default is what you want. However, in some
1552                 cases, the package has a special case or two that `debputy` does not cover. In that
1553                 case, you can tell `debputy` to use the metadata you want by using the `path-metadata`
1554                 transformation.
1555
1556                 Common use-cases include setuid/setgid binaries (such as `usr/bin/sudo`) and/or static
1557                 ownership (such as `/usr/bin/write`).
1558                 """
1559             ),
1560             attributes=[
1561                 documented_attr(
1562                     ["path", "paths"],
1563                     textwrap.dedent(
1564                         """\
1565                         A path match (`path`) or a list of path matches (`paths`) defining the path(s)
1566                         inside the package that should be affected. The path match(es) can use globs
1567                         and substitution variables. Special rules for matches:
1568                         - Symlinks are never followed and will never be matched by this rule.
1569                         - Directory handling depends on the `recursive` attribute.
1570                         """
1571                     ),
1572                 ),
1573                 documented_attr(
1574                     "owner",
1575                     textwrap.dedent(
1576                         """\
1577                         Denotes the owner of the paths matched by `path` or `paths`. When omitted,
1578                         no change of owner is done.
1579                         """
1580                     ),
1581                 ),
1582                 documented_attr(
1583                     "group",
1584                     textwrap.dedent(
1585                         """\
1586                         Denotes the group of the paths matched by `path` or `paths`. When omitted,
1587                         no change of group is done.
1588                         """
1589                     ),
1590                 ),
1591                 documented_attr(
1592                     "mode",
1593                     textwrap.dedent(
1594                         """\
1595                         Denotes the mode of the paths matched by `path` or `paths`. When omitted,
1596                         no change in mode is done. Note that numeric mode must always be given as
1597                         a string (i.e., with quotes). Symbolic mode can be used as well. If
1598                         symbolic mode uses a relative definition (e.g., `o-rx`), then it is
1599                         relative to the matched path's current mode.
1600                         """
1601                     ),
1602                 ),
1603                 documented_attr(
1604                     "capabilities",
1605                     textwrap.dedent(
1606                         """\
1607                         Denotes a Linux capability that should be applied to the path. When provided,
1608                         `debputy` will cause the capability to be applied to all *files* denoted by
1609                         the `path`/`paths` attribute on install (via `postinst configure`) provided
1610                         that `setcap` is installed on the system when the `postinst configure` is
1611                         run.
1612                         - If any non-file paths are matched, the `capabilities` will *not* be applied
1613                           to those paths.
1614
1615                         """
1616                     ),
1617                 ),
1618                 documented_attr(
1619                     "capability_mode",
1620                     textwrap.dedent(
1621                         """\
1622                         Denotes the mode to apply to the path *if* the Linux capability denoted in
1623                         `capabilities` was successfully applied. If omitted, it defaults to `a-s` as
1624                         generally capabilities are used to avoid "setuid"/"setgid" binaries. The
1625                         `capability-mode` is relative to the *final* path mode (the mode of the path
1626                         in the produced `.deb`). The `capability-mode` attribute cannot be used if
1627                         `capabilities` is omitted.
1628                         """
1629                     ),
1630                 ),
1631                 documented_attr(
1632                     "recursive",
1633                     textwrap.dedent(
1634                         """\
1635                         When a directory is matched, then the metadata changes are applied to the
1636                         directory itself. When `recursive` is `true`, then the transformation is
1637                         *also* applied to all paths beneath the directory. The default value for
1638                         this attribute is `false`.
1639                         """
1640                     ),
1641                 ),
1642                 documented_attr(
1643                     "when",
1644                     textwrap.dedent(
1645                         """\
1646                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1647                         """
1648                     ),
1649                 ),
1650             ],
1651             reference_documentation_url=_manifest_format_doc(
1652                 "change-path-ownergroup-or-mode-path-metadata"
1653             ),
1654         ),
1655     )
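# Aside: a hypothetical `path-metadata` transformation per the documentation
# above, applying a capability and spelling out the documented default
# `capability-mode` of `a-s` for clarity. Note the quoted numeric mode, as
# required by the `mode` attribute documentation. The binary name is invented.
import yaml

manifest = yaml.safe_load("""\
packages:
  foo:
    transformations:
      - path-metadata:
          path: usr/bin/foo-net
          mode: "0755"
          capabilities: cap_net_raw=ep
          capability-mode: a-s
""")
print(manifest["packages"]["foo"]["transformations"][0]["path-metadata"]["mode"])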
1656     api.pluggable_manifest_rule(
1657         TransformationRule,
1658         "create-directories",
1659         EnsureDirectoryRule,
1660         _transformation_mkdirs,
1661         source_format=_with_alt_form(EnsureDirectorySourceFormat),
1662         inline_reference_documentation=reference_documentation(
1663             title="Create directories transformation rule (`create-directories`)",
1664             description=textwrap.dedent(
1665                 """\
1666                 NOTE: This transformation is only really needed if you need to create an empty
1667                 directory somewhere in your package as an integration point. All `debputy`
1668                 transformations will create directories as required.
1669
1670                 In most cases, upstream build systems and `debputy` will create all the relevant
1671                 directories. However, in some rare cases you may want to explicitly define a path
1672                 to be a directory. Maybe to silence a linter that is warning you about a directory
1673                 being empty, or maybe you need an empty directory that nothing else is creating for
1674                 you. This can be done via the `create-directories` transformation rule.
1675
1676                 Unless you have a specific need for the mapping form, you are recommended to use the
1677                 shorthand form of just listing the directories you want created.
1678                 """
1679             ),
1680             non_mapping_description=textwrap.dedent(
1681                 """\
1682                 When the input is a string or a list of strings, then that value is used as shorthand
1683                 for `path` or `paths` (respectively).
1684                 """
1685             ),
1686             attributes=[
1687                 documented_attr(
1688                     ["path", "paths"],
1689                     textwrap.dedent(
1690                         """\
1691                         A path (`path`) or a list of paths (`paths`) defining the path(s) inside the
1692                         package that should be created as directories. The path(s) _cannot_ use globs
1693                         but can use substitution variables. Parent directories are implicitly created
1694                         (with owner `root:root` and mode `0755`; only explicitly listed directories
1695                         are affected by the owner/mode options).
1696                         """
1697                     ),
1698                 ),
1699                 documented_attr(
1700                     "owner",
1701                     textwrap.dedent(
1702                         """\
1703                         Denotes the owner of the directory (but _not_ what is inside the directory).
1704                         Default is "root".
1705                         """
1706                     ),
1707                 ),
1708                 documented_attr(
1709                     "group",
1710                     textwrap.dedent(
1711                         """\
1712                         Denotes the group of the directory (but _not_ what is inside the directory).
1713                         Default is "root".
1714                         """
1715                     ),
1716                 ),
1717                 documented_attr(
1718                     "mode",
1719                     textwrap.dedent(
1720                         """\
1721                         Denotes the mode of the directory (but _not_ what is inside the directory).
1722                         Note that numeric mode must always be given as a string (i.e., with quotes).
1723                         Symbolic mode can be used as well. If symbolic mode uses a relative
1724                         definition (e.g., `o-rx`), then it is relative to the directory's current mode
1725                         (if it already exists) or `0755` if the directory is created by this
1726                         transformation. The default is "0755".
1727                         """
1728                     ),
1729                 ),
1730                 documented_attr(
1731                     "when",
1732                     textwrap.dedent(
1733                         """\
1734                         A condition as defined in [Conditional rules]({MANIFEST_FORMAT_DOC}#Conditional rules).
1735                         """
1736                     ),
1737                 ),
1738             ],
1739             reference_documentation_url=_manifest_format_doc(
1740                 "create-directories-transformation-rule-directories"
1741             ),
1742         ),
1743     )
1744
1745
1746 def register_manifest_condition_rules(api: DebputyPluginInitializerProvider) -> None:
1747     api.provide_manifest_keyword(  # coverage: 1747 ↛ exit (line 1747 didn't jump to the function exit)
1748         ManifestCondition,
1749         "cross-compiling",
1750         lambda *_: ManifestCondition.is_cross_building(),
1751         inline_reference_documentation=reference_documentation(
1752             title="Cross-Compiling condition `cross-compiling`",
1753             description=textwrap.dedent(
1754                 """\
1755                 The `cross-compiling` condition is used to determine if the current build is
1756                 performing a cross build (i.e., `DEB_BUILD_GNU_TYPE` != `DEB_HOST_GNU_TYPE`).
1757                 Often this has consequences for what is possible to do.
1758
1759                 Note if you specifically want to know:
1760
1761                 * whether build-time tests should be run, then please use the
1762                   `run-build-time-tests` condition.
1763                 * whether compiled binaries can be run as if they were native binaries, please
1764                   use the `can-execute-compiled-binaries` condition instead. That condition
1765                   accounts for cross-building in its evaluation.
1766                 """
1767             ),
1768             reference_documentation_url=_manifest_format_doc(
1769                 "cross-compiling-condition-cross-compiling-string"
1770             ),
1771         ),
1772     )
1773     api.provide_manifest_keyword(  # coverage: 1773 ↛ exit (line 1773 didn't jump to the function exit)
1774         ManifestCondition,
1775         "can-execute-compiled-binaries",
1776         lambda *_: ManifestCondition.can_execute_compiled_binaries(),
1777         inline_reference_documentation=reference_documentation(
1778             title="Can run produced binaries `can-execute-compiled-binaries`",
1779             description=textwrap.dedent(
1780                 """\
1781                 The `can-execute-compiled-binaries` condition is used to assert that the build
1782                 can assume that all compiled binaries can be run as if they were native
1783                 binaries. For native builds, this condition always evaluates to `true`.
1784                 For cross builds, the condition generally evaluates to `false`. However,
1785                 there are special cases where binaries can be run during cross-building.
1786                 Accordingly, this condition is subtly different from the `cross-compiling`
1787                 condition.
1788
1789                 Note this condition should *not* be used when you know the binary has been
1790                 built for the build architecture (`DEB_BUILD_ARCH`) or for determining
1791                 whether build-time tests should be run (for build-time tests, please use
1792                 the `run-build-time-tests` condition instead). Some upstream build systems
1793                 are advanced enough to distinguish building a final product vs. building
1794                 a helper tool that needs to run during build. The latter will often be
1795                 compiled by a separate compiler (often using `$(CC_FOR_BUILD)`,
1796                 `cc_for_build` or similar variable names in upstream build systems for
1797                 that compiler).
1798                 """
1799             ),
1800             reference_documentation_url=_manifest_format_doc(
1801                 "can-run-produced-binaries-can-execute-compiled-binaries-string"
1802             ),
1803         ),
1804     )
1805     api.provide_manifest_keyword(  # coverage: 1805 ↛ exit (line 1805 didn't jump to the function exit)
1806         ManifestCondition,
1807         "run-build-time-tests",
1808         lambda *_: ManifestCondition.run_build_time_tests(),
1809         inline_reference_documentation=reference_documentation(
1810             title="Whether build time tests should be run `run-build-time-tests`",
1811             description=textwrap.dedent(
1812                 """\
1813                 The `run-build-time-tests` condition is used to determine whether (build
1814                 time) tests should be run for this build. This condition roughly
1815                 translates into whether `nocheck` is absent from `DEB_BUILD_OPTIONS`.
1816
1817                 In general, the manifest *should not* prevent build time tests from being
1818                 run during cross-builds.
1819                 """
1820             ),
1821             reference_documentation_url=_manifest_format_doc(
1822                 "whether-build-time-tests-should-be-run-run-build-time-tests-string"
1823             ),
1824         ),
1825     )
1826
1827     api.pluggable_manifest_rule(
1828         ManifestCondition,
1829         "not",
1830         MCNot,
1831         _mc_not,
1832         inline_reference_documentation=reference_documentation(
1833             title="Negated condition `not` (mapping)",
1834             description=textwrap.dedent(
1835                 """\
1836                 It is possible to negate a condition via the `not` condition.
1837
1838                 As an example:
1839
1840                     packages:
1841                       util-linux:
1842                         transformations:
1843                           - create-symlink:
1844                               path: sbin/getty
1845                               target: /sbin/agetty
1846                               when:
1847                                 # On Hurd, the package "hurd" ships "sbin/getty".
1848                                 # This example happens to also be an alternative to `arch-matches: '!hurd-any'`
1849                                 not:
1850                                   arch-matches: 'hurd-any'
1851
1852                 The `not` condition is specified as a mapping, where the key is `not` and the
1853                 value is a nested condition.
1854                 """
1855             ),
1856             attributes=[
1857                 documented_attr(
1858                     "negated_condition",
1859                     textwrap.dedent(
1860                         """\
1861                         The condition to be negated.
1862                         """
1863                     ),
1864                 ),
1865             ],
1866             reference_documentation_url=_manifest_format_doc(
1867                 "whether-build-time-tests-should-be-run-run-build-time-tests-string"
1868             ),
1869         ),
1870     )
1871     api.pluggable_manifest_rule(
1872         ManifestCondition,
1873         ["any-of", "all-of"],
1874         MCAnyOfAllOf,
1875         _mc_any_of,
1876         source_format=List[ManifestCondition],
1877         inline_reference_documentation=reference_documentation(
1878             title="All or any of a list of conditions `all-of`/`any-of`",
1879             description=textwrap.dedent(
1880                 """\
1881                 It is possible to aggregate conditions using the `all-of` or `any-of`
1882                 condition. This provides `X and Y` and `X or Y` semantics (respectively).
1883                 """
1884             ),
1885             reference_documentation_url=_manifest_format_doc(
1886                 "all-or-any-of-a-list-of-conditions-all-ofany-of-list"
1887             ),
1888         ),
1889     )
1890     api.pluggable_manifest_rule(
1891         ManifestCondition,
1892         "arch-matches",
1893         MCArchMatches,
1894         _mc_arch_matches,
1895         source_format=str,
1896         inline_reference_documentation=reference_documentation(
1897             title="Architecture match condition `arch-matches`",
1898             description=textwrap.dedent(
1899                 """\
1900                 Sometimes, a rule needs to be conditional on the architecture.
1901                 This can be done by using the `arch-matches` rule. In 99.99%
1902                 of the cases, `arch-matches` will be the form you are looking for
1903                 and practically behaves like a comparison against
1904                 `dpkg-architecture -qDEB_HOST_ARCH`.
1905
1906                 For the cross-compiling specialists or curious people: The
1907                 `arch-matches` rule behaves like a `package-context-arch-matches`
1908                 in the context of a binary package and like
1909                 `source-context-arch-matches` otherwise. The details of those
1910                 are covered in their own keywords.
1911                 """
1912             ),
1913             non_mapping_description=textwrap.dedent(
1914                 """\
1915                 The value must be a string in the form of a space-separated list of
1916                 architecture names or architecture wildcards (same syntax as the
1917                 architecture restriction in Build-Depends in debian/control except
1918                 there are no enclosing `[]` brackets). The names/wildcards can
1919                 optionally be prefixed by `!` to negate them. However, either
1920                 *all* names / wildcards must have negation or *none* of them may
1921                 have it.
1922                 """
1923             ),
1924             reference_documentation_url=_manifest_format_doc(
1925                 "architecture-match-condition-arch-matches-mapping"
1926             ),
1927         ),
1928     )
1929
1930     context_arch_doc = reference_documentation(
1931         title="Explicit source or binary package context architecture match condition"
1932         " `source-context-arch-matches`, `package-context-arch-matches` (mapping)",
1933         description=textwrap.dedent(
1934             """\
1935             **These are special-case conditions**. Unless you know that you have a very special case,
1936             you should probably use `arch-matches` instead. These conditions are aimed at people with
1937             corner-case special architecture needs. It also assumes the reader is familiar with the
1938             `arch-matches` condition.
1939
1940             To understand these rules, here is a quick primer on `debputy`'s concept of "source context"
1941             vs "(binary) package context" architecture. For a native build, these two contexts are the
1942             same except that in the package context an `Architecture: all` package always resolves to
1943             `all` rather than `DEB_HOST_ARCH`. As a consequence, `debputy` forbids `arch-matches` and
1944             `package-context-arch-matches` in the context of an `Architecture: all` package as a warning
1945             to the packager that the condition does not make sense.
1946
1947             In the very rare case that you need an architecture condition for an `Architecture: all` package,
1948             you can use `source-context-arch-matches`. However, this means your `Architecture: all` package
1949             is not reproducible between different build hosts (which has been known to be relevant for some
1950             very special cases).
1951
1952             Additionally, for the 0.0001% case where you are building a cross-compiling compiler (that is,
1953             `DEB_HOST_ARCH != DEB_TARGET_ARCH` and you are working with `gcc` or similar), `debputy` can be
1954             instructed (opt-in) to use `DEB_TARGET_ARCH` rather than `DEB_HOST_ARCH` for certain packages when
1955             evaluating an architecture condition in the context of a binary package. This can be useful if the
1956             compiler produces supporting libraries that need to be built for the `DEB_TARGET_ARCH` rather than
1957             the `DEB_HOST_ARCH`. This is where `arch-matches` or `package-context-arch-matches` can differ
1958             subtly from `source-context-arch-matches` in how they evaluate the condition. This opt-in currently
1959             relies on setting `X-DH-Build-For-Type: target` for each of the relevant packages in
1960             `debian/control`. However, unless you are a cross-compiling specialist, you will probably never
1961             need to care about nor use any of this.
1962
1963             Accordingly, the possible conditions are:
1964
1965             * `arch-matches`: This is the form recommended to laymen and as the default use-case. This
1966               conditional acts as `package-context-arch-matches` if the condition is used in the context
1967               of a binary package. Otherwise, it acts as `source-context-arch-matches`.
1968
1969             * `source-context-arch-matches`: With this conditional, the provided architecture constraint is compared
1970               against the build time provided host architecture (`dpkg-architecture -qDEB_HOST_ARCH`). This can
1971               be useful when an `Architecture: all` package needs an architecture condition for some reason.
1972
1973             * `package-context-arch-matches`: With this conditional, the provided architecture constraint is compared
1974               against the package's resolved architecture. This condition can only be used in the context of a binary
1975               package (usually, under `packages.<name>.`). If the package is an `Architecture: all` package, the
1976               condition will fail with an error as the condition always has the same outcome. For all other
1977               packages, the package's resolved architecture is the same as the build time provided host architecture
1978               (`dpkg-architecture -qDEB_HOST_ARCH`).
1979
1980               - However, as noted above there is a special case for when compiling a cross-compiling compiler, where
1981                 this behaves subtly differently from `source-context-arch-matches`.
1982
1983             All conditions are used the same way as `arch-matches`. Simply replace `arch-matches` with the other
1984             condition. See the `arch-matches` description for an example.
1985             """
1986         ),
1987         non_mapping_description=textwrap.dedent(
1988             """\
1989             The value must be a string in the form of a space-separated list of
1990             architecture names or architecture wildcards (same syntax as the
1991             architecture restriction in Build-Depends in debian/control except
1992             there are no enclosing `[]` brackets). The names/wildcards can
1993             optionally be prefixed by `!` to negate them. However, either
1994             *all* names / wildcards must have negation or *none* of them may
1995             have it.
1996             """
1997         ),
1998     )
1999
2000     api.pluggable_manifest_rule(
2001         ManifestCondition,
2002         "source-context-arch-matches",
2003         MCArchMatches,
2004         _mc_source_context_arch_matches,
2005         source_format=str,
2006         inline_reference_documentation=context_arch_doc,
2007     )
2008     api.pluggable_manifest_rule(
2009         ManifestCondition,
2010         "package-context-arch-matches",
2011         MCArchMatches,
2012         _mc_arch_matches,
2013         source_format=str,
2014         inline_reference_documentation=context_arch_doc,
2015     )
2016     api.pluggable_manifest_rule(
2017         ManifestCondition,
2018         "build-profiles-matches",
2019         MCBuildProfileMatches,
2020         _mc_build_profile_matches,
2021         source_format=str,
2022         inline_reference_documentation=reference_documentation(
2023             title="Active build profile match condition `build-profiles-matches`",
2024             description=textwrap.dedent(
2025                 """\
2026                 The `build-profiles-matches` condition is used to assert whether the
2027                 active build profiles (`DEB_BUILD_PROFILES` / `dpkg-buildpackage -P`)
2028                 match a given build profile restriction.
2029                 """
2030             ),
2031             non_mapping_description=textwrap.dedent(
2032                 """\
2033                 The value is a string using the same syntax as the `Build-Profiles`
2034                 field from `debian/control` (i.e., a space-separated list of
2035                 `<[!]profile ...>` groups).
2036                 """
2037             ),
2038             reference_documentation_url=_manifest_format_doc(
2039                 "active-build-profile-match-condition-build-profiles-matches-mapping"
2040             ),
2041         ),
2042     )
2043
2044
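# Aside: a hypothetical fragment combining the condition keywords documented
# above: an `any-of` over `arch-matches` and a negated `build-profiles-matches`.
# It is shown as a bare `when:` snippet, not a complete manifest.
import yaml

condition = yaml.safe_load("""\
when:
  any-of:
    - arch-matches: 'linux-any'
    - not:
        build-profiles-matches: '<!nocheck>'
""")
print(condition["when"]["any-of"][1]["not"])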
2045def register_dpkg_conffile_rules(api: DebputyPluginInitializerProvider) -> None: 

-

2046 api.pluggable_manifest_rule( 

-

2047 DpkgMaintscriptHelperCommand, 

-

2048 "remove", 

-

2049 DpkgRemoveConffileRule, 

-

2050 _dpkg_conffile_remove, 

-

2051 inline_reference_documentation=None, # TODO: write and add 

-

2052 ) 

-

2053 

-

2054 api.pluggable_manifest_rule( 

-

2055 DpkgMaintscriptHelperCommand, 

-

2056 "rename", 

-

2057 DpkgRenameConffileRule, 

-

2058 _dpkg_conffile_rename, 

-

2059 inline_reference_documentation=None, # TODO: write and add 

-

2060 ) 

-

2061 

-

2062 
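Both rules ultimately map onto `dpkg-maintscript-helper` invocations in the generated maintainer scripts (see dpkg-maintscript-helper(1)). As a rough sketch of that correspondence, with `mv_conffile_snippet` being a hypothetical helper, a `rename` rule boils down to:

```python
from typing import Optional


# Sketch of the dpkg-maintscript-helper command line a "rename" rule
# corresponds to, following dpkg-maintscript-helper(1); the snippet debputy
# actually emits into the maintainer scripts may differ in detail.
def mv_conffile_snippet(
    source: str,
    target: str,
    prior_version: Optional[str] = None,
    owning_package: Optional[str] = None,
) -> str:
    parts = ["dpkg-maintscript-helper", "mv_conffile", source, target]
    if prior_version is not None:
        parts.append(prior_version)
        if owning_package is not None:
            parts.append(owning_package)
    parts.append('-- "$@"')
    return " ".join(parts)


print(mv_conffile_snippet("/etc/foo.conf", "/etc/bar.conf", "1.0-2"))
```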

-

2063class _ModeOwnerBase(DebputyParsedContentStandardConditional): 

-

2064 mode: NotRequired[FileSystemMode] 

-

2065 owner: NotRequired[StaticFileSystemOwner] 

-

2066 group: NotRequired[StaticFileSystemGroup] 

-

2067 

-

2068 

-

2069class PathManifestSourceDictFormat(_ModeOwnerBase): 

-

2070 path: NotRequired[ 

-

2071 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("paths")] 

-

2072 ] 

-

2073 paths: NotRequired[List[FileSystemMatchRule]] 

-

2074 recursive: NotRequired[bool] 

-

2075 capabilities: NotRequired[str] 

-

2076 capability_mode: NotRequired[FileSystemMode] 

-

2077 

-

2078 

-

2079class PathManifestRule(_ModeOwnerBase): 

-

2080 paths: List[FileSystemMatchRule] 

-

2081 recursive: NotRequired[bool] 

-

2082 capabilities: NotRequired[str] 

-

2083 capability_mode: NotRequired[FileSystemMode] 

-

2084 

-

2085 

-

2086class EnsureDirectorySourceFormat(_ModeOwnerBase): 

-

2087 path: NotRequired[ 

-

2088 Annotated[FileSystemExactMatchRule, DebputyParseHint.target_attribute("paths")] 

-

2089 ] 

-

2090 paths: NotRequired[List[FileSystemExactMatchRule]] 

-

2091 

-

2092 

-

2093class EnsureDirectoryRule(_ModeOwnerBase): 

-

2094 paths: List[FileSystemExactMatchRule] 

-

2095 

-

2096 
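The `*SourceFormat` classes above accept a scalar `path` that `DebputyParseHint.target_attribute("paths")` folds into the list attribute of the parsed form. A minimal sketch of that normalization, independent of the actual declarative parser machinery:

```python
# Illustrative normalization for the scalar/list dual form: a manifest may
# provide `path: <value>` or `paths: [<values>]`, and the scalar form is
# folded into the list attribute. Sketch only, not the declarative parser.
def normalize_paths(data: dict) -> dict:
    if "path" in data and "paths" in data:
        raise ValueError('"path" and "paths" are mutually exclusive')
    if "path" in data:
        data = dict(data)
        data["paths"] = [data.pop("path")]
    return data


print(normalize_paths({"path": "usr/share/foo"}))
print(normalize_paths({"paths": ["usr/share/foo", "usr/share/bar"]}))
```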

-

2097class CreateSymlinkRule(DebputyParsedContentStandardConditional): 

-

2098 path: FileSystemExactMatchRule 

-

2099 target: Annotated[SymlinkTarget, DebputyParseHint.not_path_error_hint()] 

-

2100 replacement_rule: NotRequired[CreateSymlinkReplacementRule] 

-

2101 

-

2102 

-

2103class TransformationMoveRuleSpec(DebputyParsedContentStandardConditional): 

-

2104 source: FileSystemMatchRule 

-

2105 target: FileSystemExactMatchRule 

-

2106 

-

2107 

-

2108class TransformationRemoveRuleSpec(DebputyParsedContentStandardConditional): 

-

2109 paths: List[FileSystemMatchRule] 

-

2110 keep_empty_parent_dirs: NotRequired[bool] 

-

2111 

-

2112 

-

2113class TransformationRemoveRuleInputFormat(DebputyParsedContentStandardConditional): 

-

2114 path: NotRequired[ 

-

2115 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("paths")] 

-

2116 ] 

-

2117 paths: NotRequired[List[FileSystemMatchRule]] 

-

2118 keep_empty_parent_dirs: NotRequired[bool] 

-

2119 

-

2120 

-

2121class ParsedInstallRuleSourceFormat(DebputyParsedContentStandardConditional): 

-

2122 sources: NotRequired[List[FileSystemMatchRule]] 

-

2123 source: NotRequired[ 

-

2124 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("sources")] 

-

2125 ] 

-

2126 into: NotRequired[ 

-

2127 Annotated[ 

-

2128 Union[str, List[str]], 

-

2129 DebputyParseHint.required_when_multi_binary(), 

-

2130 ] 

-

2131 ] 

-

2132 dest_dir: NotRequired[ 

-

2133 Annotated[FileSystemExactMatchRule, DebputyParseHint.not_path_error_hint()] 

-

2134 ] 

-

2135 install_as: NotRequired[ 

-

2136 Annotated[ 

-

2137 FileSystemExactMatchRule, 

-

2138 DebputyParseHint.conflicts_with_source_attributes("sources", "dest_dir"), 

-

2139 DebputyParseHint.manifest_attribute("as"), 

-

2140 DebputyParseHint.not_path_error_hint(), 

-

2141 ] 

-

2142 ] 

-

2143 

-

2144 

-

2145class ParsedInstallDocRuleSourceFormat(DebputyParsedContentStandardConditional): 

-

2146 sources: NotRequired[List[FileSystemMatchRule]] 

-

2147 source: NotRequired[ 

-

2148 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("sources")] 

-

2149 ] 

-

2150 into: NotRequired[ 

-

2151 Annotated[ 

-

2152 Union[str, List[str]], 

-

2153 DebputyParseHint.required_when_multi_binary(package_type="deb"), 

-

2154 ] 

-

2155 ] 

-

2156 dest_dir: NotRequired[ 

-

2157 Annotated[FileSystemExactMatchRule, DebputyParseHint.not_path_error_hint()] 

-

2158 ] 

-

2159 install_as: NotRequired[ 

-

2160 Annotated[ 

-

2161 FileSystemExactMatchRule, 

-

2162 DebputyParseHint.conflicts_with_source_attributes("sources", "dest_dir"), 

-

2163 DebputyParseHint.manifest_attribute("as"), 

-

2164 DebputyParseHint.not_path_error_hint(), 

-

2165 ] 

-

2166 ] 

-

2167 

-

2168 

-

2169class ParsedInstallRule(DebputyParsedContentStandardConditional): 

-

2170 sources: List[FileSystemMatchRule] 

-

2171 into: NotRequired[List[BinaryPackage]] 

-

2172 dest_dir: NotRequired[FileSystemExactMatchRule] 

-

2173 install_as: NotRequired[FileSystemExactMatchRule] 

-

2174 

-

2175 

-

2176class ParsedMultiDestInstallRuleSourceFormat(DebputyParsedContentStandardConditional): 

-

2177 sources: NotRequired[List[FileSystemMatchRule]] 

-

2178 source: NotRequired[ 

-

2179 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("sources")] 

-

2180 ] 

-

2181 into: NotRequired[ 

-

2182 Annotated[ 

-

2183 Union[str, List[str]], 

-

2184 DebputyParseHint.required_when_multi_binary(), 

-

2185 ] 

-

2186 ] 

-

2187 dest_dirs: NotRequired[ 

-

2188 Annotated[ 

-

2189 List[FileSystemExactMatchRule], DebputyParseHint.not_path_error_hint() 

-

2190 ] 

-

2191 ] 

-

2192 install_as: NotRequired[ 

-

2193 Annotated[ 

-

2194 List[FileSystemExactMatchRule], 

-

2195 DebputyParseHint.conflicts_with_source_attributes("sources", "dest_dirs"), 

-

2196 DebputyParseHint.not_path_error_hint(), 

-

2197 DebputyParseHint.manifest_attribute("as"), 

-

2198 ] 

-

2199 ] 

-

2200 

-

2201 

-

2202class ParsedMultiDestInstallRule(DebputyParsedContentStandardConditional): 

-

2203 sources: List[FileSystemMatchRule] 

-

2204 into: NotRequired[List[BinaryPackage]] 

-

2205 dest_dirs: NotRequired[List[FileSystemExactMatchRule]] 

-

2206 install_as: NotRequired[List[FileSystemExactMatchRule]] 

-

2207 

-

2208 

-

2209class ParsedInstallExamplesRule(DebputyParsedContentStandardConditional): 

-

2210 sources: List[FileSystemMatchRule] 

-

2211 into: NotRequired[List[BinaryPackage]] 

-

2212 

-

2213 

-

2214class ParsedInstallExamplesRuleSourceFormat(DebputyParsedContentStandardConditional): 

-

2215 sources: NotRequired[List[FileSystemMatchRule]] 

-

2216 source: NotRequired[ 

-

2217 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("sources")] 

-

2218 ] 

-

2219 into: NotRequired[ 

-

2220 Annotated[ 

-

2221 Union[str, List[str]], 

-

2222 DebputyParseHint.required_when_multi_binary(package_type="deb"), 

-

2223 ] 

-

2224 ] 

-

2225 

-

2226 

-

2227class ParsedInstallManpageRule(DebputyParsedContentStandardConditional): 

-

2228 sources: List[FileSystemMatchRule] 

-

2229 language: NotRequired[str] 

-

2230 section: NotRequired[int] 

-

2231 into: NotRequired[List[BinaryPackage]] 

-

2232 

-

2233 

-

2234class ParsedInstallManpageRuleSourceFormat(DebputyParsedContentStandardConditional): 

-

2235 sources: NotRequired[List[FileSystemMatchRule]] 

-

2236 source: NotRequired[ 

-

2237 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("sources")] 

-

2238 ] 

-

2239 language: NotRequired[str] 

-

2240 section: NotRequired[int] 

-

2241 into: NotRequired[ 

-

2242 Annotated[ 

-

2243 Union[str, List[str]], 

-

2244 DebputyParseHint.required_when_multi_binary(package_type="deb"), 

-

2245 ] 

-

2246 ] 

-

2247 

-

2248 

-

2249class ParsedInstallDiscardRuleSourceFormat(DebputyParsedContent): 

-

2250 paths: NotRequired[List[FileSystemMatchRule]] 

-

2251 path: NotRequired[ 

-

2252 Annotated[FileSystemMatchRule, DebputyParseHint.target_attribute("paths")] 

-

2253 ] 

-

2254 search_dir: NotRequired[ 

-

2255 Annotated[ 

-

2256 FileSystemExactMatchRule, DebputyParseHint.target_attribute("search_dirs") 

-

2257 ] 

-

2258 ] 

-

2259 search_dirs: NotRequired[List[FileSystemExactMatchRule]] 

-

2260 required_when: NotRequired[ManifestCondition] 

-

2261 

-

2262 

-

2263class ParsedInstallDiscardRule(DebputyParsedContent): 

-

2264 paths: List[FileSystemMatchRule] 

-

2265 search_dirs: NotRequired[List[FileSystemExactMatchRule]] 

-

2266 required_when: NotRequired[ManifestCondition] 

-

2267 

-

2268 

-

2269class DpkgConffileManagementRuleBase(DebputyParsedContent): 

-

2270 prior_to_version: NotRequired[str] 

-

2271 owning_package: NotRequired[str] 

-

2272 

-

2273 

-

2274class DpkgRenameConffileRule(DpkgConffileManagementRuleBase): 

-

2275 source: str 

-

2276 target: str 

-

2277 

-

2278 

-

2279class DpkgRemoveConffileRule(DpkgConffileManagementRuleBase): 

-

2280 path: str 

-

2281 

-

2282 

-

2283class MCAnyOfAllOf(DebputyParsedContent): 

-

2284 conditions: List[ManifestCondition] 

-

2285 

-

2286 

-

2287class MCNot(DebputyParsedContent): 

-

2288 negated_condition: Annotated[ 

-

2289 ManifestCondition, DebputyParseHint.manifest_attribute("not") 

-

2290 ] 

-

2291 

-

2292 

-

2293class MCArchMatches(DebputyParsedContent): 

-

2294 arch_matches: str 

-

2295 

-

2296 

-

2297class MCBuildProfileMatches(DebputyParsedContent): 

-

2298 build_profile_matches: str 

-

2299 

-

2300 

-

2301def _parse_filename( 

-

2302 filename: str, 

-

2303 attribute_path: AttributePath, 

-

2304 *, 

-

2305 allow_directories: bool = True, 

-

2306) -> str: 

-

2307 try: 

-

2308 normalized_path = _normalize_path(filename, with_prefix=False) 

-

2309 except ValueError as e: 

-

2310 raise ManifestParseException( 

-

2311 f'Error parsing the path "{filename}" defined in {attribute_path.path}: {e.args[0]}' 

-

2312 ) from None 

-

2313 if not allow_directories and filename.endswith("/"): 2313 ↛ 2314line 2313 didn't jump to line 2314, because the condition on line 2313 was never true

-

2314 raise ManifestParseException( 

-

2315 f'The path "{filename}" in {attribute_path.path} ends with "/" implying it is a directory,' 

-

2316 f" but this feature can only be used for files" 

-

2317 ) 

-

2318 if normalized_path == ".": 2318 ↛ 2319line 2318 didn't jump to line 2319, because the condition on line 2318 was never true

-

2319 raise ManifestParseException( 

-

2320 f'The path "{filename}" in {attribute_path.path} looks like the root directory,' 

-

2321 f" but this feature does not allow the root directory here." 

-

2322 ) 

-

2323 return normalized_path 

-

2324 

-

2325 
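As a standalone illustration of the checks `_parse_filename` performs (trailing-`/` rejection and root-directory rejection), with the normalization simplified to `os.path.normpath` instead of debputy's `_normalize_path` and `AttributePath` error reporting:

```python
import os.path


# Standalone illustration of the checks above; normalization is simplified
# and errors are plain ValueError instead of ManifestParseException.
def parse_filename_sketch(filename: str, *, allow_directories: bool = True) -> str:
    normalized = os.path.normpath(filename.strip("/"))
    if not allow_directories and filename.endswith("/"):
        raise ValueError(f'"{filename}" ends with "/" implying a directory')
    if normalized == ".":
        raise ValueError(f'"{filename}" is the root directory, which is not allowed')
    return normalized


print(parse_filename_sketch("/etc/foo.conf", allow_directories=False))  # etc/foo.conf
# parse_filename_sketch("/etc/", allow_directories=False)  # would raise ValueError
```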

-

2326def _with_alt_form(t: Type[TypedDict]): 

-

2327 return Union[ 

-

2328 t, 

-

2329 List[str], 

-

2330 str, 

-

2331 ] 

-

2332 

-

2333 

-

2334def _dpkg_conffile_rename( 

-

2335 _name: str, 

-

2336 parsed_data: DpkgRenameConffileRule, 

-

2337 path: AttributePath, 

-

2338 _context: ParserContextData, 

-

2339) -> DpkgMaintscriptHelperCommand: 

-

2340 source_file = parsed_data["source"] 

-

2341 target_file = parsed_data["target"] 

-

2342 normalized_source = _parse_filename( 

-

2343 source_file, 

-

2344 path["source"], 

-

2345 allow_directories=False, 

-

2346 ) 

-

2347 path.path_hint = source_file 

-

2348 

-

2349 normalized_target = _parse_filename( 

-

2350 target_file, 

-

2351 path["target"], 

-

2352 allow_directories=False, 

-

2353 ) 

-

2354 normalized_source = "/" + normalized_source 

-

2355 normalized_target = "/" + normalized_target 

-

2356 

-

2357 if normalized_source == normalized_target: 2357 ↛ 2358line 2357 didn't jump to line 2358, because the condition on line 2357 was never true

-

2358 raise ManifestParseException( 

-

2359 f"Invalid rename defined in {path.path}: The source and target path are the same!" 

-

2360 ) 

-

2361 

-

2362 version, owning_package = _parse_conffile_prior_version_and_owning_package( 

-

2363 parsed_data, path 

-

2364 ) 

-

2365 return DpkgMaintscriptHelperCommand.mv_conffile( 

-

2366 path, 

-

2367 normalized_source, 

-

2368 normalized_target, 

-

2369 version, 

-

2370 owning_package, 

-

2371 ) 

-

2372 

-

2373 

-

2374def _dpkg_conffile_remove( 

-

2375 _name: str, 

-

2376 parsed_data: DpkgRemoveConffileRule, 

-

2377 path: AttributePath, 

-

2378 _context: ParserContextData, 

-

2379) -> DpkgMaintscriptHelperCommand: 

-

2380 source_file = parsed_data["path"] 

-

2381 normalized_source = _parse_filename( 

-

2382 source_file, 

-

2383 path["path"], 

-

2384 allow_directories=False, 

-

2385 ) 

-

2386 path.path_hint = source_file 

-

2387 

-

2388 normalized_source = "/" + normalized_source 

-

2389 

-

2390 version, owning_package = _parse_conffile_prior_version_and_owning_package( 

-

2391 parsed_data, path 

-

2392 ) 

-

2393 return DpkgMaintscriptHelperCommand.rm_conffile( 

-

2394 path, 

-

2395 normalized_source, 

-

2396 version, 

-

2397 owning_package, 

-

2398 ) 

-

2399 

-

2400 

-

2401def _parse_conffile_prior_version_and_owning_package( 

-

2402 d: DpkgConffileManagementRuleBase, 

-

2403 attribute_path: AttributePath, 

-

2404) -> Tuple[Optional[str], Optional[str]]: 

-

2405 prior_version = d.get("prior_to_version") 

-

2406 owning_package = d.get("owning_package") 

-

2407 

-

2408 if prior_version is not None and not PKGVERSION_REGEX.match(prior_version): 2408 ↛ 2409line 2408 didn't jump to line 2409, because the condition on line 2408 was never true

-

2409 p = attribute_path["prior_to_version"] 

-

2410 raise ManifestParseException( 

-

2411 f"The {MK_CONFFILE_MANAGEMENT_X_PRIOR_TO_VERSION} parameter in {p.path} must be a" 

-

2412 r" valid package version (i.e., match (?:\d+:)?\d[0-9A-Za-z.+:~]*(?:-[0-9A-Za-z.+:~]+)*)." 

-

2413 ) 

-

2414 

-

2415 if owning_package is not None and not PKGNAME_REGEX.match(owning_package): 2415 ↛ 2416line 2415 didn't jump to line 2416, because the condition on line 2415 was never true

-

2416 p = attribute_path["owning_package"] 

-

2417 raise ManifestParseException( 

-

2418 f"The {MK_CONFFILE_MANAGEMENT_X_OWNING_PACKAGE} parameter in {p.path} must be a valid" 

-

2419 f" package name (i.e., match {PKGNAME_REGEX.pattern})." 

-

2420 ) 

-

2421 

-

2422 return prior_version, owning_package 

-

2423 

-

2424 
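The version pattern quoted in the error message can be exercised directly. This sketch uses the pattern verbatim from the message above, though the real `PKGVERSION_REGEX` may be anchored or constructed slightly differently:

```python
import re

# The pattern quoted verbatim in the error message above.
version_re = re.compile(r"(?:\d+:)?\d[0-9A-Za-z.+:~]*(?:-[0-9A-Za-z.+:~]+)*")

for candidate in ("1.0-2", "2:1.2~rc1-1+deb12u1", "not-a-version"):
    verdict = "valid" if version_re.fullmatch(candidate) else "invalid"
    print(candidate, "=>", verdict)
```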

-

2425def _install_rule_handler( 

-

2426 _name: str, 

-

2427 parsed_data: ParsedInstallRule, 

-

2428 path: AttributePath, 

-

2429 context: ParserContextData, 

-

2430) -> InstallRule: 

-

2431 sources = parsed_data["sources"] 

-

2432 install_as = parsed_data.get("install_as") 

-

2433 into = parsed_data.get("into") 

-

2434 dest_dir = parsed_data.get("dest_dir") 

-

2435 condition = parsed_data.get("when") 

-

2436 if not into: 

-

2437 into = [context.single_binary_package(path, package_attribute="into")] 

-

2438 into = frozenset(into) 

-

2439 if install_as is not None: 

-

2440 assert len(sources) == 1 

-

2441 assert dest_dir is None 

-

2442 return InstallRule.install_as( 

-

2443 sources[0], 

-

2444 install_as.match_rule.path, 

-

2445 into, 

-

2446 path.path, 

-

2447 condition, 

-

2448 ) 

-

2449 return InstallRule.install_dest( 

-

2450 sources, 

-

2451 dest_dir.match_rule.path if dest_dir is not None else None, 

-

2452 into, 

-

2453 path.path, 

-

2454 condition, 

-

2455 ) 

-

2456 

-

2457 

-

2458def _multi_dest_install_rule_handler( 

-

2459 _name: str, 

-

2460 parsed_data: ParsedMultiDestInstallRule, 

-

2461 path: AttributePath, 

-

2462 context: ParserContextData, 

-

2463) -> InstallRule: 

-

2464 sources = parsed_data["sources"] 

-

2465 install_as = parsed_data.get("install_as") 

-

2466 into = parsed_data.get("into") 

-

2467 dest_dirs = parsed_data.get("dest_dirs") 

-

2468 condition = parsed_data.get("when") 

-

2469 if not into: 2469 ↛ 2471line 2469 didn't jump to line 2471, because the condition on line 2469 was never false

-

2470 into = [context.single_binary_package(path, package_attribute="into")] 

-

2471 into = frozenset(into) 

-

2472 if install_as is not None: 

-

2473 assert len(sources) == 1 

-

2474 assert dest_dirs is None 

-

2475 if len(install_as) < 2: 2475 ↛ 2476line 2475 didn't jump to line 2476, because the condition on line 2475 was never true

-

2476 raise ManifestParseException( 

-

2477 f"The {path['install_as'].path} attribute must contain at least two paths." 

-

2478 ) 

-

2479 return InstallRule.install_multi_as( 

-

2480 sources[0], 

-

2481 [p.match_rule.path for p in install_as], 

-

2482 into, 

-

2483 path.path, 

-

2484 condition, 

-

2485 ) 

-

2486 if dest_dirs is None: 2486 ↛ 2487line 2486 didn't jump to line 2487, because the condition on line 2486 was never true

-

2487 raise ManifestParseException( 

-

2488 f"Either the `as` or the `dest-dirs` key must be provided at {path.path}" 

-

2489 ) 

-

2490 if len(dest_dirs) < 2: 2490 ↛ 2491line 2490 didn't jump to line 2491, because the condition on line 2490 was never true

-

2491 raise ManifestParseException( 

-

2492 f"The {path['dest_dirs'].path} attribute must contain at least two paths." 

-

2493 ) 

-

2494 return InstallRule.install_multi_dest( 

-

2495 sources, 

-

2496 [dd.match_rule.path for dd in dest_dirs], 

-

2497 into, 

-

2498 path.path, 

-

2499 condition, 

-

2500 ) 

-

2501 

-

2502 

-

2503def _install_docs_rule_handler( 

-

2504 _name: str, 

-

2505 parsed_data: ParsedInstallRule, 

-

2506 path: AttributePath, 

-

2507 context: ParserContextData, 

-

2508) -> InstallRule: 

-

2509 sources = parsed_data["sources"] 

-

2510 install_as = parsed_data.get("install_as") 

-

2511 into = parsed_data.get("into") 

-

2512 dest_dir = parsed_data.get("dest_dir") 

-

2513 condition = parsed_data.get("when") 

-

2514 if not into: 2514 ↛ 2520line 2514 didn't jump to line 2520, because the condition on line 2514 was never false

-

2515 into = [ 

-

2516 context.single_binary_package( 

-

2517 path, package_type="deb", package_attribute="into" 

-

2518 ) 

-

2519 ] 

-

2520 into = frozenset(into) 

-

2521 if install_as is not None: 2521 ↛ 2522line 2521 didn't jump to line 2522, because the condition on line 2521 was never true

-

2522 assert len(sources) == 1 

-

2523 assert dest_dir is None 

-

2524 return InstallRule.install_doc_as( 

-

2525 sources[0], 

-

2526 install_as.match_rule.path, 

-

2527 into, 

-

2528 path.path, 

-

2529 condition, 

-

2530 ) 

-

2531 return InstallRule.install_doc( 

-

2532 sources, 

-

2533 dest_dir, 

-

2534 into, 

-

2535 path.path, 

-

2536 condition, 

-

2537 ) 

-

2538 

-

2539 

-

2540def _install_examples_rule_handler( 

-

2541 _name: str, 

-

2542 parsed_data: ParsedInstallExamplesRule, 

-

2543 path: AttributePath, 

-

2544 context: ParserContextData, 

-

2545) -> InstallRule: 

-

2546 sources = parsed_data["sources"] 

-

2547 into = parsed_data.get("into") 

-

2548 if not into: 2548 ↛ 2554line 2548 didn't jump to line 2554, because the condition on line 2548 was never false

-

2549 into = [ 

-

2550 context.single_binary_package( 

-

2551 path, package_type="deb", package_attribute="into" 

-

2552 ) 

-

2553 ] 

-

2554 condition = parsed_data.get("when") 

-

2555 into = frozenset(into) 

-

2556 return InstallRule.install_examples( 

-

2557 sources, 

-

2558 into, 

-

2559 path.path, 

-

2560 condition, 

-

2561 ) 

-

2562 

-

2563 

-

2564def _install_man_rule_handler( 

-

2565 _name: str, 

-

2566 parsed_data: ParsedInstallManpageRule, 

-

2567 attribute_path: AttributePath, 

-

2568 context: ParserContextData, 

-

2569) -> InstallRule: 

-

2570 sources = parsed_data["sources"] 

-

2571 language = parsed_data.get("language") 

-

2572 section = parsed_data.get("section") 

-

2573 

-

2574 if language is not None: 

-

2575 is_lang_ok = language in ( 

-

2576 "C", 

-

2577 "derive-from-basename", 

-

2578 "derive-from-path", 

-

2579 ) 

-

2580 

-

2581 if not is_lang_ok and len(language) == 2 and language.islower(): 2581 ↛ 2582line 2581 didn't jump to line 2582, because the condition on line 2581 was never true

-

2582 is_lang_ok = True 

-

2583 

-

2584 if ( 2584 ↛ 2591line 2584 didn't jump to line 2591

-

2585 not is_lang_ok 

-

2586 and len(language) == 5 

-

2587 and language[2] == "_" 

-

2588 and language[:2].islower() 

-

2589 and language[3:].isupper() 

-

2590 ): 

-

2591 is_lang_ok = True 

-

2592 

-

2593 if not is_lang_ok: 2593 ↛ 2594line 2593 didn't jump to line 2594, because the condition on line 2593 was never true

-

2594 raise ManifestParseException( 

-

2595 f'The language attribute must be a 2-letter language code ("de"), a 5-letter language + dialect' 

-

2596 f' code ("pt_BR"), "derive-from-basename", "derive-from-path", or omitted. The problematic' 

-

2597 f' definition is {attribute_path["language"]}' 

-

2598 ) 

-

2599 

-

2600 if section is not None and (section < 1 or section > 9): 2600 ↛ 2601line 2600 didn't jump to line 2601, because the condition on line 2600 was never true

-

2601 raise ManifestParseException( 

-

2602 f"The section attribute must in the range [1-9] or omitted. The problematic definition is" 

-

2603 f' {attribute_path["section"]}' 

-

2604 ) 

-

2605 if section is None and any(s.raw_match_rule.endswith(".gz") for s in sources): 2605 ↛ 2606line 2605 didn't jump to line 2606, because the condition on line 2605 was never true

-

2606 raise ManifestParseException( 

-

2607 "Sorry, compressed man pages are not supported without an explicit `section` definition at the moment." 

-

2608 " This limitation may be removed in the future. Problematic definition from" 

-

2609 f' {attribute_path["sources"]}' 

-

2610 ) 

-

2611 if any(s.raw_match_rule.endswith("/") for s in sources): 2611 ↛ 2612line 2611 didn't jump to line 2612, because the condition on line 2611 was never true

-

2612 raise ManifestParseException( 

-

2613 'The install-man rule can only match non-directories. Therefore, none of the sources can end with "/",' 

-

2614 " as that implies the source is for a directory. Problematic definition from" 

-

2615 f' {attribute_path["sources"]}' 

-

2616 ) 

-

2617 into = parsed_data.get("into") 

-

2618 if not into: 2618 ↛ 2624line 2618 didn't jump to line 2624, because the condition on line 2618 was never false

-

2619 into = [ 

-

2620 context.single_binary_package( 

-

2621 attribute_path, package_type="deb", package_attribute="into" 

-

2622 ) 

-

2623 ] 

-

2624 condition = parsed_data.get("when") 

-

2625 into = frozenset(into) 

-

2626 return InstallRule.install_man( 

-

2627 sources, 

-

2628 into, 

-

2629 section, 

-

2630 language, 

-

2631 attribute_path.path, 

-

2632 condition, 

-

2633 ) 

-

2634 

-

2635 
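The accepted `language` values reduce to a small predicate: the keywords `C`, `derive-from-basename` and `derive-from-path`, a two-letter lowercase code, or a five-character `ll_CC` code. A compact restatement, equivalent to the checks above but not taken from debputy itself:

```python
import re

# Compact restatement of the language check above: the keywords, a
# two-letter lowercase code ("de"), or a five-character ll_CC code ("pt_BR").
_LANG_RE = re.compile(r"[a-z]{2}(_[A-Z]{2})?\Z")


def is_valid_manpage_language(language: str) -> bool:
    if language in ("C", "derive-from-basename", "derive-from-path"):
        return True
    return _LANG_RE.match(language) is not None


for lang in ("de", "pt_BR", "derive-from-path", "DE", "pt-br"):
    print(lang, is_valid_manpage_language(lang))
```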

-

2636def _install_discard_rule_handler( 

-

2637 _name: str, 

-

2638 parsed_data: ParsedInstallDiscardRule, 

-

2639 path: AttributePath, 

-

2640 _context: ParserContextData, 

-

2641) -> InstallRule: 

-

2642 limit_to = parsed_data.get("search_dirs") 

-

2643 if limit_to is not None and not limit_to: 2643 ↛ 2644line 2643 didn't jump to line 2644, because the condition on line 2643 was never true

-

2644 p = path["search_dirs"] 

-

2645 raise ManifestParseException(f"The {p.path} attribute must not be empty.") 

-

2646 condition = parsed_data.get("required_when") 

-

2647 return InstallRule.discard_paths( 

-

2648 parsed_data["paths"], 

-

2649 path.path, 

-

2650 condition, 

-

2651 limit_to=limit_to, 

-

2652 ) 

-

2653 

-

2654 

-

2655def _transformation_move_handler( 

-

2656 _name: str, 

-

2657 parsed_data: TransformationMoveRuleSpec, 

-

2658 path: AttributePath, 

-

2659 _context: ParserContextData, 

-

2660) -> TransformationRule: 

-

2661 source_match = parsed_data["source"] 

-

2662 target_path = parsed_data["target"].match_rule.path 

-

2663 condition = parsed_data.get("when") 

-

2664 

-

2665 if ( 2665 ↛ 2669line 2665 didn't jump to line 2669

-

2666 isinstance(source_match, ExactFileSystemPath) 

-

2667 and source_match.path == target_path 

-

2668 ): 

-

2669 raise ManifestParseException( 

-

2670 f"The transformation rule {path.path} requests a move of {source_match} to" 

-

2671 f" {target_path}, which is the same path" 

-

2672 ) 

-

2673 return MoveTransformationRule( 

-

2674 source_match.match_rule, 

-

2675 target_path, 

-

2676 target_path.endswith("/"), 

-

2677 path, 

-

2678 condition, 

-

2679 ) 

-

2680 

-

2681 

-

2682def _transformation_remove_handler( 

-

2683 _name: str, 

-

2684 parsed_data: TransformationRemoveRuleSpec, 

-

2685 attribute_path: AttributePath, 

-

2686 _context: ParserContextData, 

-

2687) -> TransformationRule: 

-

2688 paths = parsed_data["paths"] 

-

2689 keep_empty_parent_dirs = parsed_data.get("keep_empty_parent_dirs", False) 

-

2690 

-

2691 return RemoveTransformationRule( 

-

2692 [m.match_rule for m in paths], 

-

2693 keep_empty_parent_dirs, 

-

2694 attribute_path, 

-

2695 ) 

-

2696 

-

2697 

-

2698def _transformation_create_symlink( 

-

2699 _name: str, 

-

2700 parsed_data: CreateSymlinkRule, 

-

2701 attribute_path: AttributePath, 

-

2702 _context: ParserContextData, 

-

2703) -> TransformationRule: 

-

2704 link_dest = parsed_data["path"].match_rule.path 

-

2705 replacement_rule: CreateSymlinkReplacementRule = parsed_data.get( 

-

2706 "replacement_rule", 

-

2707 "abort-on-non-empty-directory", 

-

2708 ) 

-

2709 try: 

-

2710 link_target = debian_policy_normalize_symlink_target( 

-

2711 link_dest, 

-

2712 parsed_data["target"].symlink_target, 

-

2713 ) 

-

2714 except ValueError as e: # pragma: no cover 

-

2715 raise AssertionError( 

-

2716 "Debian Policy normalization should not raise ValueError here" 

-

2717 ) from e 

-

2718 

-

2719 condition = parsed_data.get("when") 

-

2720 

-

2721 return CreateSymlinkPathTransformationRule( 

-

2722 link_target, 

-

2723 link_dest, 

-

2724 replacement_rule, 

-

2725 attribute_path, 

-

2726 condition, 

-

2727 ) 

-

2728 

-

2729 

-

2730def _transformation_path_metadata( 

-

2731 _name: str, 

-

2732 parsed_data: PathManifestRule, 

-

2733 attribute_path: AttributePath, 

-

2734 _context: ParserContextData, 

-

2735) -> TransformationRule: 

-

2736 match_rules = parsed_data["paths"] 

-

2737 owner = parsed_data.get("owner") 

-

2738 group = parsed_data.get("group") 

-

2739 mode = parsed_data.get("mode") 

-

2740 recursive = parsed_data.get("recursive", False) 

-

2741 capabilities = parsed_data.get("capabilities") 

-

2742 capability_mode = parsed_data.get("capability_mode") 

-

2743 

-

2744 if capabilities is not None: 2744 ↛ 2745line 2744 didn't jump to line 2745, because the condition on line 2744 was never true

-

2745 if capability_mode is None: 

-

2746 capability_mode = SymbolicMode.parse_filesystem_mode( 

-

2747 "a-s", 

-

2748 attribute_path["capability-mode"], 

-

2749 ) 

-

2750 validate_cap = check_cap_checker() 

-

2751 validate_cap(capabilities, attribute_path["capabilities"].path) 

-

2752 elif capability_mode is not None and capabilities is None: 2752 ↛ 2753line 2752 didn't jump to line 2753, because the condition on line 2752 was never true

-

2753 raise ManifestParseException( 

-

2754 "The attribute capability-mode cannot be provided without capabilities" 

-

2755 f" in {attribute_path.path}" 

-

2756 ) 

-

2757 if owner is None and group is None and mode is None and capabilities is None: 2757 ↛ 2758line 2757 didn't jump to line 2758, because the condition on line 2757 was never true

-

2758 raise ManifestParseException( 

-

2759 "At least one of owner, group, mode, or capabilities must be provided" 

-

2760 f" in {attribute_path.path}" 

-

2761 ) 

-

2762 condition = parsed_data.get("when") 

-

2763 

-

2764 return PathMetadataTransformationRule( 

-

2765 [m.match_rule for m in match_rules], 

-

2766 owner, 

-

2767 group, 

-

2768 mode, 

-

2769 recursive, 

-

2770 capabilities, 

-

2771 capability_mode, 

-

2772 attribute_path.path, 

-

2773 condition, 

-

2774 ) 

-

2775 

-

2776 

-

2777def _transformation_mkdirs( 

-

2778 _name: str, 

-

2779 parsed_data: EnsureDirectoryRule, 

-

2780 attribute_path: AttributePath, 

-

2781 _context: ParserContextData, 

-

2782) -> TransformationRule: 

-

2783 provided_paths = parsed_data["paths"] 

-

2784 owner = parsed_data.get("owner") 

-

2785 group = parsed_data.get("group") 

-

2786 mode = parsed_data.get("mode") 

-

2787 

-

2788 condition = parsed_data.get("when") 

-

2789 

-

2790 return CreateDirectoryTransformationRule( 

-

2791 [p.match_rule.path for p in provided_paths], 

-

2792 owner, 

-

2793 group, 

-

2794 mode, 

-

2795 attribute_path.path, 

-

2796 condition, 

-

2797 ) 

-

2798 

-

2799 

-

2800def _at_least_two( 

-

2801 content: List[Any], 

-

2802 attribute_path: AttributePath, 

-

2803 attribute_name: str, 

-

2804) -> None: 

-

2805 if len(content) < 2: 2805 ↛ 2806line 2805 didn't jump to line 2806, because the condition on line 2805 was never true

-

2806 raise ManifestParseException( 

-

2807 f"Must have at least two conditions in {attribute_path[attribute_name].path}" 

-

2808 ) 

-

2809 

-

2810 

-

2811def _mc_any_of( 

-

2812 name: str, 

-

2813 parsed_data: MCAnyOfAllOf, 

-

2814 attribute_path: AttributePath, 

-

2815 _context: ParserContextData, 

-

2816) -> ManifestCondition: 

-

2817 conditions = parsed_data["conditions"] 

-

2818 _at_least_two(conditions, attribute_path, "conditions") 

-

2819 if name == "any-of": 2819 ↛ 2820line 2819 didn't jump to line 2820, because the condition on line 2819 was never true

-

2820 return ManifestCondition.any_of(conditions) 

-

2821 assert name == "all-of" 

-

2822 return ManifestCondition.all_of(conditions) 

-

2823 

-

2824 

-

2825def _mc_not( 

-

2826 _name: str, 

-

2827 parsed_data: MCNot, 

-

2828 _attribute_path: AttributePath, 

-

2829 _context: ParserContextData, 

-

2830) -> ManifestCondition: 

-

2831 condition = parsed_data["negated_condition"] 

-

2832 return condition.negated() 

-

2833 

-

2834 

-

2835def _extract_arch_matches( 

-

2836 parsed_data: MCArchMatches, 

-

2837 attribute_path: AttributePath, 

-

2838) -> List[str]: 

-

2839 arch_matches_as_str = parsed_data["arch_matches"] 

-

2840 # Can we check arch list for typos? If we do, it must be tight in how close matches it does. 

-

2841 # Consider "arm" vs. "armel" (edit distance 2, but both are valid). Likewise, names often 

-

2842 # include a bit indicator "foo", "foo32", "foo64" - all of these have an edit distance of 2 

-

2843 # from each other. 

-

2844 arch_matches_as_list = arch_matches_as_str.split() 

-

2845 attr_path = attribute_path["arch_matches"] 

-

2846 if not arch_matches_as_list: 2846 ↛ 2847line 2846 didn't jump to line 2847, because the condition on line 2846 was never true

-

2847 raise ManifestParseException( 

-

2848 f"The condition at {attr_path.path} must not be empty" 

-

2849 ) 

-

2850 

-

2851 if arch_matches_as_list[0].startswith("[") or arch_matches_as_list[-1].endswith( 2851 ↛ 2854line 2851 didn't jump to line 2854, because the condition on line 2851 was never true

-

2852 "]" 

-

2853 ): 

-

2854 raise ManifestParseException( 

-

2855 f"The architecture match at {attr_path.path} must be defined without enclosing it with " 

-

2856 '"[" or/and "]" brackets' 

-

2857 ) 

-

2858 return arch_matches_as_list 

-

2859 

-

2860 

-

2861def _mc_source_context_arch_matches( 

-

2862 _name: str, 

-

2863 parsed_data: MCArchMatches, 

-

2864 attribute_path: AttributePath, 

-

2865 _context: ParserContextData, 

-

2866) -> ManifestCondition: 

-

2867 arch_matches = _extract_arch_matches(parsed_data, attribute_path) 

-

2868 return SourceContextArchMatchManifestCondition(arch_matches) 

-

2869 

-

2870 

-

2871def _mc_package_context_arch_matches( 

-

2872 name: str, 

-

2873 parsed_data: MCArchMatches, 

-

2874 attribute_path: AttributePath, 

-

2875 context: ParserContextData, 

-

2876) -> ManifestCondition: 

-

2877 arch_matches = _extract_arch_matches(parsed_data, attribute_path) 

-

2878 

-

2879 if not context.is_in_binary_package_state: 

-

2880 raise ManifestParseException( 

-

2881 f'The condition "{name}" at {attribute_path.path} can only be used in the context of a binary package.' 

-

2882 ) 

-

2883 

-

2884 package_state = context.current_binary_package_state 

-

2885 if package_state.binary_package.is_arch_all: 

-

2886 result = context.dpkg_arch_query_table.architecture_is_concerned( 

-

2887 "all", arch_matches 

-

2888 ) 

-

2889 attr_path = attribute_path["arch_matches"] 

-

2890 raise ManifestParseException( 

-

2891 f"The package architecture restriction at {attr_path.path} is applied to the" 

-

2892 f' "Architecture: all" package {package_state.binary_package.name}, which does not make sense' 

-

2893 f" as the condition will always resolves to `{str(result).lower()}`." 

-

2894 f" If you **really** need an architecture specific constraint for this rule, consider using" 

-

2895 f' "source-context-arch-matches" instead. However, this is a very rare use-case!' 

-

2896 ) 

-

2897 return BinaryPackageContextArchMatchManifestCondition(arch_matches) 

-

2898 

-

2899 

-

2900def _mc_arch_matches( 

-

2901 name: str, 

-

2902 parsed_data: MCArchMatches, 

-

2903 attribute_path: AttributePath, 

-

2904 context: ParserContextData, 

-

2905) -> ManifestCondition: 

-

2906 if context.is_in_binary_package_state: 2906 ↛ 2907line 2906 didn't jump to line 2907, because the condition on line 2906 was never true

-

2907 return _mc_package_context_arch_matches( 

-

2908 name, parsed_data, attribute_path, context 

-

2909 ) 

-

2910 return _mc_source_context_arch_matches(name, parsed_data, attribute_path, context) 

-

2911 

-

2912 

-

2913def _mc_build_profile_matches( 

-

2914 _name: str, 

-

2915 parsed_data: MCBuildProfileMatches, 

-

2916 attribute_path: AttributePath, 

-

2917 _context: ParserContextData, 

-

2918) -> ManifestCondition: 

-

2919 build_profile_spec = parsed_data["build_profile_matches"].strip() 

-

2920 attr_path = attribute_path["build_profile_matches"] 

-

2921 if not build_profile_spec: 2921 ↛ 2922line 2921 didn't jump to line 2922, because the condition on line 2921 was never true

-

2922 raise ManifestParseException( 

-

2923 f"The condition at {attr_path.path} must not be empty" 

-

2924 ) 

-

2925 try: 

-

2926 active_profiles_match(build_profile_spec, frozenset()) 

-

2927 except ValueError as e: 

-

2928 raise ManifestParseException( 

-

2929 f"Could not parse the build specification at {attr_path.path}: {e.args[0]}" 

-

2930 ) 

-

2931 return BuildProfileMatch(build_profile_spec) 

-
diff --git a/coverage-report/d_d5d6843b45eec01e_service_management_py.html b/coverage-report/d_d5d6843b45eec01e_service_management_py.html
deleted file mode 100644
index 7281c92..0000000
--- a/coverage-report/d_d5d6843b45eec01e_service_management_py.html
+++ /dev/null
@@ -1,549 +0,0 @@

1import collections 

-

2import dataclasses 

-

3import os 

-

4import textwrap 

-

5from typing import Dict, List, Literal, Iterable, Sequence 

-

6 

-

7from debputy.packages import BinaryPackage 

-

8from debputy.plugin.api.spec import ( 

-

9 ServiceRegistry, 

-

10 VirtualPath, 

-

11 PackageProcessingContext, 

-

12 BinaryCtrlAccessor, 

-

13 ServiceDefinition, 

-

14) 

-

15from debputy.util import _error, assume_not_none 

-

16 

-

17DPKG_ROOT = '"${DPKG_ROOT}"' 

-

18EMPTY_DPKG_ROOT_CONDITION = '[ -z "${DPKG_ROOT}" ]' 

-

19SERVICE_MANAGER_IS_SYSTEMD_CONDITION = "[ -d /run/systemd/system ]" 

-

20 

-

21 

-

22@dataclasses.dataclass(slots=True) 

-

23class SystemdServiceContext: 

-

24 had_install_section: bool 

-

25 

-

26 

-

27@dataclasses.dataclass(slots=True) 

-

28class SystemdUnit: 

-

29 path: VirtualPath 

-

30 names: List[str] 

-

31 type_of_service: str 

-

32 service_scope: str 

-

33 enable_by_default: bool 

-

34 start_by_default: bool 

-

35 had_install_section: bool 

-

36 

-

37 

-

38def detect_systemd_service_files( 

-

39 fs_root: VirtualPath, 

-

40 service_registry: ServiceRegistry[SystemdServiceContext], 

-

41 context: PackageProcessingContext, 

-

42) -> None: 

-

43 pkg = context.binary_package 

-

44 systemd_units = _find_and_analyze_systemd_service_files(pkg, fs_root, "system") 

-

45 for unit in systemd_units: 

-

46 service_registry.register_service( 

-

47 unit.path, 

-

48 unit.names, 

-

49 type_of_service=unit.type_of_service, 

-

50 service_scope=unit.service_scope, 

-

51 enable_by_default=unit.enable_by_default, 

-

52 start_by_default=unit.start_by_default, 

-

53 default_upgrade_rule="restart" if unit.start_by_default else "do-nothing", 

-

54 service_context=SystemdServiceContext( 

-

55 unit.had_install_section, 

-

56 ), 

-

57 ) 

-

58 

-

59 

-

60def generate_snippets_for_systemd_units( 

-

61 services: Sequence[ServiceDefinition[SystemdServiceContext]], 

-

62 ctrl: BinaryCtrlAccessor, 

-

63 _context: PackageProcessingContext, 

-

64) -> None: 

-

65 stop_before_upgrade: List[str] = [] 

-

66 stop_then_start_scripts = [] 

-

67 on_purge = [] 

-

68 start_on_install = [] 

-

69 action_on_upgrade = collections.defaultdict(list) 

-

70 assert services 

-

71 

-

72 for service_def in services: 

-

73 if service_def.auto_enable_on_install: 

-

74 template = """\ 

-

75 if deb-systemd-helper debian-installed {UNITFILE}; then 

-

76 # The following line should be removed in trixie or trixie+1 

-

77 deb-systemd-helper unmask {UNITFILE} >/dev/null || true 

-

78 

-

79 if deb-systemd-helper --quiet was-enabled {UNITFILE}; then 

-

80 # Create new symlinks, if any. 

-

81 deb-systemd-helper enable {UNITFILE} >/dev/null || true 

-

82 fi 

-

83 fi 

-

84 

-

85 # Update the statefile to add new symlinks (if any), which need to be cleaned 

-

86 # up on purge. Also remove old symlinks. 

-

87 deb-systemd-helper update-state {UNITFILE} >/dev/null || true 

-

88 """ 

-

89 else: 

-

90 template = """\ 

-

91 # The following line should be removed in trixie or trixie+1 

-

92 deb-systemd-helper unmask {UNITFILE} >/dev/null || true 

-

93 

-

94 # was-enabled defaults to true, so new installations run enable. 

-

95 if deb-systemd-helper --quiet was-enabled {UNITFILE}; then 

-

96 # Enables the unit on first installation, creates new 

-

97 # symlinks on upgrades if the unit file has changed. 

-

98 deb-systemd-helper enable {UNITFILE} >/dev/null || true 

-

99 else 

-

100 # Update the statefile to add new symlinks (if any), which need to be 

-

101 # cleaned up on purge. Also remove old symlinks. 

-

102 deb-systemd-helper update-state {UNITFILE} >/dev/null || true 

-

103 fi 

-

104 """ 

-

105 service_name = service_def.name 

-

106 

-

107 if assume_not_none(service_def.service_context).had_install_section: 

-

108 ctrl.maintscript.on_configure( 

-

109 template.format( 

-

110 UNITFILE=ctrl.maintscript.escape_shell_words(service_name), 

-

111 ) 

-

112 ) 

-

113 on_purge.append(service_name) 

-

114 elif service_def.auto_enable_on_install: 114 ↛ 115line 114 didn't jump to line 115, because the condition on line 114 was never true

-

115 _error( 

-

116 f'The service "{service_name}" cannot be enabled under "systemd" as' 

-

117 f' it has no "[Install]" section. Please correct {service_def.definition_source}' 

-

118 f' so that it does not enable the service or does not apply to "systemd"' 

-

119 ) 

-

120 

-

121 if service_def.auto_start_on_install: 121 ↛ 123line 121 didn't jump to line 123, because the condition on line 121 was never false

-

122 start_on_install.append(service_name) 

-

123 if service_def.on_upgrade == "stop-then-start": 123 ↛ 124line 123 didn't jump to line 124, because the condition on line 123 was never true

-

124 stop_then_start_scripts.append(service_name) 

-

125 elif service_def.on_upgrade in ("restart", "reload"): 125 ↛ 128line 125 didn't jump to line 128, because the condition on line 125 was never false

-

126 action: str = service_def.on_upgrade 

-

127 action_on_upgrade[action].append(service_name) 

-

128 elif service_def.on_upgrade != "do-nothing": 

-

129 raise AssertionError( 

-

130 f"Missing support for on_upgrade rule: {service_def.on_upgrade}" 

-

131 ) 

-

132 

-

133 if start_on_install or action_on_upgrade: 133 ↛ 170line 133 didn't jump to line 170, because the condition on line 133 was never false

-

134 lines = [ 

-

135 "if {EMPTY_DPKG_ROOT_CONDITION} && {SERVICE_MANAGER_IS_SYSTEMD_CONDITION}; then".format( 

-

136 EMPTY_DPKG_ROOT_CONDITION=EMPTY_DPKG_ROOT_CONDITION, 

-

137 SERVICE_MANAGER_IS_SYSTEMD_CONDITION=SERVICE_MANAGER_IS_SYSTEMD_CONDITION, 

-

138 ), 

-

139 " systemctl --system daemon-reload >/dev/null || true", 

-

140 ] 

-

141 if stop_then_start_scripts: 141 ↛ 142line 141 didn't jump to line 142, because the condition on line 141 was never true

-

142 unit_files = ctrl.maintscript.escape_shell_words(*stop_then_start_scripts) 

-

143 lines.append( 

-

144 " deb-systemd-invoke start {UNITFILES} >/dev/null || true".format( 

-

145 UNITFILES=unit_files, 

-

146 ) 

-

147 ) 

-

148 if start_on_install: 148 ↛ 156line 148 didn't jump to line 156, because the condition on line 148 was never false

-

149 lines.append(' if [ -z "$2" ]; then') 

-

150 lines.append( 

-

151 " deb-systemd-invoke start {UNITFILES} >/dev/null || true".format( 

-

152 UNITFILES=ctrl.maintscript.escape_shell_words(*start_on_install), 

-

153 ) 

-

154 ) 

-

155 lines.append(" fi") 

-

156 if action_on_upgrade: 156 ↛ 166line 156 didn't jump to line 166, because the condition on line 156 was never false

-

157 lines.append(' if [ -n "$2" ]; then') 

-

158 for action, units in action_on_upgrade.items(): 

-

159 lines.append( 

-

160 " deb-systemd-invoke {ACTION} {UNITFILES} >/dev/null || true".format( 

-

161 ACTION=action, 

-

162 UNITFILES=ctrl.maintscript.escape_shell_words(*units), 

-

163 ) 

-

164 ) 

-

165 lines.append(" fi") 

-

166 lines.append("fi") 

-

167 combined = "".join(x if x.endswith("\n") else f"{x}\n" for x in lines) 

-

168 ctrl.maintscript.on_configure(combined) 

-

169 

-

170 if stop_then_start_scripts: 170 ↛ 171line 170 didn't jump to line 171, because the condition on line 170 was never true

-

171 ctrl.maintscript.unconditionally_in_script( 

-

172 "preinst", 

-

173 textwrap.dedent( 

-

174 """\ 

-

175 if {EMPTY_DPKG_ROOT_CONDITION} && [ "$1" = upgrade ] && {SERVICE_MANAGER_IS_SYSTEMD_CONDITION} ; then 

-

176 deb-systemd-invoke stop {UNIT_FILES} >/dev/null || true 

-

177 fi 

-

178 """.format( 

-

179 EMPTY_DPKG_ROOT_CONDITION=EMPTY_DPKG_ROOT_CONDITION, 

-

180 SERVICE_MANAGER_IS_SYSTEMD_CONDITION=SERVICE_MANAGER_IS_SYSTEMD_CONDITION, 

-

181 UNIT_FILES=ctrl.maintscript.escape_shell_words( 

-

182 *stop_then_start_scripts 

-

183 ), 

-

184 ) 

-

185 ), 

-

186 ) 

-

187 

-

188 if stop_before_upgrade: 188 ↛ 189line 188 didn't jump to line 189, because the condition on line 188 was never true

-

189 ctrl.maintscript.on_before_removal( 

-

190 """\ 

-

191 if {EMPTY_DPKG_ROOT_CONDITION} && {SERVICE_MANAGER_IS_SYSTEMD_CONDITION} ; then 

-

192 deb-systemd-invoke stop {UNIT_FILES} >/dev/null || true 

-

193 fi 

-

194 """.format( 

-

195 EMPTY_DPKG_ROOT_CONDITION=EMPTY_DPKG_ROOT_CONDITION, 

-

196 SERVICE_MANAGER_IS_SYSTEMD_CONDITION=SERVICE_MANAGER_IS_SYSTEMD_CONDITION, 

-

197 UNIT_FILES=ctrl.maintscript.escape_shell_words(*stop_before_upgrade), 

-

198 ) 

-

199 ) 

-

200 if on_purge: 200 ↛ 210line 200 didn't jump to line 210, because the condition on line 200 was never false

-

201 ctrl.maintscript.on_purge( 

-

202 """\ 

-

203 if [ -x "/usr/bin/deb-systemd-helper" ]; then 

-

204 deb-systemd-helper purge {UNITFILES} >/dev/null || true 

-

205 fi 

-

206 """.format( 

-

207 UNITFILES=ctrl.maintscript.escape_shell_words(*on_purge), 

-

208 ) 

-

209 ) 

-

210 ctrl.maintscript.on_removed( 

-

211 textwrap.dedent( 

-

212 """\ 

-

213 if {SERVICE_MANAGER_IS_SYSTEMD_CONDITION} ; then 

-

214 systemctl --system daemon-reload >/dev/null || true 

-

215 fi 

-

216 """.format( 

-

217 SERVICE_MANAGER_IS_SYSTEMD_CONDITION=SERVICE_MANAGER_IS_SYSTEMD_CONDITION 

-

218 ) 

-

219 ) 

-

220 ) 

-

221 

-

222 
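The snippet assembly pattern above (collect shell lines in a list, join with trailing newlines, emit via `on_configure`) can be summarized in a simplified mock-up. `start_on_install_snippet` is hypothetical; the real code uses `ctrl.maintscript.escape_shell_words` instead of `shlex.quote` and also honours `DPKG_ROOT` and the upgrade actions:

```python
import shlex
from typing import List


# Simplified mock-up of the maintscript snippet assembly above.
def start_on_install_snippet(units: List[str]) -> str:
    unit_args = " ".join(shlex.quote(u) for u in units)
    lines = [
        "if [ -d /run/systemd/system ]; then",
        '  if [ -z "$2" ]; then',  # "$2" is empty on initial installation
        f"    deb-systemd-invoke start {unit_args} >/dev/null || true",
        "  fi",
        "fi",
    ]
    return "".join(line + "\n" for line in lines)


print(start_on_install_snippet(["foo.service"]))
```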

-

223def _remove_quote(v: str) -> str: 

-

224 if v and v[0] == v[-1] and v[0] in ('"', "'"): 224 ↛ 226line 224 didn't jump to line 226, because the condition on line 224 was never false

-

225 return v[1:-1] 

-

226 return v 

-

227 

-

228 

-

229def _find_and_analyze_systemd_service_files( 

-

230 pkg: BinaryPackage, 

-

231 fs_root: VirtualPath, 

-

232 systemd_service_dir: Literal["system", "user"], 

-

233) -> Iterable[SystemdUnit]: 

-

234 service_dirs = [ 

-

235 f"./usr/lib/systemd/{systemd_service_dir}", 

-

236 f"./lib/systemd/{systemd_service_dir}", 

-

237 ] 

-

238 had_install_sections = set() 

-

239 aliases: Dict[str, List[str]] = collections.defaultdict(list) 

-

240 seen = set() 

-

241 all_files = [] 

-

242 expected_units = set() 

-

243 expected_units_required_by = collections.defaultdict(list) 

-

244 

-

245 for d in service_dirs: 

-

246 system_dir = fs_root.lookup(d) 

-

247 if not system_dir: 

-

248 continue 

-

249 for child in system_dir.iterdir: 

-

250 if child.is_symlink: 

-

251 dest = os.path.basename(child.readlink()) 

-

252 aliases[dest].append(child.name) 

-

253 elif child.is_file and child.name not in seen: 253 ↛ 249line 253 didn't jump to line 249, because the condition on line 253 was never false

-

254 seen.add(child.name) 

-

255 all_files.append(child) 

-

256 if "@" in child.name: 

-

257 # dh_installsystemd does not check the contents of templated services, 

-

258 # and we match that. 

-

259 continue 

-

260 with child.open() as fd: 

-

261 for line in fd: 

-

262 line = line.strip() 

-

263 line_lc = line.lower() 

-

264 if line_lc == "[install]": 

-

265 had_install_sections.add(child.name) 

-

266 elif line_lc.startswith("alias="): 266 ↛ 272line 266 didn't jump to line 272, because the condition on line 266 was never false

-

267 # This code assumes service names cannot contain spaces (as in 

-

268 # if you copy-paste it for another field it might not work) 

-

269 aliases[child.name].extend( 

-

270 _remove_quote(x) for x in line[6:].split() 

-

271 ) 

-

272 elif line_lc.startswith("also="): 

-

273 # This code assumes service names cannot contain spaces (as in 

-

274 # if you copy-paste it for another field it might not work) 

-

275 for unit in (_remove_quote(x) for x in line[5:].split()): 

-

276 expected_units_required_by[unit].append(child.absolute) 

-

277 expected_units.add(unit) 

-

278 for path in all_files: 

-

279 if "@" in path.name: 

-

280 # Match dh_installsystemd, which skips templated services 

-

281 continue 

-

282 names = aliases[path.name] 

-

283 _, type_of_service = path.name.rsplit(".", 1) 

-

284 expected_units.difference_update(names) 

-

285 expected_units.discard(path.name) 

-

286 names.extend(x[:-8] for x in list(names) if x.endswith(".service")) 

-

287 names.insert(0, path.name) 

-

288 if path.name.endswith(".service"): 

-

289 names.insert(1, path.name[:-8]) 

-

290 yield SystemdUnit( 

-

291 path, 

-

292 names, 

-

293 type_of_service, 

-

294 systemd_service_dir, 

-

295 # Bug (?) compat with dh_installsystemd. All units are started, but only 

-

296 # those with an `[Install]` section are enabled. 

-

297 # Possibly related bug #1055599 

-

298 enable_by_default=path.name in had_install_sections, 

-

299 start_by_default=True, 

-

300 had_install_section=path.name in had_install_sections, 

-

301 ) 

-

302 

-

303 if expected_units: 303 ↛ 304line 303 didn't jump to line 304, because the condition on line 303 was never true

-

304 for unit_name in expected_units: 

-

305 required_by = expected_units_required_by[unit_name] 

-

306 required_names = ", ".join(required_by) 

-

307 _error( 

-

308 f"The unit {unit_name} was required by {required_names} (via Also=...)" 

-

309 f" but was not present in the package {pkg.name}" 

-

310 ) 

-

311 

-

312 
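The name expansion at the end of this generator can be isolated into a small standalone sketch, which reproduces the ordering of the code above (the unit's own name first, `.service`-less forms added):

```python
from typing import List


# Standalone sketch of the name expansion above: aliases keep their order,
# ".service" names are also offered without the suffix, and the unit's own
# name (plus its suffix-less form) is put in front.
def expand_unit_names(unit_file_name: str, aliases: List[str]) -> List[str]:
    names = list(aliases)
    names.extend(n[:-8] for n in list(names) if n.endswith(".service"))
    names.insert(0, unit_file_name)
    if unit_file_name.endswith(".service"):
        names.insert(1, unit_file_name[:-8])
    return names


print(expand_unit_names("foo.service", ["bar.service"]))
# ['foo.service', 'foo', 'bar.service', 'bar']
```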

-

313def generate_snippets_for_init_scripts( 

-

314 services: Sequence[ServiceDefinition[None]], 

-

315 ctrl: BinaryCtrlAccessor, 

-

316 _context: PackageProcessingContext, 

-

317) -> None: 

-

318 for service_def in services: 

-

319 script_name = service_def.path.name 

-

320 script_installed_path = service_def.path.absolute 

-

321 

-

322 update_rcd_params = ( 

-

323 "defaults" if service_def.auto_enable_on_install else "defaults-disabled" 

-

324 ) 

-

325 

-

326 ctrl.maintscript.unconditionally_in_script( 

-

327 "preinst", 

-

328 textwrap.dedent( 

-

329 """\ 

-

330 if [ "$1" = "install" ] && [ -n "$2" ] && [ -x {DPKG_ROOT}{SCRIPT_PATH} ] ; then 

-

331 chmod +x {DPKG_ROOT}{SCRIPT_PATH} >/dev/null || true 

-

332 fi 

-

333 """.format( 

-

334 DPKG_ROOT=DPKG_ROOT, 

-

335 SCRIPT_PATH=ctrl.maintscript.escape_shell_words( 

-

336 script_installed_path 

-

337 ), 

-

338 ) 

-

339 ), 

-

340 ) 

-

341 

-

342 lines = [ 

-

343 "if {EMPTY_DPKG_ROOT_CONDITION} && [ -x {SCRIPT_PATH} ]; then", 

-

344 " update-rc.d {SCRIPT_NAME} {UPDATE_RCD_PARAMS} >/dev/null || exit 1", 

-

345 ] 

-

346 

-

347 if ( 347 ↛ 359line 347 didn't jump to line 359

-

348 service_def.auto_start_on_install 

-

349 and service_def.on_upgrade != "stop-then-start" 

-

350 ): 

-

351 lines.append(' if [ -z "$2" ]; then') 

-

352 lines.append( 

-

353 " invoke-rc.d --skip-systemd-native {SCRIPT_NAME} start >/dev/null || exit 1".format( 

-

354 SCRIPT_NAME=ctrl.maintscript.escape_shell_words(script_name), 

-

355 ) 

-

356 ) 

-

357 lines.append(" fi") 

-

358 

-

359 if service_def.on_upgrade in ("restart", "reload"): 359 ↛ 368line 359 didn't jump to line 368, because the condition on line 359 was never false

-

360 lines.append(' if [ -n "$2" ]; then') 

-

361 lines.append( 

-

362 " invoke-rc.d --skip-systemd-native {SCRIPT_NAME} {ACTION} >/dev/null || exit 1".format( 

-

363 SCRIPT_NAME=ctrl.maintscript.escape_shell_words(script_name), 

-

364 ACTION=service_def.on_upgrade, 

-

365 ) 

-

366 ) 

-

367 lines.append(" fi") 

-

368 elif service_def.on_upgrade == "stop-then-start": 

-

369 lines.append( 

-

370 " invoke-rc.d --skip-systemd-native {SCRIPT_NAME} start >/dev/null || exit 1".format( 

-

371 SCRIPT_NAME=ctrl.maintscript.escape_shell_words(script_name), 

-

372 ) 

-

373 ) 

-

374 ctrl.maintscript.unconditionally_in_script( 

-

375 "preinst", 

-

376 textwrap.dedent( 

-

377 """\ 

-

378 if {EMPTY_DPKG_ROOT_CONDITION} && [ "$1" = "upgrade" ] && [ -x {SCRIPT_PATH} ]; then 

-

379 invoke-rc.d --skip-systemd-native {SCRIPT_NAME} stop > /dev/null || true 

-

380 fi 

-

381 """.format( 

-

382 EMPTY_DPKG_ROOT_CONDITION=EMPTY_DPKG_ROOT_CONDITION, 

-

383 SCRIPT_PATH=ctrl.maintscript.escape_shell_words( 

-

384 script_installed_path 

-

385 ), 

-

386 SCRIPT_NAME=ctrl.maintscript.escape_shell_words(script_name), 

-

387 ) 

-

388 ), 

-

389 ) 

-

390 elif service_def.on_upgrade != "do-nothing": 

-

391 raise AssertionError( 

-

392 f"Missing support for on_upgrade rule: {service_def.on_upgrade}" 

-

393 ) 

-

394 

-

395 lines.append("fi") 

-

396 combined = "".join(x if x.endswith("\n") else f"{x}\n" for x in lines) 

-

397 ctrl.maintscript.on_configure( 

-

398 combined.format( 

-

399 EMPTY_DPKG_ROOT_CONDITION=EMPTY_DPKG_ROOT_CONDITION, 

-

400 DPKG_ROOT=DPKG_ROOT, 

-

401 UPDATE_RCD_PARAMS=update_rcd_params, 

-

402 SCRIPT_PATH=ctrl.maintscript.escape_shell_words(script_installed_path), 

-

403 SCRIPT_NAME=ctrl.maintscript.escape_shell_words(script_name), 

-

404 ) 

-

405 ) 

-

406 

-

407 ctrl.maintscript.on_removed( 

-

408 textwrap.dedent( 

-

409 """\ 

-

410 if [ -x {DPKG_ROOT}{SCRIPT_PATH} ]; then 

-

411 chmod -x {DPKG_ROOT}{SCRIPT_PATH} > /dev/null || true 

-

412 fi 

-

413 """.format( 

-

414 DPKG_ROOT=DPKG_ROOT, 

-

415 SCRIPT_PATH=ctrl.maintscript.escape_shell_words( 

-

416 script_installed_path 

-

417 ), 

-

418 ) 

-

419 ) 

-

420 ) 

-

421 ctrl.maintscript.on_purge( 

-

422 textwrap.dedent( 

-

423 """\ 

-

424 if {EMPTY_DPKG_ROOT_CONDITION} ; then 

-

425 update-rc.d {SCRIPT_NAME} remove >/dev/null 

-

426 fi 

-

427 """.format( 

-

428 SCRIPT_NAME=ctrl.maintscript.escape_shell_words(script_name), 

-

429 EMPTY_DPKG_ROOT_CONDITION=EMPTY_DPKG_ROOT_CONDITION, 

-

430 ) 

-

431 ) 

-

432 ) 

-

433 

-

434 

-

435def detect_sysv_init_service_files( 

-

436 fs_root: VirtualPath, 

-

437 service_registry: ServiceRegistry[None], 

-

438 _context: PackageProcessingContext, 

-

439) -> None: 

-

440 etc_init = fs_root.lookup("/etc/init.d") 

-

441 if not etc_init: 

-

442 return 

-

443 for path in etc_init.iterdir: 

-

444 if path.is_dir or not path.is_executable: 

-

445 continue 

-

446 

-

447 service_registry.register_service( 

-

448 path, 

-

449 path.name, 

-

450 ) 

-
diff --git a/coverage-report/d_d5d6843b45eec01e_shlib_metadata_detectors_py.html b/coverage-report/d_d5d6843b45eec01e_shlib_metadata_detectors_py.html
deleted file mode 100644
index eaefd35..0000000
--- a/coverage-report/d_d5d6843b45eec01e_shlib_metadata_detectors_py.html
+++ /dev/null
@@ -1,146 +0,0 @@

1from typing import List 

-

2 

-

3from debputy import elf_util 

-

4from debputy.elf_util import ELF_LINKING_TYPE_DYNAMIC 

-

5from debputy.plugin.api import ( 

-

6 VirtualPath, 

-

7 PackageProcessingContext, 

-

8) 

-

9from debputy.plugin.api.impl import BinaryCtrlAccessorProvider 

-

10 

-

11SKIPPED_DEBUG_DIRS = [ 

-

12 "lib", 

-

13 "lib64", 

-

14 "usr", 

-

15 "bin", 

-

16 "sbin", 

-

17 "opt", 

-

18 "dev", 

-

19 "emul", 

-

20 ".build-id", 

-

21] 

-

22 

-

23SKIP_DIRS = {f"./usr/lib/debug/{subdir}" for subdir in SKIPPED_DEBUG_DIRS} 

-

24 

-

25 

-

26def _walk_filter(fs_path: VirtualPath, children: List[VirtualPath]) -> bool: 

-

27 if fs_path.path in SKIP_DIRS: 

-

28 children.clear() 

-

29 return False 

-

30 return True 

-

31 

-

32 
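The pruning contract used by `_walk_filter` is that the callback may mutate the `children` list to stop descent and return `False` to exclude the directory itself. A quick demonstration with a tiny stub in place of debputy's `VirtualPath`:

```python
from typing import List


# Tiny stub standing in for VirtualPath, just enough to demonstrate the
# walk_filter contract used above.
class _StubPath:
    def __init__(self, path: str) -> None:
        self.path = path


_SKIP = {"./usr/lib/debug/.build-id"}


def walk_filter(fs_path: _StubPath, children: List[_StubPath]) -> bool:
    if fs_path.path in _SKIP:
        children.clear()  # stop descending into this subtree
        return False      # and exclude the directory itself
    return True


children = [_StubPath("./usr/lib/debug/.build-id/ab")]
print(walk_filter(_StubPath("./usr/lib/debug/.build-id"), children), children)
```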

-

33def detect_shlibdeps( 

-

34 fs_root: VirtualPath, 

-

35 ctrl: BinaryCtrlAccessorProvider, 

-

36 _context: PackageProcessingContext, 

-

37) -> None: 

-

38 elf_files_to_process = elf_util.find_all_elf_files( 

-

39 fs_root, 

-

40 walk_filter=_walk_filter, 

-

41 with_linking_type=ELF_LINKING_TYPE_DYNAMIC, 

-

42 ) 

-

43 

-

44 if not elf_files_to_process: 

-

45 return 

-

46 

-

47 ctrl.dpkg_shlibdeps(elf_files_to_process) 

-
diff --git a/coverage-report/d_d5d6843b45eec01e_strip_non_determinism_py.html b/coverage-report/d_d5d6843b45eec01e_strip_non_determinism_py.html
deleted file mode 100644
index e934d39..0000000
--- a/coverage-report/d_d5d6843b45eec01e_strip_non_determinism_py.html
+++ /dev/null
@@ -1,363 +0,0 @@

1import dataclasses 

-

2import os.path 

-

3import re 

-

4import subprocess 

-

5from contextlib import ExitStack 

-

6from enum import IntEnum 

-

7from typing import Iterator, Optional, List, Callable, Any, Tuple, Union 

-

8 

-

9from debputy.plugin.api import VirtualPath 

-

10from debputy.plugin.api.impl_types import PackageProcessingContextProvider 

-

11from debputy.util import xargs, _info, escape_shell, _error 

-

12 

-

13 

-

14class DetectionVerdict(IntEnum): 

-

15 NOT_RELEVANT = 1 

-

16 NEEDS_FILE_OUTPUT = 2 

-

17 PROCESS = 3 

-

18 

-

19 

-

20def _file_starts_with( 

-

21 sequences: Union[bytes, Tuple[bytes, ...]] 

-

22) -> Callable[[VirtualPath], bool]: 

-

23 if isinstance(sequences, bytes): 

-

24 longest_sequence = len(sequences) 

-

25 sequences = (sequences,) 

-

26 else: 

-

27 longest_sequence = max(len(s) for s in sequences) 

-

28 

-

29 def _checker(path: VirtualPath) -> bool: 

-

30 with path.open(byte_io=True, buffering=4096) as fd: 

-

31 buffer = fd.read(longest_sequence) 

-

32            return buffer.startswith(sequences)  # prefix match; bytes.startswith accepts a tuple of prefixes

-

33 

-

34 return _checker 

-

35 
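# Usage sketch for _file_starts_with (illustrative only; `png_path` stands in
# for a VirtualPath, everything else is defined in this file). The factory
# captures the magic-byte prefixes once and returns a cheap per-file checker:
#
#     is_png = _file_starts_with(b"\x89PNG\x0D\x0A\x1A\x0A")
#     if is_png(png_path):
#         ...  # candidate for strip-nondeterminism's PNG normalizer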

-

36 

-

37def _is_javadoc_file(path: VirtualPath) -> bool: 

-

38 with path.open(buffering=4096) as fd: 

-

39 c = fd.read(1024) 

-

40 return "<!-- Generated by javadoc" in c 

-

41 

-

42 

-

43class SndDetectionRule: 

-

44 def initial_verdict(self, path: VirtualPath) -> DetectionVerdict: 

-

45 raise NotImplementedError 

-

46 

-

47 def file_output_verdict( 

-

48 self, 

-

49 path: VirtualPath, 

-

50 file_analysis: Optional[str], 

-

51 ) -> bool: 

-

52 raise TypeError( 

-

53 "Should not have been called or the rule forgot to implement this method" 

-

54 ) 

-

55 

-

56 

-

57@dataclasses.dataclass(frozen=True, slots=True) 

-

58class ExtensionPlusFileOutputRule(SndDetectionRule): 

-

59 extensions: Tuple[str, ...] 

-

60 file_pattern: Optional[re.Pattern[str]] = None 

-

61 

-

62 def initial_verdict(self, path: VirtualPath) -> DetectionVerdict: 

-

63 _, ext = os.path.splitext(path.name) 

-

64        if ext not in self.extensions:  # coverage: 64 ↛ 66, the condition on line 64 was never false

-

65 return DetectionVerdict.NOT_RELEVANT 

-

66 if self.file_pattern is None: 

-

67 return DetectionVerdict.PROCESS 

-

68 return DetectionVerdict.NEEDS_FILE_OUTPUT 

-

69 

-

70 def file_output_verdict( 

-

71 self, 

-

72 path: VirtualPath, 

-

73 file_analysis: str, 

-

74 ) -> bool: 

-

75 file_pattern = self.file_pattern 

-

76 assert file_pattern is not None 

-

77 m = file_pattern.search(file_analysis) 

-

78 return m is not None 

-

79 

-

80 

-

81@dataclasses.dataclass(frozen=True, slots=True) 

-

82class ExtensionPlusContentCheck(SndDetectionRule): 

-

83 extensions: Tuple[str, ...] 

-

84 content_check: Callable[[VirtualPath], bool] 

-

85 

-

86 def initial_verdict(self, path: VirtualPath) -> DetectionVerdict: 

-

87 _, ext = os.path.splitext(path.name) 

-

88 if ext not in self.extensions: 

-

89 return DetectionVerdict.NOT_RELEVANT 

-

90 content_verdict = self.content_check(path) 

-

91        if content_verdict:  # coverage: 91 ↛ 92, the condition on line 91 was never true

-

92 return DetectionVerdict.PROCESS 

-

93 return DetectionVerdict.NOT_RELEVANT 

-

94 

-

95 

-

96class PyzipFileCheck(SndDetectionRule): 

-

97 def _is_pyzip_file(self, path: VirtualPath) -> bool: 

-

98 with path.open(byte_io=True, buffering=4096) as fd: 

-

99 c = fd.read(32) 

-

100            if not c.startswith(b"#!"):  # coverage: 100 ↛ 103, the condition on line 100 was never false

-

101 return False 

-

102 

-

103 return b"\nPK\x03\x04" in c 

-

104 

-

105 def initial_verdict(self, path: VirtualPath) -> DetectionVerdict: 

-

106        if self._is_pyzip_file(path):  # coverage: 106 ↛ 107, the condition on line 106 was never true

-

107 return DetectionVerdict.PROCESS 

-

108 return DetectionVerdict.NOT_RELEVANT 

-

109 

-

110 

-

111# These detection rules should be aligned with `get_normalizer_for_file` in File::StripNondeterminism. 

-

112# Note that if we send a file too often, it is just bad for performance. If we send a file too rarely, we

-

113# risk non-determinism in the final output. 

-

114SND_DETECTION_RULES: List[SndDetectionRule] = [ 

-

115 ExtensionPlusContentCheck( 

-

116 extensions=(".a",), 

-

117 content_check=_file_starts_with( 

-

118 ( 

-

119 b"!<arch>\n", 

-

120 b"!<thin>\n", 

-

121 ), 

-

122 ), 

-

123 ), 

-

124 ExtensionPlusContentCheck( 

-

125 extensions=(".png",), 

-

126 content_check=_file_starts_with(b"\x89PNG\x0D\x0A\x1A\x0A"), 

-

127 ), 

-

128 ExtensionPlusContentCheck( 

-

129 extensions=(".gz", ".dz"), 

-

130 content_check=_file_starts_with(b"\x1F\x8B"), 

-

131 ), 

-

132 ExtensionPlusContentCheck( 

-

133 extensions=( 

-

134 # .zip related 

-

135 ".zip", 

-

136 ".pk3", 

-

137 ".epub", 

-

138 ".whl", 

-

139 ".xpi", 

-

140 ".htb", 

-

141 ".zhfst", 

-

142 ".par", 

-

143 ".codadef", 

-

144 # .jar related 

-

145 ".jar", 

-

146 ".war", 

-

147 ".hpi", 

-

148 ".apk", 

-

149 ".sym", 

-

150 ), 

-

151 content_check=_file_starts_with( 

-

152 ( 

-

153 b"PK\x03\x04\x1F", 

-

154 b"PK\x05\x06", 

-

155 b"PK\x07\x08", 

-

156 ) 

-

157 ), 

-

158 ), 

-

159 ExtensionPlusContentCheck( 

-

160 extensions=( 

-

161 ".mo", 

-

162 ".gmo", 

-

163 ), 

-

164 content_check=_file_starts_with( 

-

165 ( 

-

166 b"\x95\x04\x12\xde", 

-

167 b"\xde\x12\x04\x95", 

-

168 ) 

-

169 ), 

-

170 ), 

-

171 ExtensionPlusContentCheck( 

-

172 extensions=(".uimage",), 

-

173 content_check=_file_starts_with(b"\x27\x05\x19\x56"), 

-

174 ), 

-

175 ExtensionPlusContentCheck( 

-

176 extensions=(".bflt",), 

-

177 content_check=_file_starts_with(b"\x62\x46\x4C\x54"), 

-

178 ), 

-

179 ExtensionPlusContentCheck( 

-

180 extensions=(".jmod",), 

-

181 content_check=_file_starts_with(b"JM"), 

-

182 ), 

-

183 ExtensionPlusContentCheck( 

-

184 extensions=(".html",), 

-

185 content_check=_is_javadoc_file, 

-

186 ), 

-

187 PyzipFileCheck(), 

-

188 ExtensionPlusFileOutputRule( 

-

189 extensions=(".cpio",), 

-

190 # XXX: Add file output check (requires the file output support) 

-

191 ), 

-

192] 

-

193 
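# Commented sketch of how a single rule is consulted (the rule index refers to
# the list above; `path` stands in for a VirtualPath):
#
#     rule = SND_DETECTION_RULES[1]          # the ".png" rule
#     verdict = rule.initial_verdict(path)   # PROCESS for a real PNG,
#                                            # NOT_RELEVANT otherwise
#
# Rules built from ExtensionPlusFileOutputRule may instead return
# NEEDS_FILE_OUTPUT, deferring the final decision to file_output_verdict().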

-

194 

-

195def _detect_paths_with_possible_non_determinism( 

-

196 fs_root: VirtualPath, 

-

197) -> Iterator[VirtualPath]: 

-

198 needs_file_output = [] 

-

199 for path in fs_root.all_paths(): 

-

200 if not path.is_file: 

-

201 continue 

-

202 verdict = DetectionVerdict.NOT_RELEVANT 

-

203 needs_file_output_rules = [] 

-

204 for rule in SND_DETECTION_RULES: 

-

205 v = rule.initial_verdict(path) 

-

206            if v > verdict:  # coverage: 206 ↛ 207, the condition on line 206 was never true

-

207 verdict = v 

-

208            if verdict == DetectionVerdict.PROCESS:  # coverage: 208 ↛ 209, the condition on line 208 was never true

-

209 yield path 

-

210 break 

-

211            elif verdict == DetectionVerdict.NEEDS_FILE_OUTPUT:  # coverage: 211 ↛ 212, the condition on line 211 was never true

-

212 needs_file_output_rules.append(rule) 

-

213 

-

214        if verdict == DetectionVerdict.NEEDS_FILE_OUTPUT:  # coverage: 214 ↛ 215, the condition on line 214 was never true

-

215 needs_file_output.append((path, needs_file_output_rules)) 

-

216 

-

217 assert not needs_file_output 

-

218 # FIXME: Implement file check 

-

219 

-

220 

-

221def _apply_strip_non_determinism(timestamp: str, paths: List[VirtualPath]) -> None: 

-

222 static_cmd = [ 

-

223 "strip-nondeterminism", 

-

224 f"--timestamp={timestamp}", 

-

225 "-v", 

-

226 "--normalizers=+all", 

-

227 ] 

-

228 with ExitStack() as manager: 

-

229 affected_files = [ 

-

230 manager.enter_context(p.replace_fs_path_content()) for p in paths 

-

231 ] 

-

232 for cmd in xargs(static_cmd, affected_files): 

-

233 _info( 

-

234 f"Removing (possible) unnecessary non-deterministic content via: {escape_shell(*cmd)}" 

-

235 ) 

-

236 try: 

-

237 subprocess.check_call( 

-

238 cmd, 

-

239 stdin=subprocess.DEVNULL, 

-

240 restore_signals=True, 

-

241 ) 

-

242 except subprocess.CalledProcessError: 

-

243 _error( 

-

244 "Attempting to remove unnecessary non-deterministic content failed. Please review" 

-

245 " the error from strip-nondeterminism above understand what went wrong." 

-

246 ) 

-

247 

-

248 

-

249def strip_non_determinism( 

-

250 fs_root: VirtualPath, _: Any, context: PackageProcessingContextProvider 

-

251) -> None: 

-

252 paths = list(_detect_paths_with_possible_non_determinism(fs_root)) 

-

253 

-

254    if not paths:  # coverage: 254 ↛ 258, the condition on line 254 was never false

-

255 _info("Detected no paths to be processed by strip-nondeterminism") 

-

256 return 

-

257 

-

258 substitution = context._manifest.substitution 

-

259 

-

260 source_date_epoch = substitution.substitute( 

-

261 "{{_DEBPUTY_SND_SOURCE_DATE_EPOCH}}", "Internal; strip-nondeterminism" 

-

262 ) 

-

263 

-

264 _apply_strip_non_determinism(source_date_epoch, paths) 

-
diff --git a/coverage-report/d_d5d6843b45eec01e_types_py.html b/coverage-report/d_d5d6843b45eec01e_types_py.html deleted file mode 100644 index c4198b9..0000000 --- a/coverage-report/d_d5d6843b45eec01e_types_py.html +++ /dev/null @@ -1,109 +0,0 @@
- Coverage for src/debputy/plugin/debputy/types.py: 100% (7 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1import dataclasses 

-

2 

-

3from debputy.manifest_parser.base_types import FileSystemMode 

-

4 

-

5 

-

6@dataclasses.dataclass(slots=True) 

-

7class DebputyCapability: 

-

8 capabilities: str 

-

9 capability_mode: FileSystemMode 

-

10 definition_source: str 

-
diff --git a/coverage-report/d_e9c451f4ae334f76___init___py.html b/coverage-report/d_e9c451f4ae334f76___init___py.html deleted file mode 100644 index 8183ffa..0000000 --- a/coverage-report/d_e9c451f4ae334f76___init___py.html +++ /dev/null @@ -1,290 +0,0 @@
- Coverage for src/debputy/lsp/vendoring/_deb822_repro/__init__.py: 100% (3 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1# The "from X import Y as Y" looks weird, but we are stuck in a fight 

-

2# between mypy and pylint in the CI. 

-

3# 

-

4# mypy --strict insists on either of following for re-exporting 

-

5# 1) Do a "from debian._deb822_repro.X import *" 

-

6# 2) Do a "from .X import Y" 

-

7# 3) Do a "from debian._deb822_repro.X import Y as Z" 

-

8# 

-

9# pylint on the CI fails on relative imports (it assumes "lib" is a 

-

10# part of the python package name in relative imports). This rules 

-

11# out 2) from the mypy list. The use of 1) would cause overlapping 

-

12# imports (and also it felt prudent to import only what was exported). 

-

13# 

-

14# This left 3) as the only option for now, which pylint then complains 

-

15# about (not unreasonably in general). Fortunately, we can disable

-

16# that warning in this workaround. But once 2) becomes an option

-

17# without pylint tripping over itself on the CI, then it is considerably

-

18# better than this approach. 

-

19# 

-

20 

-

21""" Round-trip safe dictionary-like interfaces to RFC822-like files 

-

22 

-

23This module is a round-trip safe API for working with RFC822-like Debian data 

-

24formats. It is primarily aimed at files managed by humans, like debian/control.

-

25While it is able to process any Deb822 file, you might find the debian.deb822

-

26module better suited for larger files such as the `Packages` and `Sources` 

-

27from the Debian archive due to reasons explained below. 

-

28 

-

29Being round-trip safe means that this module will faithfully preserve the original 

-

30formatting including whitespace and comments from the input where not modified. 

-

31A concrete example:: 

-

32 

-

33 >>> from debian._deb822_repro import parse_deb822_file 

-

34 >>> example_deb822_paragraph = ''' 

-

35 ... Package: foo 

-

36 ... # Field comment (because it becomes just before a field) 

-

37 ... Section: main/devel 

-

38 ... Depends: libfoo, 

-

39 ... # Inline comment (associated with the next line) 

-

40 ... libbar, 

-

41 ... ''' 

-

42 >>> deb822_file = parse_deb822_file(example_deb822_paragraph.splitlines()) 

-

43 >>> paragraph = next(iter(deb822_file)) 

-

44 >>> paragraph['Section'] = 'devel' 

-

45 >>> output = deb822_file.dump() 

-

46 >>> output == example_deb822_paragraph.replace('Section: main/devel', 'Section: devel') 

-

47 True 

-

48 

-

49This makes it particularly good for automated changes/corrections to files (partly) 

-

50maintained by humans. 

-

51 

-

52Compared to debian.deb822 

-

53------------------------- 

-

54 

-

55The round-trip safe API is primarily useful when your program is editing files 

-

56and the file in question is (likely) to be hand-edited or formatted directly by 

-

57human maintainers. This includes files like debian/control and the 

-

58debian/copyright using the "DEP-5" format. 

-

59 

-

60The round-trip safe API also supports parsing and working with invalid files. 

-

61This enables programs to work on the file in cases where it was left

-

62with an error, in an attempt to correct it (or ignore it).

-

63 

-

64On the flip side, the debian.deb822 module generally uses less memory than the 

-

65round trip safe API. In some cases, it will also have faster data structures 

-

66because its internal data structures are simpler. Accordingly, when you are doing 

-

67read-only work and/or working with large files a la the Packages or Sources

-

68files from the Debian archive, then the round-trip safe API either provides no 

-

69advantages or its trade-offs might show up in performance statistics. 

-

70 

-

71The memory and runtime performance difference should generally be constant for 

-

72valid files but not necessarily a small one. For invalid files, some operations 

-

73can degrade in runtime performance in particular cases (memory performance for 

-

74invalid files is comparable to that of valid files).

-

75 

-

76Converting from debian.deb822 

-

77============================= 

-

78 

-

79The following is a short example for how to migrate from debian.deb822 to 

-

80the round-trip safe API. Given the following source text:: 

-

81 

-

82 >>> dctrl_input = b''' 

-

83 ... Source: foo 

-

84 ... Build-Depends: debhelper-compat (= 13) 

-

85 ... 

-

86 ... Package: bar 

-

87 ... Architecture: any 

-

88 ... Depends: ${misc:Depends}, 

-

89 ... ${shlibs:Depends}, 

-

90 ... Description: provides some exciting feature 

-

91 ... yada yada yada 

-

92 ... . 

-

93 ... more deskription with a misspelling 

-

94 ... '''.lstrip() # To remove the leading newline 

-

95 >>> # A few definitions to emulate file I/O (would be different in the program) 

-

96 >>> import contextlib, os 

-

97 >>> @contextlib.contextmanager 

-

98 ... def open_input(): 

-

99 ... # Works with and without keepends=True. 

-

100 ... # Keep the ends here to truly emulate an open file. 

-

101 ... yield dctrl_input.splitlines(keepends=True) 

-

102 >>> def open_output(): 

-

103 ... return open(os.devnull, 'wb') 

-

104 

-

105With debian.deb822, your code might look like this:: 

-

106 

-

107 >>> from debian.deb822 import Deb822 

-

108 >>> with open_input() as in_fd, open_output() as out_fd: 

-

109 ... for paragraph in Deb822.iter_paragraphs(in_fd): 

-

110 ... if 'Description' not in paragraph: 

-

111 ... continue 

-

112 ... description = paragraph['Description'] 

-

113 ... # Fix typo 

-

114 ... paragraph['Description'] = description.replace('deskription', 'description') 

-

115 ... paragraph.dump(out_fd) 

-

116 

-

117With the round-trip safe API, the rewrite would look like this:: 

-

118 

-

119 >>> from debian._deb822_repro import parse_deb822_file 

-

120 >>> with open_input() as in_fd, open_output() as out_fd: 

-

121 ... parsed_file = parse_deb822_file(in_fd) 

-

122 ... for paragraph in parsed_file: 

-

123 ... if 'Description' not in paragraph: 

-

124 ... continue 

-

125 ... description = paragraph['Description'] 

-

126 ... # Fix typo 

-

127 ... paragraph['Description'] = description.replace('deskription', 'description') 

-

128 ... parsed_file.dump(out_fd) 

-

129 

-

130Key changes are: 

-

131 

-

132 1. Imports are different. 

-

133 2. Deb822.iter_paragraphs is replaced by parse_deb822_file and a reference to 

-

134 its return value is kept for later. 

-

135 3. Instead of dumping paragraphs one by one, the return value from 

-

136 parse_deb822_file is dumped at the end. 

-

137 

-

138    - The round-trip safe API does support "per-paragraph" dumping, but formatting

-

139 and comments between paragraphs would be lost in the output. This may 

-

140 be an acceptable tradeoff or desired for some cases. 

-

141 

-

142Note that the round trip safe API does not accept all the same parameters as the 

-

143debian.deb822 module does. Often this is because the feature is not relevant for 

-

144the round-trip safe API (e.g., python-apt cannot be used as it discards comments)

-

145or is obsolete in the debian.deb822 module and therefore omitted. 

-

146 

-

147For list based fields, you may want to have a look at the 

-

148Deb822ParagraphElement.as_interpreted_dict_view method. 

-

149 

-

150Stability of this API 

-

151--------------------- 

-

152 

-

153The API is subject to change based on feedback from early adopters and beta 

-

154testers. That said, the code for valid files is unlikely to change in 

-

155a backwards incompatible way. 

-

156 

-

157Things that might change in an incompatible way include: 

-

158 * Whether invalid files are accepted (parsed without errors) by default. 

-

159 (currently they are) 

-

160 * How invalid files are parsed. As an example, currently a syntax error acts 

-

161 as a paragraph separator. Whether it should is open to debate. 

-

162 

-

163""" 

-

164 

-

165# pylint: disable=useless-import-alias 

-

166from .parsing import ( 

-

167 parse_deb822_file as parse_deb822_file, 

-

168 LIST_SPACE_SEPARATED_INTERPRETATION as LIST_SPACE_SEPARATED_INTERPRETATION, 

-

169 LIST_COMMA_SEPARATED_INTERPRETATION as LIST_COMMA_SEPARATED_INTERPRETATION, 

-

170 Interpretation as Interpretation, 

-

171 # Primarily for documentation purposes / help() 

-

172 Deb822FileElement as Deb822FileElement, 

-

173 Deb822NoDuplicateFieldsParagraphElement, 

-

174 Deb822ParagraphElement as Deb822ParagraphElement, 

-

175) 

-

176from .types import ( 

-

177 AmbiguousDeb822FieldKeyError as AmbiguousDeb822FieldKeyError, 

-

178 SyntaxOrParseError, 

-

179) 

-

180 

-

181__all__ = [ 

-

182 "parse_deb822_file", 

-

183 "AmbiguousDeb822FieldKeyError", 

-

184 "LIST_SPACE_SEPARATED_INTERPRETATION", 

-

185 "LIST_COMMA_SEPARATED_INTERPRETATION", 

-

186 "Interpretation", 

-

187 "Deb822FileElement", 

-

188 "Deb822NoDuplicateFieldsParagraphElement", 

-

189 "Deb822ParagraphElement", 

-

190 "SyntaxOrParseError", 

-

191] 

-
diff --git a/coverage-report/d_e9c451f4ae334f76__util_py.html b/coverage-report/d_e9c451f4ae334f76__util_py.html deleted file mode 100644 index c0a0706..0000000 --- a/coverage-report/d_e9c451f4ae334f76__util_py.html +++ /dev/null @@ -1,390 +0,0 @@
- Coverage for src/debputy/lsp/vendoring/_deb822_repro/_util.py: 57% (154 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1import collections 

-

2import collections.abc 

-

3import logging 

-

4import sys 

-

5import textwrap 

-

6from abc import ABC 

-

7 

-

8try: 

-

9 from typing import ( 

-

10 Optional, 

-

11 Union, 

-

12 Iterable, 

-

13 Callable, 

-

14 TYPE_CHECKING, 

-

15 Iterator, 

-

16 Type, 

-

17 cast, 

-

18 List, 

-

19 Generic, 

-

20 ) 

-

21 from debian._util import T 

-

22 from .types import TE, R, TokenOrElement 

-

23 

-

24 _combine_parts_ret_type = Callable[ 

-

25 [Iterable[Union[TokenOrElement, TE]]], Iterable[Union[TokenOrElement, R]] 

-

26 ] 

-

27except ImportError: 

-

28 # pylint: disable=unnecessary-lambda-assignment 

-

29 TYPE_CHECKING = False 

-

30 cast = lambda t, v: v 

-

31 

-

32 

-

33if TYPE_CHECKING: 

-

34 from .parsing import Deb822Element 

-

35 from .tokens import Deb822Token 

-

36 

-

37 

-

38def print_ast( 

-

39 ast_tree, # type: Union[Iterable[TokenOrElement], 'Deb822Element'] 

-

40 *, 

-

41 end_marker_after=5, # type: Optional[int] 

-

42 output_function=None # type: Optional[Callable[[str], None]] 

-

43): 

-

44 # type: (...) -> None 

-

45 """Debugging aid, which can dump a Deb822Element or a list of tokens/elements 

-

46 

-

47 :param ast_tree: Either a Deb822Element or an iterable Deb822Token/Deb822Element entries 

-

48 (both types may be mixed in the same iterable, which enable it to dump the 

-

49 ast tree at different stages of parse_deb822_file method) 

-

50 :param end_marker_after: The dump will add "end of element" markers if a 

-

51 given element spans at least this many tokens/elements. Can be disabled 

-

52      by passing None as value. Use 0 for unconditionally marking all

-

53 elements (note that tokens never get an "end of element" marker as they 

-

54      are not elements).

-

55 :param output_function: Callable that receives a single str argument and is responsible 

-

56 for "displaying" that line. The callable may be invoked multiple times (one per line 

-

57 of output). Defaults to logging.info if omitted. 

-

58 

-

59 """ 

-

60 # Avoid circular dependency 

-

61 # pylint: disable=import-outside-toplevel 

-

62 from debian._deb822_repro.parsing import Deb822Element 

-

63 

-

64 prefix = None 

-

65 if isinstance(ast_tree, Deb822Element): 

-

66 ast_tree = [ast_tree] 

-

67 stack = [(0, "", iter(ast_tree))] 

-

68 current_no = 0 

-

69 if output_function is None: 

-

70 output_function = logging.info 

-

71 while stack: 

-

72 start_no, name, current_iter = stack[-1] 

-

73 for current in current_iter: 

-

74 current_no += 1 

-

75 if prefix is None: 

-

76 prefix = " " * len(stack) 

-

77 if isinstance(current, Deb822Element): 

-

78 stack.append( 

-

79 (current_no, current.__class__.__name__, iter(current.iter_parts())) 

-

80 ) 

-

81 output_function(prefix + current.__class__.__name__) 

-

82 prefix = None 

-

83 break 

-

84 output_function(prefix + str(current)) 

-

85 else: 

-

86 # current_iter is depleted 

-

87 stack.pop() 

-

88 prefix = None 

-

89 if ( 

-

90 end_marker_after is not None 

-

91 and start_no + end_marker_after <= current_no 

-

92 and name 

-

93 ): 

-

94 if prefix is None: 

-

95 prefix = " " * len(stack) 

-

96 output_function(prefix + "# <-- END OF " + name) 

-

97 

-

98 

-

99def combine_into_replacement( 

-

100 source_class, # type: Type[TE] 

-

101 replacement_class, # type: Type[R] 

-

102 *, 

-

103 constructor=None # type: Optional[Callable[[List[TE]], R]] 

-

104): 

-

105 # type: (...) -> _combine_parts_ret_type[TE, R] 

-

106 """Combines runs of one type into another type 

-

107 

-

108 This is primarily useful for transforming tokens (e.g, Comment tokens) into 

-

109 the relevant element (such as the Comment element). 

-

110 """ 

-

111 if constructor is None: 

-

112 _constructor = cast("Callable[[List[TE]], R]", replacement_class) 

-

113 else: 

-

114 # Force mypy to see that constructor is no longer optional 

-

115 _constructor = constructor 

-

116 

-

117 def _impl(token_stream): 

-

118 # type: (Iterable[Union[TokenOrElement, TE]]) -> Iterable[Union[TokenOrElement, R]] 

-

119 tokens = [] 

-

120 for token in token_stream: 

-

121 if isinstance(token, source_class): 

-

122 tokens.append(token) 

-

123 continue 

-

124 

-

125 if tokens: 

-

126 yield _constructor(list(tokens)) 

-

127 tokens.clear() 

-

128 yield token 

-

129 

-

130 if tokens: 

-

131 yield _constructor(tokens) 

-

132 

-

133 return _impl 

-

134 
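# Commented sketch with stand-in types (not Deb822 classes): runs of `str`
# items collapse into a single list-wrapped replacement while other items
# pass through untouched.
#
#     collapse = combine_into_replacement(str, list, constructor=list)
#     list(collapse(["a", "b", 1, "c"]))   # -> [["a", "b"], 1, ["c"]]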

-

135 

-

136if sys.version_info >= (3, 9) or TYPE_CHECKING:  # coverage: 136 ↛ 141, the condition on line 136 was never false

-

137 _bufferingIterator_Base = collections.abc.Iterator[T] 

-

138else: 

-

139 # Python 3.5 - 3.8 compat - we are not allowed to subscript the abc.Iterator 

-

140 # - use this little hack to work around it 

-

141 class _bufferingIterator_Base(collections.abc.Iterator, Generic[T], ABC): 

-

142 pass 

-

143 

-

144 

-

145class BufferingIterator(_bufferingIterator_Base[T], Generic[T]): 

-

146 

-

147 def __init__(self, stream): 

-

148 # type: (Iterable[T]) -> None 

-

149 self._stream = iter(stream) # type: Iterator[T] 

-

150 self._buffer = collections.deque() # type: collections.deque[T] 

-

151 self._expired = False # type: bool 

-

152 

-

153 def __next__(self): 

-

154 # type: () -> T 

-

155 if self._buffer: 

-

156 return self._buffer.popleft() 

-

157 if self._expired: 

-

158 raise StopIteration 

-

159 return next(self._stream) 

-

160 

-

161 def takewhile(self, predicate): 

-

162 # type: (Callable[[T], bool]) -> Iterable[T] 

-

163 """Variant of itertools.takewhile except it does not discard the first non-matching token""" 

-

164 buffer = self._buffer 

-

165        while buffer or self._fill_buffer(5):  # coverage: 165 ↛ exit, the condition on line 165 was never false

-

166 v = buffer[0] 

-

167 if predicate(v): 

-

168 buffer.popleft() 

-

169 yield v 

-

170 else: 

-

171 break 

-

172 

-

173 def consume_many(self, count): 

-

174 # type: (int) -> List[T] 

-

175 self._fill_buffer(count) 

-

176 buffer = self._buffer 

-

177 if len(buffer) == count: 

-

178 ret = list(buffer) 

-

179 buffer.clear() 

-

180 else: 

-

181 ret = [] 

-

182 while buffer and count: 

-

183 ret.append(buffer.popleft()) 

-

184 count -= 1 

-

185 return ret 

-

186 

-

187 def peek_buffer(self): 

-

188 # type: () -> List[T] 

-

189 return list(self._buffer) 

-

190 

-

191 def peek_find( 

-

192 self, 

-

193 predicate, # type: Callable[[T], bool] 

-

194 limit=None, # type: Optional[int] 

-

195 ): 

-

196 # type: (...) -> Optional[int] 

-

197 buffer = self._buffer 

-

198 i = 0 

-

199        while limit is None or i < limit:  # coverage: 199 ↛ 208, the condition on line 199 was never false

-

200 if i >= len(buffer): 

-

201 self._fill_buffer(i + 5) 

-

202 if i >= len(buffer): 

-

203 return None 

-

204 v = buffer[i] 

-

205 if predicate(v): 

-

206 return i + 1 

-

207 i += 1 

-

208 return None 

-

209 

-

210 def _fill_buffer(self, number): 

-

211 # type: (int) -> bool 

-

212 if not self._expired: 

-

213 while len(self._buffer) < number: 

-

214 try: 

-

215 self._buffer.append(next(self._stream)) 

-

216 except StopIteration: 

-

217 self._expired = True 

-

218 break 

-

219 return bool(self._buffer) 

-

220 

-

221 def peek(self): 

-

222 # type: () -> Optional[T] 

-

223 return self.peek_at(1) 

-

224 

-

225 def peek_at(self, tokens_ahead): 

-

226 # type: (int) -> Optional[T] 

-

227 self._fill_buffer(tokens_ahead) 

-

228 return ( 

-

229 self._buffer[tokens_ahead - 1] 

-

230 if len(self._buffer) >= tokens_ahead 

-

231 else None 

-

232 ) 

-

233 

-

234 def peek_many(self, number): 

-

235 # type: (int) -> List[T] 

-

236 self._fill_buffer(number) 

-

237 buffer = self._buffer 

-

238 if len(buffer) == number: 

-

239 ret = list(buffer) 

-

240 elif number: 

-

241 ret = [] 

-

242 for t in buffer: 

-

243 ret.append(t) 

-

244 number -= 1 

-

245 if not number: 

-

246 break 

-

247 else: 

-

248 ret = [] 

-

249 return ret 

-

250 

-

251 

-

252def len_check_iterator( 

-

253 content, # type: str 

-

254 stream, # type: Iterable[TE] 

-

255 content_len=None, # type: Optional[int] 

-

256): 

-

257 # type: (...) -> Iterable[TE] 

-

258 """Flatten a parser's output into tokens and verify it covers the entire line/text""" 

-

259    if content_len is None:  # coverage: 259 ↛ 260, the condition on line 259 was never true

-

260 content_len = len(content) 

-

261 # Fail-safe to ensure none of the value parsers incorrectly parse a value. 

-

262 covered = 0 

-

263 for token_or_element in stream: 

-

264 # We use the AttributeError to discriminate between elements and tokens 

-

265 # The cast()s are here to assist / workaround mypy not realizing that. 

-

266 try: 

-

267 tokens = cast("Deb822Element", token_or_element).iter_tokens() 

-

268 except AttributeError: 

-

269 token = cast("Deb822Token", token_or_element) 

-

270 covered += len(token.text) 

-

271 else: 

-

272 for token in tokens: 

-

273 covered += len(token.text) 

-

274 yield token_or_element 

-

275    if covered != content_len:  # coverage: 275 ↛ 276, the condition on line 275 was never true

-

276 if covered < content_len: 

-

277 msg = textwrap.dedent( 

-

278 """\ 

-

279 Value parser did not fully cover the entire line with tokens ( 

-

280 missing range {covered}..{content_len}). Occurred when parsing "{content}" 

-

281 """ 

-

282            ).format(covered=covered, content_len=content_len, content=content)

-

283 raise ValueError(msg) 

-

284 msg = textwrap.dedent( 

-

285 """\ 

-

286 Value parser emitted tokens for more text than was present? Should have 

-

287 emitted {content_len} characters, got {covered}. Occurred when parsing 

-

288 "{content}" 

-

289 """ 

-

290 ).format(covered=covered, content_len=content_len, content=content) 

-

291 raise ValueError(msg) 
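# Commented sketch with a stand-in token type (real callers pass Deb822
# tokens/elements; a bare `.text` attribute suffices here because the
# AttributeError fallback treats the object as a token):
#
#     class _Tok:
#         def __init__(self, text):
#             self.text = text
#
#     list(len_check_iterator("ab cd", [_Tok("ab"), _Tok(" "), _Tok("cd")]))
#     # passes: the token texts cover all 5 characters of "ab cd"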

-
diff --git a/coverage-report/d_e9c451f4ae334f76_formatter_py.html b/coverage-report/d_e9c451f4ae334f76_formatter_py.html deleted file mode 100644 index b80d750..0000000 --- a/coverage-report/d_e9c451f4ae334f76_formatter_py.html +++ /dev/null @@ -1,577 +0,0 @@
- Coverage for src/debputy/lsp/vendoring/_deb822_repro/formatter.py: 80% (128 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1import operator 

-

2 

-

3from ._util import BufferingIterator 

-

4from .tokens import Deb822Token 

-

5 

-

6# Consider these "opaque" enum-like values. The actual value was chosen to 

-

7# make repr easier to implement, but they are subject to change. 

-

8_CONTENT_TYPE_VALUE = "is_value" 

-

9_CONTENT_TYPE_COMMENT = "is_comment" 

-

10_CONTENT_TYPE_SEPARATOR = "is_separator" 

-

11 

-

12try: 

-

13 from typing import Iterator, Union, Literal 

-

14 from .types import TokenOrElement, FormatterCallback 

-

15except ImportError: 

-

16 pass 

-

17 

-

18 

-

19class FormatterContentToken(object): 

-

20 """Typed, tagged text for use with the formatting API 

-

21 

-

22 The FormatterContentToken is used by the formatting API and provides the 

-

23 formatter callback with context about the textual tokens it is supposed 

-

24 to format. 

-

25 """ 

-

26 

-

27 __slots__ = ("_text", "_content_type") 

-

28 

-

29 def __init__(self, text, content_type): 

-

30 # type: (str, object) -> None 

-

31 self._text = text 

-

32 self._content_type = content_type 

-

33 

-

34 @classmethod 

-

35 def from_token_or_element(cls, token_or_element): 

-

36 # type: (TokenOrElement) -> FormatterContentToken 

-

37 if isinstance(token_or_element, Deb822Token): 

-

38 if token_or_element.is_comment: 

-

39 return cls.comment_token(token_or_element.text) 

-

40 if token_or_element.is_whitespace: 

-

41 raise ValueError("FormatterContentType cannot be whitespace") 

-

42 return cls.value_token(token_or_element.text) 

-

43 # Elements are assumed to be content (this is specialized for the 

-

44 # interpretations where comments are always tokens). 

-

45 return cls.value_token(token_or_element.convert_to_text()) 

-

46 

-

47 @classmethod 

-

48 def separator_token(cls, text): 

-

49 # type: (str) -> FormatterContentToken 

-

50 # Special-case separators as a minor memory optimization 

-

51 if text == " ": 

-

52 return SPACE_SEPARATOR_FT 

-

53 if text == ",": 53 ↛ 54line 53 didn't jump to line 54, because the condition on line 53 was never true

-

54 return COMMA_SEPARATOR_FT 

-

55 return cls(text, _CONTENT_TYPE_SEPARATOR) 

-

56 

-

57 @classmethod 

-

58 def comment_token(cls, text): 

-

59 # type: (str) -> FormatterContentToken 

-

60 """Generates a single comment token with the provided text 

-

61 

-

62 Mostly useful for creating test cases 

-

63 """ 

-

64 return cls(text, _CONTENT_TYPE_COMMENT) 

-

65 

-

66 @classmethod 

-

67 def value_token(cls, text): 

-

68 # type: (str) -> FormatterContentToken 

-

69 """Generates a single value token with the provided text 

-

70 

-

71 Mostly useful for creating test cases 

-

72 """ 

-

73 return cls(text, _CONTENT_TYPE_VALUE) 

-

74 

-

75 @property 

-

76 def is_comment(self): 

-

77 # type: () -> bool 

-

78 """True if this formatter token represent a comment 

-

79 

-

80 This should be used for determining whether the token is a comment 

-

81 or not. It might be tempting to check whether the text in the token 

-

82 starts with a "#" but that is insufficient because a value *can* 

-

83 start with that as well. Whether it is a comment or a value is 

-

84 based on the context (it is a comment if and only if the "#" was 

-

85        at the start of a line) but the formatter often does not have the

-

86 context available to assert this. 

-

87 

-

88 The formatter *should* preserve the order of comments and interleave 

-

89        between the value tokens in the same order as it sees them. Failing

-

90 to preserve the order of comments and values can cause confusing 

-

91 comments (such as associating the comment with a different value 

-

92 than it was written for). 

-

93 

-

94 The formatter *may* discard comment tokens if it does not want to 

-

95 preserve them. If so, they would be omitted in the output, which 

-

96 may be acceptable in some cases. This is a lot better than 

-

97 re-ordering comments. 

-

98 

-

99 Formatters must be aware of the following special cases for comments: 

-

100 * Comments *MUST* be emitted after a newline. If the very first token 

-

101 is a comment, the formatter is expected to emit a newline before it 

-

102 as well (Fields cannot start immediately on a comment). 

-

103 """ 

-

104 return self._content_type is _CONTENT_TYPE_COMMENT 

-

105 

-

106 @property 

-

107 def is_value(self): 

-

108 # type: () -> bool 

-

109 """True if this formatter token represents a semantic value 

-

110 

-

111 The formatter *MUST* preserve values as-in in its output. It may 

-

112 "unpack" it from the token (as in, return it as a part of a plain 

-

113 str) but the value content must not be changed nor re-ordered relative 

-

114 to other value tokens (as that could change the meaning of the field). 

-

115 """ 

-

116 return self._content_type is _CONTENT_TYPE_VALUE 

-

117 

-

118 @property 

-

119 def is_separator(self): 

-

120 # type: () -> bool 

-

121 """True if this formatter token represents a separator token 

-

122 

-

123 The formatter is not required to preserve the provided separators but it 

-

124 is required to properly separate values. In fact, often is a lot easier 

-

125 to discard existing separator tokens. As an example, in whitespace 

-

126 separated list of values space, tab and newline all counts as separator. 

-

127 However, formatting-wise, there is a world of difference between the 

-

128 a space, tab and a newline. In particularly, newlines must be followed 

-

129 by an additional space or tab (to act as a value continuation line) if 

-

130 there is a value following it (otherwise, the generated output is 

-

131 invalid). 

-

132 """ 

-

133 return self._content_type is _CONTENT_TYPE_SEPARATOR 

-

134 

-

135 @property 

-

136 def is_whitespace(self): 

-

137 # type: () -> bool 

-

138 """True if this formatter token represents a whitespace token""" 

-

139 return self._content_type is _CONTENT_TYPE_SEPARATOR and self._text.isspace() 

-

140 

-

141 @property 

-

142 def text(self): 

-

143 # type: () -> str 

-

144 """The actual context of the token 

-

145 

-

146 This field *must not* be used to determine the type of token. The 

-

147 formatter cannot reliably tell whether "#..." is a comment or a value 

-

148 (it can be both). Use is_value and is_comment instead for discriminating 

-

149 token types. 

-

150 

-

151        For value tokens, this is the concrete value to be emitted.

-

152 

-

153 For comment token, this is the full comment text. 

-

154 

-

155 This is the same as str(token). 

-

156 """ 

-

157 return self._text 

-

158 

-

159 def __str__(self): 

-

160 # type: () -> str 

-

161 return self._text 

-

162 

-

163 def __repr__(self): 

-

164 # type: () -> str 

-

165 return "{}({!r}, {}=True)".format( 

-

166 self.__class__.__name__, self._text, self._content_type 

-

167 ) 

-

168 

-

169 

-

170SPACE_SEPARATOR_FT = FormatterContentToken(" ", _CONTENT_TYPE_SEPARATOR) 

-

171COMMA_SEPARATOR_FT = FormatterContentToken(",", _CONTENT_TYPE_SEPARATOR) 

-

172 

-

173 

-

174def one_value_per_line_formatter( 

-

175 indentation, # type: Union[int, Literal["FIELD_NAME_LENGTH"]] 

-

176 trailing_separator=True, # type: bool 

-

177 immediate_empty_line=False, # type: bool 

-

178): 

-

179 # type: (...) -> FormatterCallback 

-

180 """Provide a simple formatter that can handle indentation and trailing separators 

-

181 

-

182    All formatters returned by this function put exactly one value per line. This

-

183 pattern is commonly seen in the "Depends" field and similar fields of 

-

184 debian/control files. 

-

185 

-

186 :param indentation: Either the literal string "FIELD_NAME_LENGTH" or a positive 

-

187 integer, which determines the indentation for fields. If it is an integer, 

-

188 then a fixed indentation is used (notably the value 1 ensures the shortest 

-

189 possible indentation). Otherwise, if it is "FIELD_NAME_LENGTH", then the 

-

190 indentation is set such that it aligns the values based on the field name. 

-

191 :param trailing_separator: If True, then the last value will have a trailing 

-

192 separator token (e.g., ",") after it. 

-

193 :param immediate_empty_line: Whether the value should always start with an 

-

194 empty line. If True, then the result becomes something like "Field:\n value". 

-

195 

-

196 """ 

-

197 if indentation != "FIELD_NAME_LENGTH" and indentation < 1: 197 ↛ 198line 197 didn't jump to line 198, because the condition on line 197 was never true

-

198 raise ValueError('indentation must be at least 1 (or "FIELD_NAME_LENGTH")') 

-

199 

-

200 def _formatter( 

-

201 name, # type: str 

-

202 sep_token, # type: FormatterContentToken 

-

203 formatter_tokens, # type: Iterator[FormatterContentToken] 

-

204 ): 

-

205 # type: (...) -> Iterator[Union[FormatterContentToken, str]] 

-

206 if indentation == "FIELD_NAME_LENGTH": 

-

207 indent_len = len(name) + 2 

-

208 else: 

-

209 indent_len = indentation 

-

210 indent = " " * indent_len 

-

211 

-

212 emitted_first_line = False 

-

213 tok_iter = BufferingIterator(formatter_tokens) 

-

214 is_value = operator.attrgetter("is_value") 

-

215 if immediate_empty_line: 

-

216 emitted_first_line = True 

-

217 yield "\n" 

-

218 for t in tok_iter: 

-

219 if t.is_comment: 

-

220 if not emitted_first_line: 

-

221 yield "\n" 

-

222 yield t 

-

223 elif t.is_value: 

-

224 if not emitted_first_line: 

-

225 yield " " 

-

226 else: 

-

227 yield indent 

-

228 yield t 

-

229 if not sep_token.is_whitespace and ( 

-

230 trailing_separator or tok_iter.peek_find(is_value) 

-

231 ): 

-

232 yield sep_token 

-

233 yield "\n" 

-

234 else: 

-

235 # Skip existing separators (etc.) 

-

236 continue 

-

237 emitted_first_line = True 

-

238 

-

239 return _formatter 

-

240 

-

241 

-

242one_value_per_line_trailing_separator = one_value_per_line_formatter( 

-

243 "FIELD_NAME_LENGTH", trailing_separator=True 

-

244) 

-

245 

-

246 

-

247def format_field( 

-

248 formatter, # type: FormatterCallback 

-

249 field_name, # type: str 

-

250 separator_token, # type: FormatterContentToken 

-

251 token_iter, # type: Iterator[FormatterContentToken] 

-

252): 

-

253 # type: (...) -> str 

-

254 """Format a field using a provided formatter 

-

255 

-

256 This function formats a series of tokens using the provided formatter. 

-

257 It can be used as a standalone formatter engine and can be used in test 

-

258 suites to validate third-party formatters (enabling them to test for 

-

259 corner cases without involving parsing logic). 

-

260 

-

261 The formatter receives series of FormatterContentTokens (via the 

-

262 token_iter) and is expected to yield one or more str or 

-

263 FormatterContentTokens. The calling function will combine all of 

-

264 these into a single string, which will be used as the value. 

-

265 

-

266 The formatter is recommended to yield the provided value and comment 

-

267 tokens interleaved with text segments of whitespace and separators 

-

268 as part of its output. If it preserve comment and value tokens, the 

-

269 calling function can provide some runtime checks to catch bugs 

-

270 (like the formatter turning a comment into a value because it forgot 

-

271 to ensure that the comment was emitted directly after a newline 

-

272 character). 

-

273 

-

274 When writing a formatter, please keep the following in mind: 

-

275 

-

276 * The output of the formatter is appended directly after the ":" separator. 

-

277 Most formatters will want to emit either a space or a newline as the very 

-

278 first character for readability. 

-

279 (compare "Depends:foo\\n" to "Depends: foo\\n") 

-

280 

-

281 * The formatter must always end its output on a newline. This is a design 

-

282 choice of how the round-trip safe parser represent values that is imposed 

-

283 on the formatter. 

-

284 

-

285 * It is often easier to discard/ignore all separator tokens from the 

-

286      provided token sequence and instead just yield separator tokens/str

-

287 where the formatter wants to place them. 

-

288 

-

289 - The formatter is strongly recommended to special-case formatting 

-

290 for whitespace separators (check for `separator_token.is_whitespace`). 

-

291 

-

292        This is because space, tab and newline all count as valid separators

-

293 and can all appear in the token sequence. If the original field uses 

-

294 a mix of these separators it is likely to completely undermine the 

-

295 desired result. Not to mention the additional complexity of handling 

-

296 when a separator token happens to use the newline character which 

-

297        affects how the formatter is supposed to handle what comes after it

-

298 (see the rules for comments, empty lines and continuation line 

-

299 markers). 

-

300 

-

301 * The formatter must remember to emit a "continuation line" marker 

-

302 (typically a single space or tab) when emitting a value after 

-

303 a newline or a comment. A `yield " "` is sufficient. 

-

304 

-

305 - The continuation line marker may be embedded inside a str 

-

306 with other whitespace (such as the newline coming before it 

-

307 or/and whitespace used for indentation purposes following 

-

308 the marker). 

-

309 

-

310 * The formatter must not cause the output to contain completely 

-

311 empty/whitespace lines as these cause syntax errors. The first 

-

312 line never counts as an empty line (as it will be appended after 

-

313 the field name). 

-

314 

-

315 * Tokens must be discriminated via the `token.is_value` (etc.) 

-

316 properties. Assuming that `token.text.startswith("#")` implies a 

-

317 comment and similar stunts are wrong. As an example, "#foo" is a 

-

318 perfectly valid value in some contexts. 

-

319 

-

320 * Comment tokens *always* take up exactly one complete line including 

-

321 the newline character at the end of the line. They must be emitted 

-

322 directly after a newline character or another comment token. 

-

323 

-

324 * Special cases that are rare but can happen: 

-

325 

-

326 - Fields *can* start with comments and requires a formatter provided newline. 

-

327 (Example: "Depends:\\n# Comment here\\n foo") 

-

328 

-

329 - Fields *can* start on a separator or have two separators in a row. 

-

330 This is especially true for whitespace separated fields where every 

-

331 whitespace counts as a separator, but it can also happen with other 

-

332 separators (such as comma). 

-

333 

-

334 - Value tokens can contain whitespace (for non-whitespace separators). 

-

335        When they do, the formatter must not attempt to change nor "normalize"

-

336 the whitespace inside the value token as that might change how the 

-

337 value is interpreted. (If you want to normalize such whitespace, 

-

338 the formatter is at the wrong abstraction level. Instead, manipulate 

-

339 the values directly in the value interpretation layer) 

-

340 

-

341 This function will provide *some* runtime checks of its input and the 

-

342 output from the formatter to detect some errors early and provide 

-

343 helpful diagnostics. If you use the function for testing, you are 

-

344 recommended to rely on verifying the output of the function rather than 

-

345 relying on the runtime checks (as these are subject to change). 

-

346 

-

347 :param formatter: A formatter (see FormatterCallback for the type). 

-

348 Basic formatting is provided via one_value_per_line_trailing_separator 

-

349 (a formatter) or one_value_per_line_formatter (a formatter generator). 

-

350 :param field_name: The name of the field. 

-

351 :param separator_token: One of SPACE_SEPARATOR and COMMA_SEPARATOR 

-

352 :param token_iter: An iterable of tokens to be formatted. 

-

353 

-

354 The following example shows how to define a formatter_callback along with 

-

355 a few verifications. 

-

356 

-

357 >>> fmt_field_len_sep = one_value_per_line_trailing_separator 

-

358 >>> fmt_shortest = one_value_per_line_formatter( 

-

359 ... 1, 

-

360 ... trailing_separator=False 

-

361 ... ) 

-

362 >>> fmt_newline_first = one_value_per_line_formatter( 

-

363 ... 1, 

-

364 ... trailing_separator=False, 

-

365 ... immediate_empty_line=True 

-

366 ... ) 

-

367 >>> # Omit separator tokens for in the token list for simplicity (the formatter does 

-

368 >>> # not use them, and it enables us to keep the example simple by reusing the list) 

-

369 >>> tokens = [ 

-

370 ... FormatterContentToken.value_token("foo"), 

-

371 ... FormatterContentToken.comment_token("# some comment about bar\\n"), 

-

372 ... FormatterContentToken.value_token("bar"), 

-

373 ... ] 

-

374 >>> # Starting with fmt_dl_ts 

-

375 >>> print(format_field(fmt_field_len_sep, "Depends", COMMA_SEPARATOR_FT, tokens), end='') 

-

376 Depends: foo, 

-

377 # some comment about bar 

-

378 bar, 

-

379 >>> print(format_field(fmt_field_len_sep, "Architecture", SPACE_SEPARATOR_FT, tokens), end='') 

-

380 Architecture: foo 

-

381 # some comment about bar 

-

382 bar 

-

383 >>> # Control check for the special case where the field starts with a comment 

-

384 >>> print(format_field(fmt_field_len_sep, "Depends", COMMA_SEPARATOR_FT, tokens[1:]), end='') 

-

385 Depends: 

-

386 # some comment about bar 

-

387 bar, 

-

388 >>> # Also, check single line values (to ensure it ends on a newline) 

-

389 >>> print(format_field(fmt_field_len_sep, "Depends", COMMA_SEPARATOR_FT, tokens[2:]), end='') 

-

390 Depends: bar, 

-

391 >>> ### Changing format to the shortest length 

-

392 >>> print(format_field(fmt_shortest, "Depends", COMMA_SEPARATOR_FT, tokens), end='') 

-

393 Depends: foo, 

-

394 # some comment about bar 

-

395 bar 

-

396 >>> print(format_field(fmt_shortest, "Architecture", SPACE_SEPARATOR_FT, tokens), end='') 

-

397 Architecture: foo 

-

398 # some comment about bar 

-

399 bar 

-

400 >>> # Control check for the special case where the field starts with a comment 

-

401 >>> print(format_field(fmt_shortest, "Depends", COMMA_SEPARATOR_FT, tokens[1:]), end='') 

-

402 Depends: 

-

403 # some comment about bar 

-

404 bar 

-

405 >>> # Also, check single line values (to ensure it ends on a newline) 

-

406 >>> print(format_field(fmt_shortest, "Depends", COMMA_SEPARATOR_FT, tokens[2:]), end='') 

-

407 Depends: bar 

-

408 >>> ### Changing format to the newline first format 

-

409 >>> print(format_field(fmt_newline_first, "Depends", COMMA_SEPARATOR_FT, tokens), end='') 

-

410 Depends: 

-

411 foo, 

-

412 # some comment about bar 

-

413 bar 

-

414 >>> print(format_field(fmt_newline_first, "Architecture", SPACE_SEPARATOR_FT, tokens), end='') 

-

415 Architecture: 

-

416 foo 

-

417 # some comment about bar 

-

418 bar 

-

419 >>> # Control check for the special case where the field starts with a comment 

-

420 >>> print(format_field(fmt_newline_first, "Depends", COMMA_SEPARATOR_FT, tokens[1:]), end='') 

-

421 Depends: 

-

422 # some comment about bar 

-

423 bar 

-

424 >>> # Also, check single line values (to ensure it ends on a newline) 

-

425 >>> print(format_field(fmt_newline_first, "Depends", COMMA_SEPARATOR_FT, tokens[2:]), end='') 

-

426 Depends: 

-

427 bar 

-

428 """ 

-

429 formatted_tokens = [field_name, ":"] 

-

430 just_after_newline = False 

-

431 last_was_value_token = False 

-

432 if isinstance(token_iter, list): 

-

433 # Stop people from using this to test known "invalid" cases. 

-

434 last_token = token_iter[-1] 

-

435        if last_token.is_comment:  # coverage: 435 ↛ 436, the condition on line 435 was never true

-

436 raise ValueError( 

-

437 "Invalid token_iter: Field values cannot end with comments" 

-

438 ) 

-

439 for token in formatter(field_name, separator_token, token_iter): 

-

440 token_as_text = str(token) 

-

441 # If we are given formatter tokens, then use them to verify the output. 

-

442 if isinstance(token, FormatterContentToken): 

-

443 if token.is_comment: 

-

444                if not just_after_newline:  # coverage: 444 ↛ 445, the condition on line 444 was never true

-

445 raise ValueError( 

-

446 "Bad format: Comments must appear directly after a newline." 

-

447 ) 

-

448 # for the sake of ensuring people use proper test data. 

-

449                if not token_as_text.startswith("#"):  # coverage: 449 ↛ 450, the condition on line 449 was never true

-

450 raise ValueError("Invalid Comment token: Must start with #") 

-

451                if not token_as_text.endswith("\n"):  # coverage: 451 ↛ 452, the condition on line 451 was never true

-

452 raise ValueError("Invalid Comment token: Must end on a newline") 

-

453 elif token.is_value: 

-

454                if token_as_text[0].isspace() or token_as_text[-1].isspace():  # coverage: 454 ↛ 455, the condition on line 454 was never true

-

455 raise ValueError( 

-

456 "Invalid Value token: It cannot start nor end on whitespace" 

-

457 ) 

-

458                if just_after_newline:  # coverage: 458 ↛ 459, the condition on line 458 was never true

-

459 raise ValueError("Bad format: Missing continuation line marker") 

-

460                if last_was_value_token:  # coverage: 460 ↛ 461, the condition on line 460 was never true

-

461 raise ValueError("Bad format: Formatter omitted a separator") 

-

462 

-

463 last_was_value_token = token.is_value 

-

464 else: 

-

465 last_was_value_token = False 

-

466 

-

467 if just_after_newline: 

-

468            if token_as_text[0] in ("\r", "\n"):  # coverage: 468 ↛ 469, the condition on line 468 was never true

-

469 raise ValueError("Bad format: Saw completely empty line.") 

-

470            if not token_as_text[0].isspace() and not token_as_text.startswith("#"):  # coverage: 470 ↛ 471, the condition on line 470 was never true

-

471 raise ValueError("Bad format: Saw completely empty line.") 

-

472 formatted_tokens.append(token_as_text) 

-

473 just_after_newline = token_as_text.endswith("\n") 

-

474 

-

475 formatted_text = "".join(formatted_tokens) 

-

476    if not formatted_text.endswith("\n"):  # coverage: 476 ↛ 477, the condition on line 476 was never true

-

477 raise ValueError("Bad format: The field value must end on a newline") 

-

478 return formatted_text 

-
diff --git a/coverage-report/d_e9c451f4ae334f76_locatable_py.html b/coverage-report/d_e9c451f4ae334f76_locatable_py.html deleted file mode 100644 index 6ce16fc..0000000 --- a/coverage-report/d_e9c451f4ae334f76_locatable_py.html +++ /dev/null @@ -1,512 +0,0 @@
- Coverage for src/debputy/lsp/vendoring/_deb822_repro/locatable.py: 90% (122 statements), coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1import dataclasses 

-

2import itertools 

-

3import sys 

-

4 

-

5from typing import Optional, TYPE_CHECKING, Iterable 

-

6 

-

7if TYPE_CHECKING: 

-

8 from typing import Self 

-

9 from .parsing import Deb822Element 

-

10 

-

11 

-

12_DATA_CLASS_OPTIONAL_ARGS = {} 

-

13if sys.version_info >= (3, 10):  # coverage: 13 ↛ 20, the condition on line 13 was never false

-

14 # The `slots` feature greatly reduces the memory usage by avoiding the `__dict__` 

-

15 # instance. But at the end of the day, performance is "nice to have" for this 

-

16 # feature and all current consumers are at Python 3.12 (except the CI tests...) 

-

17 _DATA_CLASS_OPTIONAL_ARGS["slots"] = True 

-

18 

-

19 

-

20@dataclasses.dataclass(frozen=True, **_DATA_CLASS_OPTIONAL_ARGS) 

-

21class Position: 

-

22 """Describes a "cursor" position inside a file 

-

23 

-

24 It consists of a line position (0-based line number) and a cursor position. This is modelled 

-

25 after the "Position" in Language Server Protocol (LSP). 

-

26 """ 

-

27 

-

28 line_position: int 

-

29 """Describes the line position as a 0-based line number 

-

30 

-

31 See line_number if you want a human-readable line number 

-

32 """ 

-

33 cursor_position: int 

-

34 """Describes a cursor position ("between two characters") or a character offset. 

-

35 

-

36 When this value is 0, the position is at the start of a line. When it is 1, then 

-

37 the position is between the first and the second character (etc.). 

-

38 """ 

-

39 

-

40 @property 

-

41 def line_number(self) -> int: 

-

42 """The line number as human would count it""" 

-

43 return self.line_position + 1 

-

44 

-

45 def relative_to(self, new_base: "Position") -> "Position": 

-

46 """Offsets the position relative to another position 

-

47 

-

48        This is useful to avoid repeated `position_in_file()` calls: cache

-

49        the parent's position, and then for its children use `range_in_parent()`

-

50 plus `relative_to()` to rebase the range. 

-

51 

-

52 >>> parent: Locatable = ... # doctest: +SKIP 

-

53 >>> children: Iterable[Locatable] = ... # doctest: +SKIP 

-

54        >>> # This will be expensive

-

55 >>> parent_pos = parent.position_in_file( # doctest: +SKIP 

-

56 ... skip_leading_comments=False 

-

57 ... ) 

-

58 >>> for child in children: # doctest: +SKIP 

-

59 ... child_pos = child.position_in_parent() 

-

60 ... # Avoid a position_in_file() for each child 

-

61 ... child_pos_in_file = child_pos.relative_to(parent_pos) 

-

62 ... ... # Use the child_pos_in_file for something 

-

63 

-

64 :param new_base: The position that should have been the origin rather than 

-

65 (0, 0). 

-

66 :returns: The range offset relative to the base position. 

-

67 """ 

-

68 if self.line_position == 0 and self.cursor_position == 0: 

-

69 return new_base 

-

70 if new_base.line_position == 0 and new_base.cursor_position == 0: 

-

71 return self 

-

72 if self.line_position == 0: 

-

73 line_number = new_base.line_position 

-

74 line_char_offset = new_base.cursor_position + self.cursor_position 

-

75 else: 

-

76 line_number = self.line_position + new_base.line_position 

-

77 line_char_offset = self.cursor_position 

-

78 return Position( 

-

79 line_number, 

-

80 line_char_offset, 

-

81 ) 

-

82 

-

83 

-

84@dataclasses.dataclass(frozen=True, **_DATA_CLASS_OPTIONAL_ARGS) 

-

85class Range: 

-

86 """Describes a range inside a file 

-

87 

-

88 This can be useful to describe things like "from line 4, cursor position 2 

-

89 to line 7 to cursor position 10". When describing a full line including the 

-

90 newline, use line N, cursor position 0 to line N+1. cursor position 0. 

-

91 

-

92 It is also used to denote the size of objects (in that case, the start position 

-

93 is set to START_POSITION as a convention if the precise location is not 

-

94 specified). 

-

95 

-

96 This is modelled after the "Range" in Language Server Protocol (LSP). 

-

97 """ 

-

98 

-

99 start_pos: Position 

-

100 end_pos: Position 

-

101 

-

102 @property 

-

103 def start_line_position(self) -> int: 

-

104 """Describes the start line position as a 0-based line number 

-

105 

-

106 See start_line_number if you want a human-readable line number 

-

107 """ 

-

108 return self.start_pos.line_position 

-

109 

-

110 @property 

-

111 def start_cursor_position(self) -> int: 

-

112 """Describes the starting cursor position 

-

113 

-

114 When this value is 0, the position is at the start of a line. When it is 1, then 

-

115 the position is between the first and the second character (etc.). 

-

116 """ 

-

117 return self.start_pos.cursor_position 

-

118 

-

119 @property 

-

120 def start_line_number(self) -> int: 

-

121 """The start line number as human would count it""" 

-

122 return self.start_pos.line_number 

-

123 

-

124 @property 

-

125 def end_line_position(self) -> int: 

-

126 """Describes the end line position as a 0-based line number 

-

127 

-

128 See end_line_number if you want a human-readable line number 

-

129 """ 

-

130 return self.end_pos.line_position 

-

131 

-

132 @property 

-

133 def end_line_number(self) -> int: 

-

134 """The end line number as human would count it""" 

-

135 return self.end_pos.line_number 

-

136 

-

137 @property 

-

138 def end_cursor_position(self) -> int: 

-

139 """Describes the end cursor position 

-

140 

-

141 When this value is 0, the position is at the start of a line. When it is 1, then 

-

142 the position is between the first and the second character (etc.). 

-

143 """ 

-

144 return self.end_pos.cursor_position 

-

145 

-

146 @property 

-

147 def line_count(self) -> int: 

-

148 """The number of lines (newlines) spanned by this range. 

-

149 

-

150 Will be zero when the range fits inside one line. 

-

151 """ 

-

152 return self.end_line_position - self.start_line_position 

-

153 

-

154 @classmethod 

-

155 def between(cls, a: Position, b: Position) -> "Self": 

-

156 """Computes the range between two positions 

-

157 

-

158 Unlike the constructor, this will always create a "positive" range. 

-

159 That is, the "earliest" position will always be the start position 

-

160 regardless of the order they were passed to `between`. When using 

-

161 the Range constructor, you have freedom to do "inverse" ranges 

-

162 in case that is ever useful 

-

163 """ 

-

164 if a.line_position > b.line_position or ( 164 ↛ 168line 164 didn't jump to line 168, because the condition on line 164 was never true

-

165 a.line_position == b.line_position and a.cursor_position > b.cursor_position 

-

166 ): 

-

167 # Order swap, so `a` is always the earliest position 

-

168 a, b = b, a 

-

169 return cls( 

-

170 a, 

-

171 b, 

-

172 ) 

-

173 

-

174 def relative_to(self, new_base: Position) -> "Range": 

-

175 """Offsets the range relative to another position 

-

176 

-

177 This is useful to avoid the `position_in_file()` method by caching where 

-

178 the parents position and then for its children you use `range_in_parent()` 

-

179 plus `relative_to()` to rebase the range. 

-

180 

-

181 >>> parent: Locatable = ... # doctest: +SKIP 

-

182 >>> children: Iterable[Locatable] = ... # doctest: +SKIP 

-

183 >>> # This will expensive 

-

184 >>> parent_pos = parent.position_in_file( # doctest: +SKIP 

-

185 ... skip_leading_comments=False 

-

186 ... ) 

-

187 >>> for child in children: # doctest: +SKIP 

-

188 ... child_range = child.range_in_parent() 

-

189 ... # Avoid a position_in_file() for each child 

-

190 ... child_range_in_file = child_range.relative_to(parent_pos) 

-

191 ... ... # Use the child_range_in_file for something 

-

192 

-

193 :param new_base: The position that should have been the origin rather than 

-

194 (0, 0). 

-

195 :returns: The range offset relative to the base position. 

-

196 """ 

-

197 if new_base == START_POSITION: 

-

198 return self 

-

199 return Range( 

-

200 self.start_pos.relative_to(new_base), 

-

201 self.end_pos.relative_to(new_base), 

-

202 ) 

-

203 

-

204 def as_size(self) -> "Range": 

-

205 """Reduces the range to a "size" 

-

206 

-

207 The returned range will always have its start position to (0, 0) and 

-

208 its end position shifted accordingly if it was not already based at 

-

209 (0, 0). 

-

210 

-

211 The original range is not mutated and, if it is already at (0, 0), the 

-

212 method will just return it as-is. 

-

213 """ 

-

214 if self.start_pos == START_POSITION: 214 ↛ 216line 214 didn't jump to line 216, because the condition on line 214 was never false

-

215 return self 

-

216 line_count = self.line_count 

-

217 if line_count: 

-

218 new_end_cursor_position = self.end_cursor_position 

-

219 else: 

-

220 delta = self.end_cursor_position - self.start_cursor_position 

-

221 new_end_cursor_position = delta 

-

222 return Range( 

-

223 START_POSITION, 

-

224 Position( 

-

225 line_count, 

-

226 new_end_cursor_position, 

-

227 ), 

-

228 ) 

-

229 

-

230 @classmethod 

-

231 def from_position_and_size(cls, base: Position, size: "Range") -> "Self": 

-

232 """Compute a range from a position and the size of another range 

-

233 

-

234 This provides you with a range starting at the base position that has 

-

235 the same effective span as the size parameter. 

-

236 

-

237 :param base: The desired starting position 

-

238 :param size: A range, which will be used as a size (that is, it will 

-

239 be reduced to a size via the `as_size()` method) for the resulting 

-

240 range 

-

241 :returns: A range at the provided base position that has the size of 

-

242 the provided range. 

-

243 """ 

-

244 line_position = base.line_position 

-

245 cursor_position = base.cursor_position 

-

246 size_rebased = size.as_size() 

-

247 lines = size_rebased.line_count 

-

248 if lines: 

-

249 line_position += lines 

-

250 cursor_position = size_rebased.end_cursor_position 

-

251 else: 

-

252 delta = ( 

-

253 size_rebased.end_cursor_position - size_rebased.start_cursor_position 

-

254 ) 

-

255 cursor_position += delta 

-

256 return cls( 

-

257 base, 

-

258 Position( 

-

259 line_position, 

-

260 cursor_position, 

-

261 ), 

-

262 ) 

-

263 

-

264 @classmethod 

-

265 def from_position_and_sizes( 

-

266 cls, base: Position, sizes: Iterable["Range"] 

-

267 ) -> "Self": 

-

268 """Compute a range from a position and the size of number of ranges 

-

269 

-

270 :param base: The desired starting position 

-

271 :param sizes: All the ranges that combined makes up the size of the 

-

272 desired position. Note that order can affect the end result. Particularly 

-

273 the end character offset gets reset every time a size spans a line. 

-

274 :returns: A range at the provided base position that has the size of 

-

275 the provided range. 

-

276 """ 

-

277 line_position = base.line_position 

-

278 cursor_position = base.cursor_position 

-

279 for size in sizes: 

-

280 size_rebased = size.as_size() 

-

281 lines = size_rebased.line_count 

-

282 if lines: 

-

283 line_position += lines 

-

284 cursor_position = size_rebased.end_cursor_position 

-

285 else: 

-

286 delta = ( 

-

287 size_rebased.end_cursor_position 

-

288 - size_rebased.start_cursor_position 

-

289 ) 

-

290 cursor_position += delta 

-

291 return cls( 

-

292 base, 

-

293 Position( 

-

294 line_position, 

-

295 cursor_position, 

-

296 ), 

-

297 ) 

-

298 

-

299 

-

300START_POSITION = Position(0, 0) 

-

301SECOND_CHAR_POS = Position(0, 1) 

-

302SECOND_LINE_POS = Position(1, 0) 

-

303ONE_CHAR_RANGE = Range.between(START_POSITION, SECOND_CHAR_POS) 

-

304ONE_LINE_RANGE = Range.between(START_POSITION, SECOND_LINE_POS) 

-

305 

-

306 

-

307class Locatable: 

-

308 __slots__ = () 

-

309 

-

310 @property 

-

311 def parent_element(self): 

-

312 # type: () -> Optional[Deb822Element] 

-

313 raise NotImplementedError 

-

314 

-

315 def position_in_parent(self, *, skip_leading_comments: bool = True) -> Position: 

-

316 """The start position of this token/element inside its parent 

-

317 

-

318 This is operation is generally linear to the number of "parts" (elements/tokens) 

-

319 inside the parent. 

-

320 

-

321 :param skip_leading_comments: If True, then if any leading comment that 

-

322 that can be skipped will be excluded in the position of this locatable. 

-

323 This is useful if you want the position "semantic" content of a field 

-

324 without also highlighting a leading comment. Remember to align this 

-

325 parameter with the `size` call, so the range does not "overshoot" 

-

326 into the next element (or falls short and only covers part of an 

-

327 element). Note that this option can only be used to filter out leading 

-

328 comments when the comments are a subset of the element. It has no 

-

329 effect on elements that are entirely made of comments. 

-

330 """ 

-

331 # pylint: disable=unused-argument 

-

332 # Note: The base class makes no assumptions about what tokens can be skipped, 

-

333 # therefore, skip_leading_comments is unused here. However, I do not want the 

-

334 # API to differ between elements and tokens. 

-

335 

-

336 parent = self.parent_element 

-

337 if parent is None: 337 ↛ 338line 337 didn't jump to line 338, because the condition on line 337 was never true

-

338 raise TypeError( 

-

339 "Cannot determine the position since the object is detached" 

-

340 ) 

-

341 relevant_parts = itertools.takewhile( 

-

342 lambda x: x is not self, parent.iter_parts() 

-

343 ) 

-

344 span = Range.from_position_and_sizes( 

-

345 START_POSITION, 

-

346 (x.size(skip_leading_comments=False) for x in relevant_parts), 

-

347 ) 

-

348 return span.end_pos 

-

349 

-

350 def range_in_parent(self, *, skip_leading_comments: bool = True) -> Range: 

-

351 """The range of this token/element inside its parent 

-

352 

-

353 This is operation is generally linear to the number of "parts" (elements/tokens) 

-

354 inside the parent. 

-

355 

-

356 :param skip_leading_comments: If True, then if any leading comment that 

-

357 that can be skipped will be excluded in the position of this locatable. 

-

358 This is useful if you want the position "semantic" content of a field 

-

359 without also highlighting a leading comment. Remember to align this 

-

360 parameter with the `size` call, so the range does not "overshoot" 

-

361 into the next element (or falls short and only covers part of an 

-

362 element). Note that this option can only be used to filter out leading 

-

363 comments when the comments are a subset of the element. It has no 

-

364 effect on elements that are entirely made of comments. 

-

365 """ 

-

366 pos = self.position_in_parent(skip_leading_comments=skip_leading_comments) 

-

367 return Range.from_position_and_size( 

-

368 pos, self.size(skip_leading_comments=skip_leading_comments) 

-

369 ) 

-

370 

-

371 def position_in_file(self, *, skip_leading_comments: bool = True) -> Position: 

-

372 """The start position of this token/element in this file 

-

373 

-

374 This is an *expensive* operation and in many cases have to traverse 

-

375 the entire file structure to answer the query. Consider whether 

-

376 you can maintain the parent's position and then use 

-

377 `position_in_parent()` combined with 

-

378 `child_position.relative_to(parent_position)` 

-

379 

-

380 :param skip_leading_comments: If True, then if any leading comment that 

-

381 that can be skipped will be excluded in the position of this locatable. 

-

382 This is useful if you want the position "semantic" content of a field 

-

383 without also highlighting a leading comment. Remember to align this 

-

384 parameter with the `size` call, so the range does not "overshoot" 

-

385 into the next element (or falls short and only covers part of an 

-

386 element). Note that this option can only be used to filter out leading 

-

387 comments when the comments are a subset of the element. It has no 

-

388 effect on elements that are entirely made of comments. 

-

389 """ 

-

390 position = self.position_in_parent( 

-

391 skip_leading_comments=skip_leading_comments, 

-

392 ) 

-

393 parent = self.parent_element 

-

394 if parent is not None: 394 ↛ 397line 394 didn't jump to line 397, because the condition on line 394 was never false

-

395 parent_position = parent.position_in_file(skip_leading_comments=False) 

-

396 position = position.relative_to(parent_position) 

-

397 return position 

-

398 

-

399 def size(self, *, skip_leading_comments: bool = True) -> Range: 

-

400 """Describe the objects size as a continuous range 

-

401 

-

402 :param skip_leading_comments: If True, then if any leading comment that 

-

403 that can be skipped will be excluded in the position of this locatable. 

-

404 This is useful if you want the position "semantic" content of a field 

-

405 without also highlighting a leading comment. Remember to align this 

-

406 parameter with the `position_in_file` or `position_in_parent` call, 

-

407 so the range does not "overshoot" into the next element (or falls 

-

408 short and only covers part of an element). Note that this option can 

-

409 only be used to filter out leading comments when the comments are a 

-

410 subset of the element. It has no effect on elements that are entirely 

-

411 made of comments. 

-

412 """ 

-

413 raise NotImplementedError 

-
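The deleted page's annotated source spells out how a child's position is
rebased onto its parent's position via Position.relative_to(). Below is a
minimal, self-contained re-derivation of that rule for illustration only;
it trims the vendored class down to the two fields and one method in
question (minus two identity shortcuts that do not change the result) and
is not an import of the real module.

import dataclasses


@dataclasses.dataclass(frozen=True)
class Position:
    """Trimmed re-derivation of the Position type from the deleted source."""

    line_position: int    # 0-based line number
    cursor_position: int  # offset "between two characters" on that line

    def relative_to(self, new_base: "Position") -> "Position":
        # Rebasing adds the base's line offset. The base's cursor offset
        # only carries over when this position sits on the base's own
        # (first) line; on any later line the cursor offset is already
        # absolute within that line.
        if self.line_position == 0:
            return Position(
                new_base.line_position,
                new_base.cursor_position + self.cursor_position,
            )
        return Position(
            self.line_position + new_base.line_position,
            self.cursor_position,
        )


# A child 7 characters into a parent that itself starts at line 5, column 2:
parent_pos = Position(4, 2)
child_pos = Position(0, 7)
print(child_pos.relative_to(parent_pos))
# Position(line_position=4, cursor_position=9)

This is the access pattern the module's docstrings recommend: compute the
expensive position_in_file() once for the parent, then rebase each child's
cheap position_in_parent() result onto it instead of walking the whole file
per child.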
- - - diff --git a/coverage-report/d_e9c451f4ae334f76_parsing_py.html b/coverage-report/d_e9c451f4ae334f76_parsing_py.html deleted file mode 100644 index 40a0c05..0000000 --- a/coverage-report/d_e9c451f4ae334f76_parsing_py.html +++ /dev/null @@ -1,3596 +0,0 @@ - - - - - Coverage for src/debputy/lsp/vendoring/_deb822_repro/parsing.py: 59% - - - - - -
-
-

- Coverage for src/debputy/lsp/vendoring/_deb822_repro/parsing.py: - 59% -

- -

- 1464 statements   - - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.2.7, - created at 2024-04-07 12:14 +0200 -

- -
-
-
-

1# -*- coding: utf-8 -*- vim: fileencoding=utf-8 : 

-

2 

-

3import collections.abc 

-

4import contextlib 

-

5import sys 

-

6import textwrap 

-

7import weakref 

-

8from abc import ABC 

-

9from types import TracebackType 

-

10from weakref import ReferenceType 

-

11 

-

12from ._util import ( 

-

13 combine_into_replacement, 

-

14 BufferingIterator, 

-

15 len_check_iterator, 

-

16) 

-

17from .formatter import ( 

-

18 FormatterContentToken, 

-

19 one_value_per_line_trailing_separator, 

-

20 format_field, 

-

21) 

-

22from .locatable import Locatable, START_POSITION, Position, Range 

-

23from .tokens import ( 

-

24 Deb822Token, 

-

25 Deb822ValueToken, 

-

26 Deb822SemanticallySignificantWhiteSpace, 

-

27 Deb822SpaceSeparatorToken, 

-

28 Deb822CommentToken, 

-

29 Deb822WhitespaceToken, 

-

30 Deb822ValueContinuationToken, 

-

31 Deb822NewlineAfterValueToken, 

-

32 Deb822CommaToken, 

-

33 Deb822FieldNameToken, 

-

34 Deb822FieldSeparatorToken, 

-

35 Deb822ErrorToken, 

-

36 tokenize_deb822_file, 

-

37 comma_split_tokenizer, 

-

38 whitespace_split_tokenizer, 

-

39) 

-

40from .types import AmbiguousDeb822FieldKeyError, SyntaxOrParseError 

-

41from debian._util import ( 

-

42 resolve_ref, 

-

43 LinkedList, 

-

44 LinkedListNode, 

-

45 OrderedSet, 

-

46 _strI, 

-

47 default_field_sort_key, 

-

48) 

-

49 

-

50try: 

-

51 from typing import ( 

-

52 Iterable, 

-

53 Iterator, 

-

54 List, 

-

55 Union, 

-

56 Dict, 

-

57 Optional, 

-

58 Callable, 

-

59 Any, 

-

60 Generic, 

-

61 Type, 

-

62 Tuple, 

-

63 IO, 

-

64 cast, 

-

65 overload, 

-

66 Mapping, 

-

67 TYPE_CHECKING, 

-

68 Sequence, 

-

69 ) 

-

70 from debian._util import T 

-

71 

-

72 # for some reason, pylint does not see that Commentish is used in typing 

-

73 from .types import ( # pylint: disable=unused-import 

-

74 ST, 

-

75 VE, 

-

76 TE, 

-

77 ParagraphKey, 

-

78 TokenOrElement, 

-

79 Commentish, 

-

80 ParagraphKeyBase, 

-

81 FormatterCallback, 

-

82 ) 

-

83 

-

84 if TYPE_CHECKING: 

-

85 StreamingValueParser = Callable[ 

-

86 [Deb822Token, BufferingIterator[Deb822Token]], VE 

-

87 ] 

-

88 StrToValueParser = Callable[[str], Iterable[Union["Deb822Token", VE]]] 

-

89 KVPNode = LinkedListNode["Deb822KeyValuePairElement"] 

-

90 else: 

-

91 StreamingValueParser = None 

-

92 StrToValueParser = None 

-

93 KVPNode = None 

-

94except ImportError: 

-

95 if not TYPE_CHECKING: 

-

96 # pylint: disable=unnecessary-lambda-assignment 

-

97 cast = lambda t, v: v 

-

98 overload = lambda f: None 

-

99 

-

100 

-

101class ValueReference(Generic[TE]): 

-

102 """Reference to a value inside a Deb822 paragraph 

-

103 

-

104 This is useful for cases where want to modify values "in-place" or maybe 

-

105 conditionally remove a value after looking at it. 

-

106 

-

107 ValueReferences can be invalidated by various changes or actions performed 

-

108 to the underlying provider of the value reference. As an example, sorting 

-

109 a list of values will generally invalidate all ValueReferences related to 

-

110 that list. 

-

111 

-

112 The ValueReference will raise validity issues where it detects them but most 

-

113 of the time it will not notice. As a means to this end, the ValueReference 

-

114 will *not* keep a strong reference to the underlying value. This enables it 

-

115 to detect when the container goes out of scope. However, keep in mind that 

-

116 the timeliness of garbage collection is implementation defined (e.g., pypy 

-

117 does not use ref-counting). 

-

118 """ 

-

119 

-

120 __slots__ = ( 

-

121 "_node", 

-

122 "_render", 

-

123 "_value_factory", 

-

124 "_removal_handler", 

-

125 "_mutation_notifier", 

-

126 ) 

-

127 

-

128 def __init__( 

-

129 self, 

-

130 node, # type: LinkedListNode[TE] 

-

131 render, # type: Callable[[TE], str] 

-

132 value_factory, # type: Callable[[str], TE] 

-

133 removal_handler, # type: Callable[[LinkedListNode[TokenOrElement]], None] 

-

134 mutation_notifier, # type: Optional[Callable[[], None]] 

-

135 ): 

-

136 self._node = weakref.ref( 

-

137 node 

-

138 ) # type: Optional[ReferenceType[LinkedListNode[TE]]] 

-

139 self._render = render 

-

140 self._value_factory = value_factory 

-

141 self._removal_handler = removal_handler 

-

142 self._mutation_notifier = mutation_notifier 

-

143 

-

144 def _resolve_node(self): 

-

145 # type: () -> LinkedListNode[TE] 

-

146 # NB: We check whether the "ref" itself is None (instead of the ref resolving to None) 

-

147 # This enables us to tell the difference between "known removal" vs. "garbage collected" 

-

148 if self._node is None: 148 ↛ 149line 148 didn't jump to line 149, because the condition on line 148 was never true

-

149 raise RuntimeError("Cannot use ValueReference after remove()") 

-

150 node = self._node() 

-

151 if node is None: 151 ↛ 152line 151 didn't jump to line 152, because the condition on line 151 was never true

-

152 raise RuntimeError("ValueReference is invalid (garbage collected)") 

-

153 return node 

-

154 

-

155 @property 

-

156 def value(self): 

-

157 # type: () -> str 

-

158 """Resolve the reference into a str""" 

-

159 return self._render(self._resolve_node().value) 

-

160 

-

161 @value.setter 

-

162 def value(self, new_value): 

-

163 # type: (str) -> None 

-

164 """Update the reference value 

-

165 

-

166 Updating the value via this method will *not* invalidate the reference (or other 

-

167 references to the same container). 

-

168 

-

169 This can raise an exception if the new value does not follow the requirements 

-

170 for the referenced values. As an example, values in whitespace separated 

-

171 lists cannot contain spaces and would trigger an exception. 

-

172 """ 

-

173 self._resolve_node().value = self._value_factory(new_value) 

-

174 if self._mutation_notifier is not None: 

-

175 self._mutation_notifier() 

-

176 

-

177 @property 

-

178 def locatable(self): 

-

179 # type: () -> Locatable 

-

180 """Reference to a locatable that can be used to determine where this value is""" 

-

181 return self._resolve_node().value 

-

182 

-

183 def remove(self): 

-

184 # type: () -> None 

-

185 """Remove the underlying value 

-

186 

-

187 This will invalidate the ValueReference (and any other ValueReferences pointing 

-

188 to that exact value). The validity of other ValueReferences to that container 

-

189 remains unaffected. 

-

190 """ 

-

191 self._removal_handler( 

-

192 cast("LinkedListNode[TokenOrElement]", self._resolve_node()) 

-

193 ) 

-

194 self._node = None 

-

195 

-

196 

-

197if sys.version_info >= (3, 9) or TYPE_CHECKING: 197 ↛ 204line 197 didn't jump to line 204, because the condition on line 197 was never false

-

198 _Deb822ParsedTokenList_ContextManager = contextlib.AbstractContextManager[T] 

-

199else: 

-

200 # Python 3.5 - 3.8 compat - we are not allowed to subscript the abc.Iterator 

-

201 # - use this little hack to work around it 

-

202 # Note that Python 3.5 is so old that it does not have AbstractContextManager, 

-

203 # so we re-implement it here. 

-

204 class _Deb822ParsedTokenList_ContextManager(Generic[T]): 

-

205 

-

206 def __enter__(self): 

-

207 return self 

-

208 

-

209 def __exit__(self, exc_type, exc_val, exc_tb): 

-

210 return None 

-

211 

-

212 

-

213class Deb822ParsedTokenList( 

-

214 Generic[VE, ST], 

-

215 _Deb822ParsedTokenList_ContextManager["Deb822ParsedTokenList[VE, ST]"], 

-

216): 

-

217 

-

218 def __init__( 

-

219 self, 

-

220 kvpair_element, # type: 'Deb822KeyValuePairElement' 

-

221 interpreted_value_element, # type: Deb822InterpretationProxyElement 

-

222 vtype, # type: Type[VE] 

-

223 stype, # type: Type[ST] 

-

224 str2value_parser, # type: StrToValueParser[VE] 

-

225 default_separator_factory, # type: Callable[[], ST] 

-

226 render, # type: Callable[[VE], str] 

-

227 ): 

-

228 # type: (...) -> None 

-

229 self._kvpair_element = kvpair_element 

-

230 self._proxy_element = interpreted_value_element 

-

231 self._token_list = LinkedList(interpreted_value_element.parts) 

-

232 self._vtype = vtype 

-

233 self._stype = stype 

-

234 self._str2value_parser = str2value_parser 

-

235 self._default_separator_factory = default_separator_factory 

-

236 self._value_factory = _parser_to_value_factory(str2value_parser, vtype) 

-

237 self._render = render 

-

238 self._format_preserve_original_formatting = True 

-

239 self._formatter = ( 

-

240 one_value_per_line_trailing_separator 

-

241 ) # type: FormatterCallback 

-

242 self._changed = False 

-

243 self.__continuation_line_char = None # type: Optional[str] 

-

244 assert self._token_list 

-

245 last_token = self._token_list.tail 

-

246 

-

247 if last_token is not None and isinstance( 247 ↛ exitline 247 didn't return from function '__init__', because the condition on line 247 was never false

-

248 last_token, Deb822NewlineAfterValueToken 

-

249 ): 

-

250 # We always remove the last newline (if present), because then 

-

251 # adding values will happen after the last value rather than on 

-

252 # a new line by default. 

-

253 # 

-

254 # On write, we always ensure the value ends on a newline (even 

-

255 # if it did not before). This is simpler and should be a 

-

256 # non-issue in practise. 

-

257 self._token_list.pop() 

-

258 

-

259 def __iter__(self): 

-

260 # type: () -> Iterator[str] 

-

261 yield from (self._render(v) for v in self.value_parts) 

-

262 

-

263 def __bool__(self): 

-

264 # type: () -> bool 

-

265 return next(iter(self), None) is not None 

-

266 

-

267 def __exit__( 

-

268 self, 

-

269 exc_type, # type: Optional[Type[BaseException]] 

-

270 exc_val, # type: Optional[BaseException] 

-

271 exc_tb, # type: Optional[TracebackType] 

-

272 ): 

-

273 # type: (...) -> Optional[bool] 

-

274 if exc_type is None and self._changed: 274 ↛ 276line 274 didn't jump to line 276, because the condition on line 274 was never false

-

275 self._update_field() 

-

276 return super().__exit__(exc_type, exc_val, exc_tb) 

-

277 

-

278 @property 

-

279 def value_parts(self): 

-

280 # type: () -> Iterator[VE] 

-

281 yield from (v for v in self._token_list if isinstance(v, self._vtype)) 

-

282 

-

283 def _mark_changed(self): 

-

284 # type: () -> None 

-

285 self._changed = True 

-

286 

-

287 def iter_value_references(self): 

-

288 # type: () -> Iterator[ValueReference[VE]] 

-

289 """Iterate over all values in the list (as ValueReferences) 

-

290 

-

291 This is useful for doing inplace modification of the values or even 

-

292 streaming removal of field values. It is in general also more 

-

293 efficient when more than one value is updated or removed. 

-

294 """ 

-

295 yield from ( 

-

296 ValueReference( 

-

297 cast("LinkedListNode[VE]", n), 

-

298 self._render, 

-

299 self._value_factory, 

-

300 self._remove_node, 

-

301 self._mark_changed, 

-

302 ) 

-

303 for n in self._token_list.iter_nodes() 

-

304 if isinstance(n.value, self._vtype) 

-

305 ) 

-

306 

-

307 def append_separator(self, space_after_separator=True): 

-

308 # type: (bool) -> None 

-

309 

-

310 separator_token = self._default_separator_factory() 

-

311 if separator_token.is_whitespace: 311 ↛ 314line 311 didn't jump to line 314, because the condition on line 311 was never false

-

312 space_after_separator = False 

-

313 

-

314 self._changed = True 

-

315 self._append_continuation_line_token_if_necessary() 

-

316 self._token_list.append(separator_token) 

-

317 

-

318 if space_after_separator and not separator_token.is_whitespace: 318 ↛ 319line 318 didn't jump to line 319, because the condition on line 318 was never true

-

319 self._token_list.append(Deb822WhitespaceToken(" ")) 

-

320 

-

321 def replace(self, orig_value, new_value): 

-

322 # type: (str, str) -> None 

-

323 """Replace the first instance of a value with another 

-

324 

-

325 This method will *not* affect the validity of ValueReferences. 

-

326 """ 

-

327 vtype = self._vtype 

-

328 for node in self._token_list.iter_nodes(): 328 ↛ 334line 328 didn't jump to line 334, because the loop on line 328 didn't complete

-

329 if isinstance(node.value, vtype) and self._render(node.value) == orig_value: 

-

330 node.value = self._value_factory(new_value) 

-

331 self._changed = True 

-

332 break 

-

333 else: 

-

334 raise ValueError("list.replace(x, y): x not in list") 

-

335 

-

336 def remove(self, value): 

-

337 # type: (str) -> None 

-

338 """Remove the first instance of a value 

-

339 

-

340 Removal will invalidate ValueReferences to the value being removed. 

-

341 ValueReferences to other values will be unaffected. 

-

342 """ 

-

343 vtype = self._vtype 

-

344 for node in self._token_list.iter_nodes(): 

-

345 if isinstance(node.value, vtype) and self._render(node.value) == value: 

-

346 node_to_remove = node 

-

347 break 

-

348 else: 

-

349 raise ValueError("list.remove(x): x not in list") 

-

350 

-

351 return self._remove_node(node_to_remove) 

-

352 

-

353 def _remove_node(self, node_to_remove): 

-

354 # type: (LinkedListNode[TokenOrElement]) -> None 

-

355 vtype = self._vtype 

-

356 self._changed = True 

-

357 

-

358 # We naively want to remove the node and every thing to the left of it 

-

359 # until the previous value. That is the basic idea for now (ignoring 

-

360 # special-cases for now). 

-

361 # 

-

362 # Example: 

-

363 # 

-

364 # """ 

-

365 # Multiline-Keywords: bar[ 

-

366 # # Comment about foo 

-

367 # foo] 

-

368 # baz 

-

369 # Keywords: bar[ foo] baz 

-

370 # Comma-List: bar[, foo], baz, 

-

371 # Multiline-Comma-List: bar[, 

-

372 # # Comment about foo 

-

373 # foo], 

-

374 # baz, 

-

375 # """ 

-

376 # 

-

377 # Assuming we want to remove "foo" for the lists, the []-markers 

-

378 # show what we aim to remove. This has the nice side-effect of 

-

379 # preserving whether nor not the value has a trailing separator. 

-

380 # Note that we do *not* attempt to repair missing separators but 

-

381 # it may fix duplicated separators by "accident". 

-

382 # 

-

383 # Now, there are two special cases to be aware of, where this approach 

-

384 # has short comings: 

-

385 # 

-

386 # 1) If foo is the only value (in which case, "delete everything" 

-

387 # is the only option). 

-

388 # 2) If foo is the first value 

-

389 # 3) If foo is not the only value on the line and we see a comment 

-

390 # inside the deletion range. 

-

391 # 

-

392 # For 2) + 3), we attempt to flip and range to delete and every 

-

393 # thing after it (up to but exclusion "baz") instead. This 

-

394 # definitely fixes 3), but 2) has yet another corner case, namely: 

-

395 # 

-

396 # """ 

-

397 # Multiline-Comma-List: foo, 

-

398 # # Remark about bar 

-

399 # bar, 

-

400 # Another-Case: foo 

-

401 # # Remark, also we use leading separator 

-

402 # , bar 

-

403 # """ 

-

404 # 

-

405 # The options include: 

-

406 # 

-

407 # A) Discard the comment - brain-dead simple 

-

408 # B) Hoist the comment up to a field comment, but then what if the 

-

409 # field already has a comment? 

-

410 # C) Clear the first value line leaving just the newline and 

-

411 # replace the separator before "bar" (if present) with a space. 

-

412 # (leaving you with the value of the form "\n# ...\n bar") 

-

413 # 

-

414 

-

415 first_value_on_lhs = None # type: Optional[LinkedListNode[TokenOrElement]] 

-

416 first_value_on_rhs = None # type: Optional[LinkedListNode[TokenOrElement]] 

-

417 comment_before_previous_value = False 

-

418 comment_before_next_value = False 

-

419 for past_node in node_to_remove.iter_previous(skip_current=True): 

-

420 past_token = past_node.value 

-

421 if isinstance(past_token, Deb822Token) and past_token.is_comment: 

-

422 comment_before_previous_value = True 

-

423 continue 

-

424 if isinstance(past_token, vtype): 

-

425 first_value_on_lhs = past_node 

-

426 break 

-

427 

-

428 for future_node in node_to_remove.iter_next(skip_current=True): 

-

429 future_token = future_node.value 

-

430 if isinstance(future_token, Deb822Token) and future_token.is_comment: 

-

431 comment_before_next_value = True 

-

432 continue 

-

433 if isinstance(future_token, vtype): 

-

434 first_value_on_rhs = future_node 

-

435 break 

-

436 

-

437 if first_value_on_rhs is None and first_value_on_lhs is None: 

-

438 # This was the last value, just remove everything. 

-

439 self._token_list.clear() 

-

440 return 

-

441 

-

442 if first_value_on_lhs is not None and not comment_before_previous_value: 

-

443 # Delete left 

-

444 delete_lhs_of_node = True 

-

445 elif first_value_on_rhs is not None and not comment_before_next_value: 

-

446 # Delete right 

-

447 delete_lhs_of_node = False 

-

448 else: 

-

449 # There is a comment on either side (or no value on one and a 

-

450 # comment and the other). Keep it simple, we just delete to 

-

451 # one side (preferring deleting to left if possible). 

-

452 delete_lhs_of_node = first_value_on_lhs is not None 

-

453 

-

454 if delete_lhs_of_node: 

-

455 first_remain_lhs = first_value_on_lhs 

-

456 first_remain_rhs = node_to_remove.next_node 

-

457 else: 

-

458 first_remain_lhs = node_to_remove.previous_node 

-

459 first_remain_rhs = first_value_on_rhs 

-

460 

-

461 # Actual deletion - with some manual labour to update HEAD/TAIL of 

-

462 # the list in case we do a "delete everything left/right this node". 

-

463 if first_remain_lhs is None: 

-

464 self._token_list.head_node = first_remain_rhs 

-

465 if first_remain_rhs is None: 

-

466 self._token_list.tail_node = first_remain_lhs 

-

467 LinkedListNode.link_nodes(first_remain_lhs, first_remain_rhs) 

-

468 

-

469 def append(self, value): 

-

470 # type: (str) -> None 

-

471 vt = self._value_factory(value) 

-

472 self.append_value(vt) 

-

473 

-

474 def append_value(self, vt): 

-

475 # type: (VE) -> None 

-

476 value_parts = self._token_list 

-

477 if value_parts: 

-

478 needs_separator = False 

-

479 stype = self._stype 

-

480 vtype = self._vtype 

-

481 for t in reversed(value_parts): 481 ↛ 488line 481 didn't jump to line 488, because the loop on line 481 didn't complete

-

482 if isinstance(t, vtype): 

-

483 needs_separator = True 

-

484 break 

-

485 if isinstance(t, stype): 

-

486 break 

-

487 

-

488 if needs_separator: 

-

489 self.append_separator() 

-

490 else: 

-

491 # Looks nicer if there is a space before the very first value 

-

492 self._token_list.append(Deb822WhitespaceToken(" ")) 

-

493 self._append_continuation_line_token_if_necessary() 

-

494 self._changed = True 

-

495 value_parts.append(vt) 

-

496 

-

497 def _previous_is_newline(self): 

-

498 # type: () -> bool 

-

499 tail = self._token_list.tail 

-

500 return tail is not None and tail.convert_to_text().endswith("\n") 

-

501 

-

502 def append_newline(self): 

-

503 # type: () -> None 

-

504 if self._previous_is_newline(): 504 ↛ 505line 504 didn't jump to line 505, because the condition on line 504 was never true

-

505 raise ValueError( 

-

506 "Cannot add a newline after a token that ends on a newline" 

-

507 ) 

-

508 self._token_list.append(Deb822NewlineAfterValueToken()) 

-

509 

-

510 def append_comment(self, comment_text): 

-

511 # type: (str) -> None 

-

512 tail = self._token_list.tail 

-

513 if tail is None or not tail.convert_to_text().endswith("\n"): 

-

514 self.append_newline() 

-

515 comment_token = Deb822CommentToken(_format_comment(comment_text)) 

-

516 self._token_list.append(comment_token) 

-

517 

-

518 @property 

-

519 def _continuation_line_char(self): 

-

520 # type: () -> str 

-

521 char = self.__continuation_line_char 

-

522 if char is None: 

-

523 # Use ' ' by default but match the existing field if possible. 

-

524 char = " " 

-

525 for token in self._token_list: 

-

526 if isinstance(token, Deb822ValueContinuationToken): 

-

527 char = token.text 

-

528 break 

-

529 self.__continuation_line_char = char 

-

530 return char 

-

531 

-

532 def _append_continuation_line_token_if_necessary(self): 

-

533 # type: () -> None 

-

534 tail = self._token_list.tail 

-

535 if tail is not None and tail.convert_to_text().endswith("\n"): 535 ↛ 536line 535 didn't jump to line 536, because the condition on line 535 was never true

-

536 self._token_list.append( 

-

537 Deb822ValueContinuationToken(self._continuation_line_char) 

-

538 ) 

-

539 

-

540 def reformat_when_finished(self): 

-

541 # type: () -> None 

-

542 self._enable_reformatting() 

-

543 self._changed = True 

-

544 

-

545 def _enable_reformatting(self): 

-

546 # type: () -> None 

-

547 self._format_preserve_original_formatting = False 

-

548 

-

549 def no_reformatting_when_finished(self): 

-

550 # type: () -> None 

-

551 self._format_preserve_original_formatting = True 

-

552 

-

553 def value_formatter( 

-

554 self, 

-

555 formatter, # type: FormatterCallback 

-

556 force_reformat=False, # type: bool 

-

557 ): 

-

558 # type: (...) -> None 

-

559 """Use a custom formatter when formatting the value 

-

560 

-

561 :param formatter: A formatter (see debian._deb822_repro.formatter.format_field 

-

562 for details) 

-

563 :param force_reformat: If True, always reformat the field even if there are 

-

564 no (other) changes performed. By default, fields are only reformatted if 

-

565 they are changed. 

-

566 """ 

-

567 self._formatter = formatter 

-

568 self._format_preserve_original_formatting = False 

-

569 if force_reformat: 

-

570 self._changed = True 

-

571 

-

572 def clear(self): 

-

573 # type: () -> None 

-

574 """Like list.clear() - removes all content (including comments and spaces)""" 

-

575 if self._token_list: 

-

576 self._changed = True 

-

577 self._token_list.clear() 

-

578 

-

579 def _iter_content_as_tokens(self): 

-

580 # type: () -> Iterable[Deb822Token] 

-

581 for te in self._token_list: 

-

582 if isinstance(te, Deb822Element): 

-

583 yield from te.iter_tokens() 

-

584 else: 

-

585 yield te 

-

586 

-

587 def _generate_reformatted_field_content(self): 

-

588 # type: () -> str 

-

589 separator_token = self._default_separator_factory() 

-

590 vtype = self._vtype 

-

591 stype = self._stype 

-

592 token_list = self._token_list 

-

593 

-

594 def _token_iter(): 

-

595 # type: () -> Iterator[FormatterContentToken] 

-

596 text = "" # type: str 

-

597 for te in token_list: 

-

598 if isinstance(te, Deb822Token): 

-

599 if te.is_comment: 

-

600 yield FormatterContentToken.comment_token(te.text) 

-

601 elif isinstance(te, stype): 

-

602 text = te.text 

-

603 yield FormatterContentToken.separator_token(text) 

-

604 else: 

-

605 assert isinstance(te, vtype) 

-

606 text = te.convert_to_text() 

-

607 yield FormatterContentToken.value_token(text) 

-

608 

-

609 return format_field( 

-

610 self._formatter, 

-

611 self._kvpair_element.field_name, 

-

612 FormatterContentToken.separator_token(separator_token.text), 

-

613 _token_iter(), 

-

614 ) 

-

615 

-

616 def _generate_field_content(self): 

-

617 # type: () -> str 

-

618 return "".join(t.text for t in self._iter_content_as_tokens()) 

-

619 

-

620 def _update_field(self): 

-

621 # type: () -> None 

-

622 kvpair_element = self._kvpair_element 

-

623 field_name = kvpair_element.field_name 

-

624 token_list = self._token_list 

-

625 tail = token_list.tail 

-

626 had_tokens = False 

-

627 

-

628 for t in self._iter_content_as_tokens(): 628 ↛ 633line 628 didn't jump to line 633, because the loop on line 628 didn't complete

-

629 had_tokens = True 

-

630 if not t.is_comment and not t.is_whitespace: 

-

631 break 

-

632 else: 

-

633 if had_tokens: 

-

634 raise ValueError( 

-

635 "Field must be completely empty or have content " 

-

636 "(i.e. non-whitespace and non-comments)" 

-

637 ) 

-

638 if tail is not None: 638 ↛ 656line 638 didn't jump to line 656, because the condition on line 638 was never false

-

639 if isinstance(tail, Deb822Token) and tail.is_comment: 639 ↛ 640line 639 didn't jump to line 640, because the condition on line 639 was never true

-

640 raise ValueError("Fields must not end on a comment") 

-

641 if not tail.convert_to_text().endswith("\n"): 641 ↛ 645line 641 didn't jump to line 645, because the condition on line 641 was never false

-

642 # Always end on a newline 

-

643 self.append_newline() 

-

644 

-

645 if self._format_preserve_original_formatting: 

-

646 value_text = self._generate_field_content() 

-

647 text = ":".join((field_name, value_text)) 

-

648 else: 

-

649 text = self._generate_reformatted_field_content() 

-

650 

-

651 new_content = text.splitlines(keepends=True) 

-

652 else: 

-

653 # Special-case for the empty list which will be mapped to 

-

654 # an empty field. Always end on a newline (avoids errors 

-

655 # if there is a field after this) 

-

656 new_content = [field_name + ":\n"] 

-

657 

-

658 # As absurd as it might seem, it is easier to just use the parser to 

-

659 # construct the AST correctly 

-

660 deb822_file = parse_deb822_file(iter(new_content)) 

-

661 error_token = deb822_file.find_first_error_element() 

-

662 if error_token: 662 ↛ 664line 662 didn't jump to line 664, because the condition on line 662 was never true

-

663 # _print_ast(deb822_file) 

-

664 raise ValueError("Syntax error in new field value for " + field_name) 

-

665 paragraph = next(iter(deb822_file)) 

-

666 assert isinstance(paragraph, Deb822NoDuplicateFieldsParagraphElement) 

-

667 new_kvpair_element = paragraph.get_kvpair_element(field_name) 

-

668 assert new_kvpair_element is not None 

-

669 kvpair_element.value_element = new_kvpair_element.value_element 

-

670 self._changed = False 

-

671 

-

672 def sort_elements( 

-

673 self, 

-

674 *, 

-

675 key=None, # type: Optional[Callable[[VE], Any]] 

-

676 reverse=False, # type: bool 

-

677 ): 

-

678 # type: (...) -> None 

-

679 """Sort the elements (abstract values) in this list. 

-

680 

-

681 This method will sort the logical values of the list. It will 

-

682 attempt to preserve comments associated with a given value where 

-

683 possible. Whether space and separators are preserved depends on 

-

684 the contents of the field as well as the formatting settings. 

-

685 

-

686 Sorting (without reformatting) is likely to leave you with "awkward" 

-

687 whitespace. Therefore, you almost always want to apply reformatting 

-

688 such as the reformat_when_finished() method. 

-

689 

-

690 Sorting will invalidate all ValueReferences. 

-

691 """ 

-

692 comment_start_node = None 

-

693 vtype = self._vtype 

-

694 stype = self._stype 

-

695 

-

696 def key_func(x): 

-

697 # type: (Tuple[VE, List[TokenOrElement]]) -> Any 

-

698 if key: 698 ↛ 699line 698 didn't jump to line 699, because the condition on line 698 was never true

-

699 return key(x[0]) 

-

700 return x[0].convert_to_text() 

-

701 

-

702 parts = [] 

-

703 

-

704 for node in self._token_list.iter_nodes(): 

-

705 value = node.value 

-

706 if isinstance(value, Deb822Token) and value.is_comment: 

-

707 if comment_start_node is None: 707 ↛ 709line 707 didn't jump to line 709, because the condition on line 707 was never false

-

708 comment_start_node = node 

-

709 continue 

-

710 

-

711 if isinstance(value, vtype): 

-

712 comments = [] 

-

713 if comment_start_node is not None: 

-

714 for keep_node in comment_start_node.iter_next(skip_current=False): 714 ↛ 718line 714 didn't jump to line 718, because the loop on line 714 didn't complete

-

715 if keep_node is node: 

-

716 break 

-

717 comments.append(keep_node.value) 

-

718 parts.append((value, comments)) 

-

719 comment_start_node = None 

-

720 

-

721 parts.sort(key=key_func, reverse=reverse) 

-

722 

-

723 self._changed = True 

-

724 self._token_list.clear() 

-

725 first_value = True 

-

726 

-

727 separator_is_space = self._default_separator_factory().is_whitespace 

-

728 

-

729 for value, comments in parts: 

-

730 if first_value: 

-

731 first_value = False 

-

732 if comments: 732 ↛ 735line 732 didn't jump to line 735, because the condition on line 732 was never true

-

733 # While unlikely, there could be a separator between the comments. 

-

734 # It would be in the way and we remove it. 

-

735 comments = [x for x in comments if not isinstance(x, stype)] 

-

736 # Comments cannot start the field, so inject a newline to 

-

737 # work around that 

-

738 self.append_newline() 

-

739 else: 

-

740 if not separator_is_space and not any( 740 ↛ exit,   740 ↛ 7472 missed branches: 1) line 740 didn't run the generator expression on line 740, 2) line 740 didn't jump to line 747, because the condition on line 740 was never true

-

741 isinstance(x, stype) for x in comments 

-

742 ): 

-

743 # While unlikely, you can hide a comma between two comments and expect 

-

744 # us to preserve it. However, the more common case is that the separator 

-

745 # appeared before the comments and was thus omitted (leaving us to re-add 

-

746 # it here). 

-

747 self.append_separator(space_after_separator=False) 

-

748 if comments: 

-

749 self.append_newline() 

-

750 else: 

-

751 self._token_list.append(Deb822WhitespaceToken(" ")) 

-

752 

-

753 self._token_list.extend(comments) 

-

754 self.append_value(value) 

-

755 

-

756 def sort( 

-

757 self, 

-

758 *, 

-

759 key=None, # type: Optional[Callable[[str], Any]] 

-

760 **kwargs, # type: Any 

-

761 ): 

-

762 # type: (...) -> None 

-

763 """Sort the values (rendered as str) in this list. 

-

764 

-

765 This method will sort the logical values of the list. It will 

-

766 attempt to preserve comments associated with a given value where 

-

767 possible. Whether space and separators are preserved depends on 

-

768 the contents of the field as well as the formatting settings. 

-

769 

-

770 Sorting (without reformatting) is likely to leave you with "awkward" 

-

771 whitespace. Therefore, you almost always want to apply reformatting 

-

772 such as the reformat_when_finished() method. 

-

773 

-

774 Sorting will invalidate all ValueReferences. 

-

775 """ 

-

776 if key is not None: 776 ↛ 777line 776 didn't jump to line 777, because the condition on line 776 was never true

-

777 render = self._render 

-

778 kwargs["key"] = lambda vt: key(render(vt)) 

-

779 self.sort_elements(**kwargs) 

-

780 

-

781 

-

782class Interpretation(Generic[T]): 

-

783 

-

784 def interpret( 

-

785 self, 

-

786 kvpair_element, # type: Deb822KeyValuePairElement 

-

787 discard_comments_on_read=True, # type: bool 

-

788 ): 

-

789 # type: (...) -> T 

-

790 raise NotImplementedError # pragma: no cover 

-

791 

-

792 

-

793class GenericContentBasedInterpretation(Interpretation[T], Generic[T, VE]): 

-

794 

-

795 def __init__( 

-

796 self, 

-

797 tokenizer, # type: Callable[[str], Iterable['Deb822Token']] 

-

798 value_parser, # type: StreamingValueParser[VE] 

-

799 ): 

-

800 # type: (...) -> None 

-

801 super().__init__() 

-

802 self._tokenizer = tokenizer 

-

803 self._value_parser = value_parser 

-

804 

-

805 def _high_level_interpretation( 

-

806 self, 

-

807 kvpair_element, # type: Deb822KeyValuePairElement 

-

808 proxy_element, # type: Deb822InterpretationProxyElement 

-

809 discard_comments_on_read=True, # type: bool 

-

810 ): 

-

811 # type: (...) -> T 

-

812 raise NotImplementedError # pragma: no cover 

-

813 

-

814 def _parse_stream( 

-

815 self, buffered_iterator # type: BufferingIterator[Deb822Token] 

-

816 ): 

-

817 # type: (...) -> Iterable[Union[Deb822Token, VE]] 

-

818 

-

819 value_parser = self._value_parser 

-

820 for token in buffered_iterator: 

-

821 if isinstance(token, Deb822ValueToken): 

-

822 yield value_parser(token, buffered_iterator) 

-

823 else: 

-

824 yield token 

-

825 

-

826 def _parse_kvpair( 

-

827 self, kvpair # type: Deb822KeyValuePairElement 

-

828 ): 

-

829 # type: (...) -> Deb822InterpretationProxyElement 

-

830 value_element = kvpair.value_element 

-

831 content = value_element.convert_to_text() 

-

832 token_list = [] # type: List['TokenOrElement'] 

-

833 token_list.extend(self._parse_str(content)) 

-

834 return Deb822InterpretationProxyElement(value_element, token_list) 

-

835 

-

836 def _parse_str(self, content): 

-

837 # type: (str) -> Iterable[Union[Deb822Token, VE]] 

-

838 content_len = len(content) 

-

839 biter = BufferingIterator( 

-

840 len_check_iterator( 

-

841 content, 

-

842 self._tokenizer(content), 

-

843 content_len=content_len, 

-

844 ) 

-

845 ) 

-

846 yield from len_check_iterator( 

-

847 content, 

-

848 self._parse_stream(biter), 

-

849 content_len=content_len, 

-

850 ) 

-

851 

-

852 def interpret( 

-

853 self, 

-

854 kvpair_element, # type: Deb822KeyValuePairElement 

-

855 discard_comments_on_read=True, # type: bool 

-

856 ): 

-

857 # type: (...) -> T 

-

858 proxy_element = self._parse_kvpair(kvpair_element) 

-

859 return self._high_level_interpretation( 

-

860 kvpair_element, 

-

861 proxy_element, 

-

862 discard_comments_on_read=discard_comments_on_read, 

-

863 ) 

-

864 

-

865 

-

866def _parser_to_value_factory( 

-

867 parser, # type: StrToValueParser[VE] 

-

868 vtype, # type: Type[VE] 

-

869): 

-

870 # type: (...) -> Callable[[str], VE] 

-

871 def _value_factory(v): 

-

872 # type: (str) -> VE 

-

873 if v == "": 873 ↛ 874line 873 didn't jump to line 874, because the condition on line 873 was never true

-

874 raise ValueError("The empty string is not a value") 

-

875 token_iter = iter(parser(v)) 

-

876 t1 = next(token_iter, None) # type: Optional[Union[TokenOrElement]] 

-

877 t2 = next(token_iter, None) 

-

878 assert t1 is not None, ( 

-

879 'Bad parser - it returned None (or no TE) for "' + v + '"' 

-

880 ) 

-

881 if t2 is not None: 881 ↛ 882line 881 didn't jump to line 882, because the condition on line 881 was never true

-

882 msg = textwrap.dedent( 

-

883 """\ 

-

884 The input "{v}" should have been exactly one element, but the parser provided at 

-

885 least two. This can happen with unnecessary leading/trailing whitespace 

-

886 or including commas the value for a comma list. 

-

887 """ 

-

888 ).format(v=v) 

-

889 raise ValueError(msg) 

-

890 if not isinstance(t1, vtype): 890 ↛ 891line 890 didn't jump to line 891, because the condition on line 890 was never true

-

891 if isinstance(t1, Deb822Token) and (t1.is_comment or t1.is_whitespace): 

-

892 raise ValueError( 

-

893 'The input "{v}" is whitespace or a comment: Expected a value' 

-

894 ) 

-

895 msg = ( 

-

896 'The input "{v}" should have produced a element of type {vtype_name}, but' 

-

897 " instead it produced {t1}" 

-

898 ) 

-

899 raise ValueError(msg.format(v=v, vtype_name=vtype.__name__, t1=t1)) 

-

900 

-

901 assert len(t1.convert_to_text()) == len(v), ( 

-

902 "Bad tokenizer - the token did not cover the input text" 

-

903 " exactly ({t1_len} != {v_len}".format( 

-

904 t1_len=len(t1.convert_to_text()), v_len=len(v) 

-

905 ) 

-

906 ) 

-

907 return t1 

-

908 

-

909 return _value_factory 

-

910 

-

911 

-

912class ListInterpretation( 

-

913 GenericContentBasedInterpretation[Deb822ParsedTokenList[VE, ST], VE] 

-

914): 

-

915 

-

916 def __init__( 

-

917 self, 

-

918 tokenizer, # type: Callable[[str], Iterable['Deb822Token']] 

-

919 value_parser, # type: StreamingValueParser[VE] 

-

920 vtype, # type: Type[VE] 

-

921 stype, # type: Type[ST] 

-

922 default_separator_factory, # type: Callable[[], ST] 

-

923 render_factory, # type: Callable[[bool], Callable[[VE], str]] 

-

924 ): 

-

925 # type: (...) -> None 

-

926 super().__init__(tokenizer, value_parser) 

-

927 self._vtype = vtype 

-

928 self._stype = stype 

-

929 self._default_separator_factory = default_separator_factory 

-

930 self._render_factory = render_factory 

-

931 

-

932 def _high_level_interpretation( 

-

933 self, 

-

934 kvpair_element, # type: Deb822KeyValuePairElement 

-

935 proxy_element, # type: Deb822InterpretationProxyElement 

-

936 discard_comments_on_read=True, # type: bool 

-

937 ): 

-

938 # type: (...) -> Deb822ParsedTokenList[VE, ST] 

-

939 return Deb822ParsedTokenList( 

-

940 kvpair_element, 

-

941 proxy_element, 

-

942 self._vtype, 

-

943 self._stype, 

-

944 self._parse_str, 

-

945 self._default_separator_factory, 

-

946 self._render_factory(discard_comments_on_read), 

-

947 ) 

-

948 

-

949 

-

950def _parse_whitespace_list_value(token, _): 

-

951 # type: (Deb822Token, BufferingIterator[Deb822Token]) -> Deb822ParsedValueElement 

-

952 return Deb822ParsedValueElement([token]) 

-

953 

-

954 

-

955def _is_comma_token(v): 

-

956 # type: (TokenOrElement) -> bool 

-

957 # Consume tokens until the next comma 

-

958 return isinstance(v, Deb822CommaToken) 

-

959 

-

960 

-

961def _parse_comma_list_value(token, buffered_iterator): 

-

962 # type: (Deb822Token, BufferingIterator[Deb822Token]) -> Deb822ParsedValueElement 

-

963 comma_offset = buffered_iterator.peek_find(_is_comma_token) 

-

964 value_parts = [token] 

-

965 if comma_offset is not None: 

-

966 # The value is followed by a comma and now we know where it ends 

-

967 value_parts.extend(buffered_iterator.peek_many(comma_offset - 1)) 

-

968 else: 

-

969 # The value is the last value there is. Consume all remaining tokens 

-

970 # and then trim from the right. 

-

971 value_parts.extend(buffered_iterator.peek_buffer()) 

-

972 while value_parts and not isinstance(value_parts[-1], Deb822ValueToken): 

-

973 value_parts.pop() 

-

974 

-

975 buffered_iterator.consume_many(len(value_parts) - 1) 

-

976 return Deb822ParsedValueElement(value_parts) 

-

977 

-

978 

-

979def _parse_uploaders_list_value(token, buffered_iterator): 

-

980 # type: (Deb822Token, BufferingIterator[Deb822Token]) -> Deb822ParsedValueElement 

-

981 

-

982 # This is similar to _parse_comma_list_value *except* that there is an extra special 

-

983 # case. Namely comma only counts as a true separator if it follows ">" 

-

984 value_parts = [token] 

-

985 comma_offset = -1 # type: Optional[int] 

-

986 while comma_offset is not None: 

-

987 comma_offset = buffered_iterator.peek_find(_is_comma_token) 

-

988 if comma_offset is not None: 

-

989 # The value is followed by a comma. Verify that this is a terminating 

-

990 # comma (comma may appear in the name or email) 

-

991 # 

-

992 # We include value_parts[-1] to easily cope with the common case of 

-

993 # "foo <a@b.com>," where we will have 0 peeked element to examine. 

-

994 peeked_elements = [value_parts[-1]] 

-

995 peeked_elements.extend(buffered_iterator.peek_many(comma_offset - 1)) 

-

996 comma_was_separator = False 

-

997 i = len(peeked_elements) - 1 

-

998 while i >= 0: 

-

999 token = peeked_elements[i] 

-

1000 if isinstance(token, Deb822ValueToken): 

-

1001 if token.text.endswith(">"): 

-

1002 # The comma terminates the value 

-

1003 value_parts.extend(buffered_iterator.consume_many(i)) 

-

1004 assert isinstance( 

-

1005 value_parts[-1], Deb822ValueToken 

-

1006 ) and value_parts[-1].text.endswith(">"), "Got: " + str( 

-

1007 value_parts 

-

1008 ) 

-

1009 comma_was_separator = True 

-

1010 break 

-

1011 i -= 1 

-

1012 if comma_was_separator: 

-

1013 break 

-

1014 value_parts.extend(buffered_iterator.consume_many(comma_offset)) 

-

1015 assert isinstance(value_parts[-1], Deb822CommaToken) 

-

1016 else: 

-

1017 # The value is the last value there is. Consume all remaining tokens 

-

1018 # and then trim from the right. 

-

1019 remaining_part = buffered_iterator.peek_buffer() 

-

1020 consume_elements = len(remaining_part) 

-

1021 value_parts.extend(remaining_part) 

-

1022 while value_parts and not isinstance(value_parts[-1], Deb822ValueToken): 

-

1023 value_parts.pop() 

-

1024 consume_elements -= 1 

-

1025 buffered_iterator.consume_many(consume_elements) 

-

1026 

-

1027 return Deb822ParsedValueElement(value_parts) 

-

1028 

-

1029 

class Deb822Element(Locatable):
    """Composite elements (consists of 1 or more tokens)"""

    __slots__ = ("_parent_element", "_full_size_cache", "__weakref__")

    def __init__(self):
        # type: () -> None
        self._parent_element = None  # type: Optional[ReferenceType['Deb822Element']]
        self._full_size_cache = None  # type: Optional[Range]

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        raise NotImplementedError  # pragma: no cover

    def iter_parts_of_type(self, only_element_or_token_type):
        # type: (Type[TE]) -> Iterable[TE]
        for part in self.iter_parts():
            if isinstance(part, only_element_or_token_type):
                yield part

    def iter_tokens(self):
        # type: () -> Iterable[Deb822Token]
        for part in self.iter_parts():
            # Control check to catch bugs early
            assert part._parent_element is not None
            if isinstance(part, Deb822Element):
                yield from part.iter_tokens()
            else:
                yield part

    def iter_recurse(
        self, *, only_element_or_token_type=None  # type: Optional[Type[TE]]
    ):
        # type: (...) -> Iterable[TE]
        for part in self.iter_parts():
            if only_element_or_token_type is None or isinstance(
                part, only_element_or_token_type
            ):
                yield cast("TE", part)
            if isinstance(part, Deb822Element):
                yield from part.iter_recurse(
                    only_element_or_token_type=only_element_or_token_type
                )

    @property
    def is_error(self):
        # type: () -> bool
        return False

    @property
    def is_comment(self):
        # type: () -> bool
        return False

    @property
    def parent_element(self):
        # type: () -> Optional[Deb822Element]
        return resolve_ref(self._parent_element)

    @parent_element.setter
    def parent_element(self, new_parent):
        # type: (Optional[Deb822Element]) -> None
        self._parent_element = (
            weakref.ref(new_parent) if new_parent is not None else None
        )

    def _init_parent_of_parts(self):
        # type: () -> None
        for part in self.iter_parts():
            part.parent_element = self

    # Deliberately not a "text" property, to signal that it is not necessarily cheap.
    def convert_to_text(self):
        # type: () -> str
        return "".join(t.text for t in self.iter_tokens())

    def clear_parent_if_parent(self, parent):
        # type: (Deb822Element) -> None
        if parent is self.parent_element:
            self._parent_element = None

    def size(self, *, skip_leading_comments: bool = True) -> Range:
        size_cache = self._full_size_cache
        if size_cache is None:
            size_cache = Range.from_position_and_sizes(
                START_POSITION,
                (p.size(skip_leading_comments=False) for p in self.iter_parts()),
            )
            self._full_size_cache = size_cache
        return size_cache

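# Usage sketch (illustrative): iter_recurse() walks the whole element tree;
# the optional only_element_or_token_type filter narrows the walk to a single
# type. Reassembling every token reproduces the parsed text byte-for-byte
# (assuming a file element from parse_deb822_file in this module).
def _iter_recurse_example():
    # type: () -> None
    dfile = parse_deb822_file("Package: foo\n".splitlines(keepends=True))
    tokens = dfile.iter_recurse(only_element_or_token_type=Deb822Token)
    assert "".join(t.text for t in tokens) == "Package: foo\n"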

class Deb822InterpretationProxyElement(Deb822Element):

    __slots__ = ("parts",)

    def __init__(
        self, real_element: Deb822Element, parts: List[TokenOrElement]
    ) -> None:
        super().__init__()
        self.parent_element = real_element
        self.parts = parts
        for p in parts:
            p.parent_element = self

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        return iter(self.parts)

    def position_in_parent(self, *, skip_leading_comments: bool = True) -> Position:
        parent = self.parent_element
        if parent is None:
            raise RuntimeError("parent was garbage collected")
        return parent.position_in_parent()

    def position_in_file(self, *, skip_leading_comments: bool = True) -> Position:
        parent = self.parent_element
        if parent is None:
            raise RuntimeError("parent was garbage collected")
        return parent.position_in_file()

    def size(self, *, skip_leading_comments: bool = True) -> Range:
        # Same as parent except we never use a cache.
        sizes = (p.size(skip_leading_comments=False) for p in self.iter_parts())
        return Range.from_position_and_sizes(START_POSITION, sizes)


class Deb822ErrorElement(Deb822Element):
    """Element representing elements or tokens that are out of place

    Commonly, it will just be instances of Deb822ErrorToken, but it can be other
    things. As an example, if a parser discovers out-of-order elements/tokens,
    it can bundle them in a Deb822ErrorElement to signal that the sequence of
    elements/tokens is invalid (even if the tokens themselves are valid).
    """

    __slots__ = ("_parts",)

    def __init__(self, parts):
        # type: (Sequence[TokenOrElement]) -> None
        super().__init__()
        self._parts = tuple(parts)
        self._init_parent_of_parts()

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        yield from self._parts

    @property
    def is_error(self):
        # type: () -> bool
        return True

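# Usage sketch (illustrative): parsing is error-tolerant, so out-of-place
# content is preserved under an error element rather than raising; callers
# can probe for it via find_first_error_element (used further below).
def _error_element_example():
    # type: () -> None
    # A continuation line with no field before it is out of place.
    dfile = parse_deb822_file(" stray continuation\n".splitlines(keepends=True))
    assert dfile.find_first_error_element() is not None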

class Deb822ValueLineElement(Deb822Element):
    """Consists of one "line" of a value"""

    __slots__ = (
        "_comment_element",
        "_continuation_line_token",
        "_leading_whitespace_token",
        "_value_tokens",
        "_trailing_whitespace_token",
        "_newline_token",
    )

    def __init__(
        self,
        comment_element,  # type: Optional[Deb822CommentElement]
        continuation_line_token,  # type: Optional[Deb822ValueContinuationToken]
        leading_whitespace_token,  # type: Optional[Deb822WhitespaceToken]
        value_parts,  # type: List[TokenOrElement]
        trailing_whitespace_token,  # type: Optional[Deb822WhitespaceToken]
        # only optional if it is the last line of the file and the file does not
        # end with a newline.
        newline_token,  # type: Optional[Deb822WhitespaceToken]
    ):
        # type: (...) -> None
        super().__init__()
        if comment_element is not None and continuation_line_token is None:
            raise ValueError("Only continuation lines can have comments")
        self._comment_element = comment_element  # type: Optional[Deb822CommentElement]
        self._continuation_line_token = continuation_line_token
        self._leading_whitespace_token = (
            leading_whitespace_token
        )  # type: Optional[Deb822WhitespaceToken]
        self._value_tokens = value_parts  # type: List[TokenOrElement]
        self._trailing_whitespace_token = trailing_whitespace_token
        self._newline_token = newline_token  # type: Optional[Deb822WhitespaceToken]
        self._init_parent_of_parts()

    @property
    def comment_element(self):
        # type: () -> Optional[Deb822CommentElement]
        return self._comment_element

    @property
    def continuation_line_token(self):
        # type: () -> Optional[Deb822ValueContinuationToken]
        return self._continuation_line_token

    @property
    def newline_token(self):
        # type: () -> Optional[Deb822WhitespaceToken]
        return self._newline_token

    def add_newline_if_missing(self):
        # type: () -> bool
        if self._newline_token is None:
            self._newline_token = Deb822NewlineAfterValueToken()
            self._newline_token.parent_element = self
            self._full_size_cache = None
            return True
        return False

    def _iter_content_parts(self):
        # type: () -> Iterable[TokenOrElement]
        if self._leading_whitespace_token:
            yield self._leading_whitespace_token
        yield from self._value_tokens
        if self._trailing_whitespace_token:
            yield self._trailing_whitespace_token

    def _iter_content_tokens(self):
        # type: () -> Iterable[Deb822Token]
        for part in self._iter_content_parts():
            if isinstance(part, Deb822Element):
                yield from part.iter_tokens()
            else:
                yield part

    def convert_content_to_text(self):
        # type: () -> str
        if (
            len(self._value_tokens) == 1
            and not self._leading_whitespace_token
            and not self._trailing_whitespace_token
            and isinstance(self._value_tokens[0], Deb822Token)
        ):
            # By default, we get a single value spanning the entire line
            # (minus continuation line and newline, but we are supposed to
            # exclude those)
            return self._value_tokens[0].text

        return "".join(t.text for t in self._iter_content_tokens())

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        if self._comment_element:
            yield self._comment_element
        if self._continuation_line_token:
            yield self._continuation_line_token
        yield from self._iter_content_parts()
        if self._newline_token:
            yield self._newline_token

    def size(self, *, skip_leading_comments: bool = True) -> Range:
        if skip_leading_comments:
            return Range.from_position_and_sizes(
                START_POSITION,
                (
                    p.size(skip_leading_comments=False)
                    for p in self.iter_parts()
                    if not p.is_comment
                ),
            )
        return super().size(skip_leading_comments=skip_leading_comments)

    def position_in_parent(self, *, skip_leading_comments: bool = True) -> Position:
        base_pos = super().position_in_parent(skip_leading_comments=False)
        if skip_leading_comments:
            for p in self.iter_parts():
                if p.is_comment:
                    continue
                non_comment_pos = p.position_in_parent(skip_leading_comments=False)
                base_pos = non_comment_pos.relative_to(base_pos)
        return base_pos


class Deb822ValueElement(Deb822Element):
    __slots__ = ("_value_entry_elements",)

    def __init__(self, value_entry_elements):
        # type: (Sequence[Deb822ValueLineElement]) -> None
        super().__init__()
        # Split over two lines due to line length issues
        v = tuple(value_entry_elements)
        self._value_entry_elements = v  # type: Sequence[Deb822ValueLineElement]
        self._init_parent_of_parts()

    @property
    def value_lines(self):
        # type: () -> Sequence[Deb822ValueLineElement]
        """Read-only list of value entries"""
        return self._value_entry_elements

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        yield from self._value_entry_elements

    def add_final_newline_if_missing(self):
        # type: () -> bool
        if self._value_entry_elements:
            changed = self._value_entry_elements[-1].add_newline_if_missing()
            if changed:
                self._full_size_cache = None
            return changed
        return False


class Deb822ParsedValueElement(Deb822Element):

    __slots__ = ("_text_cached", "_text_no_comments_cached", "_token_list")

    def __init__(self, tokens):
        # type: (List[Deb822Token]) -> None
        super().__init__()
        self._token_list = tokens
        self._init_parent_of_parts()
        if not isinstance(tokens[0], Deb822ValueToken) or not isinstance(
            tokens[-1], Deb822ValueToken
        ):
            raise ValueError(
                self.__class__.__name__ + " MUST start and end on a Deb822ValueToken"
            )
        if len(tokens) == 1:
            token = tokens[0]
            self._text_cached = token.text  # type: Optional[str]
            self._text_no_comments_cached = token.text  # type: Optional[str]
        else:
            self._text_cached = None
            self._text_no_comments_cached = None

    def convert_to_text(self):
        # type: () -> str
        # The with-comments text has its own cache, separate from the
        # comment-less cache used by convert_to_text_without_comments.
        if self._text_cached is None:
            self._text_cached = super().convert_to_text()
        return self._text_cached

    def convert_to_text_without_comments(self):
        # type: () -> str
        if self._text_no_comments_cached is None:
            self._text_no_comments_cached = "".join(
                t.text for t in self.iter_tokens() if not t.is_comment
            )
        return self._text_no_comments_cached

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        yield from self._token_list


class Deb822CommentElement(Deb822Element):
    __slots__ = ("_comment_tokens",)

    def __init__(self, comment_tokens):
        # type: (Sequence[Deb822CommentToken]) -> None
        super().__init__()
        self._comment_tokens = tuple(
            comment_tokens
        )  # type: Sequence[Deb822CommentToken]
        if not comment_tokens:  # pragma: no cover
            raise ValueError("Comment elements must have at least one comment token")
        self._init_parent_of_parts()

    @property
    def is_comment(self):
        # type: () -> bool
        return True

    def __len__(self):
        # type: () -> int
        return len(self._comment_tokens)

    def __getitem__(self, item):
        # type: (int) -> Deb822CommentToken
        return self._comment_tokens[item]

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        yield from self._comment_tokens


class Deb822KeyValuePairElement(Deb822Element):
    __slots__ = (
        "_comment_element",
        "_field_token",
        "_separator_token",
        "_value_element",
    )

    def __init__(
        self,
        comment_element,  # type: Optional[Deb822CommentElement]
        field_token,  # type: Deb822FieldNameToken
        separator_token,  # type: Deb822FieldSeparatorToken
        value_element,  # type: Deb822ValueElement
    ):
        # type: (...) -> None
        super().__init__()
        self._comment_element = comment_element  # type: Optional[Deb822CommentElement]
        self._field_token = field_token  # type: Deb822FieldNameToken
        self._separator_token = separator_token  # type: Deb822FieldSeparatorToken
        self._value_element = value_element  # type: Deb822ValueElement
        self._init_parent_of_parts()

    @property
    def field_name(self):
        # type: () -> _strI
        return self.field_token.text

    @property
    def field_token(self):
        # type: () -> Deb822FieldNameToken
        return self._field_token

    @property
    def value_element(self):
        # type: () -> Deb822ValueElement
        return self._value_element

    @value_element.setter
    def value_element(self, new_value):
        # type: (Deb822ValueElement) -> None
        self._full_size_cache = None
        self._value_element.clear_parent_if_parent(self)
        self._value_element = new_value
        new_value.parent_element = self

    def interpret_as(
        self,
        interpreter,  # type: Interpretation[T]
        discard_comments_on_read=True,  # type: bool
    ):
        # type: (...) -> T
        return interpreter.interpret(
            self, discard_comments_on_read=discard_comments_on_read
        )

    @property
    def comment_element(self):
        # type: () -> Optional[Deb822CommentElement]
        return self._comment_element

    @comment_element.setter
    def comment_element(self, value):
        # type: (Optional[Deb822CommentElement]) -> None
        self._full_size_cache = None
        if value is not None:
            if not value[-1].text.endswith("\n"):
                raise ValueError("Field comments must end with a newline")
        if self._comment_element:
            self._comment_element.clear_parent_if_parent(self)
        if value is not None:
            value.parent_element = self
        self._comment_element = value

    def iter_parts(self):
        # type: () -> Iterable[TokenOrElement]
        if self._comment_element:
            yield self._comment_element
        yield self._field_token
        yield self._separator_token
        yield self._value_element

    def position_in_parent(
        self,
        *,
        skip_leading_comments: bool = True,
    ) -> Position:
        position = super().position_in_parent(skip_leading_comments=False)
        if skip_leading_comments:
            if self._comment_element:
                field_pos = self._field_token.position_in_parent()
                position = field_pos.relative_to(position)
        return position

    def size(self, *, skip_leading_comments: bool = True) -> Range:
        if skip_leading_comments:
            return Range.from_position_and_sizes(
                START_POSITION,
                (
                    p.size(skip_leading_comments=False)
                    for p in self.iter_parts()
                    if not p.is_comment
                ),
            )
        return super().size(skip_leading_comments=False)

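# Usage sketch (illustrative): a parsed field/value pair exposes its name,
# its value element, and (via convert_to_text) the exact original text.
def _kvpair_example():
    # type: () -> None
    dfile = parse_deb822_file("Package: foo\n".splitlines(keepends=True))
    paragraph = next(iter(dfile))
    kvpair = paragraph.get_kvpair_element("Package")
    assert kvpair is not None
    assert kvpair.field_name == "Package"
    # Round-trip: the pair reproduces its input exactly.
    assert kvpair.convert_to_text() == "Package: foo\n"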

def _format_comment(c):
    # type: (str) -> str
    if c == "":
        # Special-case: Empty strings are mapped to an empty comment line
        return "#\n"
    if "\n" in c[:-1]:
        raise ValueError("Comment lines must not have embedded newlines")
    if not c.endswith("\n"):
        c = c.rstrip() + "\n"
    if not c.startswith("#"):
        c = "# " + c.lstrip()
    return c

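# Behaviour sketch for _format_comment (each pair verified against the
# branches above; illustrative only):
def _format_comment_examples():
    # type: () -> None
    assert _format_comment("") == "#\n"              # empty comment line
    assert _format_comment("foo") == "# foo\n"       # "#" and "\n" added
    assert _format_comment("# bar\n") == "# bar\n"   # already normalized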

def _unpack_key(
    item,  # type: ParagraphKey
    raise_if_indexed=False,  # type: bool
):
    # type: (...) -> Tuple[_strI, Optional[int], Optional[Deb822FieldNameToken]]
    index = None  # type: Optional[int]
    name_token = None  # type: Optional[Deb822FieldNameToken]
    if isinstance(item, tuple):
        key, index = item
        if raise_if_indexed:
            # Fudge "(key, 0)" into a "key" to defensively support callers
            # using both paragraph styles with the same key.
            if index != 0:
                msg = 'Cannot resolve key "{key}" with index {index}. The key is not indexed'
                raise KeyError(msg.format(key=key, index=index))
            index = None
        key = _strI(key)
    else:
        index = None
        if isinstance(item, Deb822FieldNameToken):
            name_token = item
            key = name_token.text
        else:
            key = _strI(item)

    return key, index, name_token

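# Behaviour sketch for _unpack_key (illustrative only): plain strings and
# (name, index) tuples both unpack to (key, index, name_token).
def _unpack_key_examples():
    # type: () -> None
    key, index, name_token = _unpack_key("Depends")
    assert (str(key), index, name_token) == ("Depends", None, None)
    key, index, _ = _unpack_key(("Depends", 1))
    assert (str(key), index) == ("Depends", 1)
    # raise_if_indexed=True fudges ("Depends", 0) back into a plain lookup,
    # while any non-zero index raises KeyError.
    _, index, _ = _unpack_key(("Depends", 0), raise_if_indexed=True)
    assert index is None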

def _convert_value_lines_to_lines(
    value_lines,  # type: Iterable[Deb822ValueLineElement]
    strip_comments,  # type: bool
):
    # type: (...) -> Iterable[str]
    if not strip_comments:
        yield from (v.convert_to_text() for v in value_lines)
    else:
        for element in value_lines:
            yield "".join(x.text for x in element.iter_tokens() if not x.is_comment)


if sys.version_info >= (3, 9) or TYPE_CHECKING:
    _ParagraphMapping_Base = collections.abc.Mapping[ParagraphKey, T]
else:
    # Python 3.5 - 3.8 compat - we are not allowed to subscript the abc.Mapping
    # - use this little hack to work around it
    class _ParagraphMapping_Base(collections.abc.Mapping, Generic[T], ABC):
        pass


# Deb822ParagraphElement uses this Mixin (by having `_paragraph` return self).
# Therefore, the Mixin needs to call the "proper" methods on the paragraph to
# avoid infinite recursion.
class AutoResolvingMixin(Generic[T], _ParagraphMapping_Base[T]):

    @property
    def _auto_resolve_ambiguous_fields(self):
        # type: () -> bool
        return True

    @property
    def _paragraph(self):
        # type: () -> Deb822ParagraphElement
        raise NotImplementedError  # pragma: no cover

    def __len__(self):
        # type: () -> int
        return self._paragraph.kvpair_count

    def __contains__(self, item):
        # type: (object) -> bool
        return self._paragraph.contains_kvpair_element(item)

    def __iter__(self):
        # type: () -> Iterator[ParagraphKey]
        return iter(self._paragraph.iter_keys())

    def __getitem__(self, item):
        # type: (ParagraphKey) -> T
        if self._auto_resolve_ambiguous_fields and isinstance(item, str):
            v = self._paragraph.get_kvpair_element((item, 0))
        else:
            v = self._paragraph.get_kvpair_element(item)
        assert v is not None
        return self._interpret_value(item, v)

    def __delitem__(self, item):
        # type: (ParagraphKey) -> None
        self._paragraph.remove_kvpair_element(item)

    def _interpret_value(self, key, value):
        # type: (ParagraphKey, Deb822KeyValuePairElement) -> T
        raise NotImplementedError  # pragma: no cover

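# Design-pattern sketch (illustrative, standalone): why the mixin routes
# everything through `_paragraph`. When the concrete class *is* the
# paragraph, `_paragraph` returns self, so the mixin method must call a
# differently-named "real" method or it would recurse into itself.
class _MixinSketch:
    @property
    def _paragraph(self):
        # type: () -> "_ParagraphSketch"
        raise NotImplementedError

    def __len__(self):
        # Delegates to kvpair_count rather than len(self._paragraph); the
        # latter would loop forever when _paragraph is self.
        return self._paragraph.kvpair_count


class _ParagraphSketch(_MixinSketch):
    @property
    def _paragraph(self):
        # type: () -> "_ParagraphSketch"
        return self

    @property
    def kvpair_count(self):
        # type: () -> int
        return 0


# e.g. len(_ParagraphSketch()) == 0, with no infinite recursion.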

# Deb822ParagraphElement uses this Mixin (by having `_paragraph` return self).
# Therefore, the Mixin needs to call the "proper" methods on the paragraph to
# avoid infinite recursion.
class Deb822ParagraphToStrWrapperMixin(AutoResolvingMixin[str], ABC):

    @property
    def _auto_map_initial_line_whitespace(self):
        # type: () -> bool
        return True

    @property
    def _discard_comments_on_read(self):
        # type: () -> bool
        return True

    @property
    def _auto_map_final_newline_in_multiline_values(self):
        # type: () -> bool
        return True

    @property
    def _preserve_field_comments_on_field_updates(self):
        # type: () -> bool
        return True

    def _convert_value_to_str(self, kvpair_element):
        # type: (Deb822KeyValuePairElement) -> str
        value_element = kvpair_element.value_element
        value_entries = value_element.value_lines
        if len(value_entries) == 1:
            # Special case single line entry (e.g. "Package: foo") as they never
            # have comments and we can do some parts more efficiently.
            value_entry = value_entries[0]
            t = value_entry.convert_to_text()
            if self._auto_map_initial_line_whitespace:
                t = t.strip()
            return t

        if self._auto_map_initial_line_whitespace or self._discard_comments_on_read:
            converter = _convert_value_lines_to_lines(
                value_entries,
                self._discard_comments_on_read,
            )

            auto_map_space = self._auto_map_initial_line_whitespace

            # Because we know there is more than one line, we can unconditionally
            # inject the newline after the first line
            as_text = "".join(
                line.strip() + "\n" if auto_map_space and i == 1 else line
                for i, line in enumerate(converter, start=1)
            )
        else:
            # No rewrite necessary.
            as_text = value_element.convert_to_text()

        if self._auto_map_final_newline_in_multiline_values and as_text[-1] == "\n":
            as_text = as_text[:-1]
        return as_text

    def __setitem__(self, item, value):
        # type: (ParagraphKey, str) -> None
        keep_comments = (
            self._preserve_field_comments_on_field_updates
        )  # type: Optional[bool]
        comment = None
        if keep_comments and self._auto_resolve_ambiguous_fields:
            # For ambiguous fields, we have to resolve the original field as
            # the set_field_* methods do not cope with ambiguous fields. This
            # means we might as well clear the keep_comments flag as we have
            # resolved the comment.
            keep_comments = None
            key_lookup = item
            if isinstance(item, str):
                key_lookup = (item, 0)
            orig_kvpair = self._paragraph.get_kvpair_element(key_lookup, use_get=True)
            if orig_kvpair is not None:
                comment = orig_kvpair.comment_element

        if self._auto_map_initial_line_whitespace:
            try:
                idx = value.index("\n")
            except ValueError:
                idx = -1
            if idx == -1 or idx == len(value):
                self._paragraph.set_field_to_simple_value(
                    item,
                    value.strip(),
                    preserve_original_field_comment=keep_comments,
                    field_comment=comment,
                )
                return
            # Regenerate the first line with normalized whitespace if necessary
            first_line, rest = value.split("\n", 1)
            if first_line and first_line[:1] not in ("\t", " "):
                value = "".join((" ", first_line.strip(), "\n", rest))
            else:
                value = "".join((first_line, "\n", rest))
        if not value.endswith("\n"):
            if not self._auto_map_final_newline_in_multiline_values:
                raise ValueError(
                    "Values must end with a newline (or be single line"
                    " values and use the auto whitespace mapping feature)"
                )
            value += "\n"
        self._paragraph.set_field_from_raw_string(
            item,
            value,
            preserve_original_field_comment=keep_comments,
            field_comment=comment,
        )

    def _interpret_value(self, key, value):
        # type: (ParagraphKey, Deb822KeyValuePairElement) -> str
        # mypy is a bit dense and cannot see that T == str
        return self._convert_value_to_str(value)

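# Usage sketch (illustrative): how the dict-style setter normalizes the
# first line of a multi-line value (see __setitem__ above).
def _setitem_normalization_example():
    # type: () -> None
    paragraph = Deb822ParagraphElement.from_dict({"Package": "foo"})
    # The first line lacks leading whitespace, so it is re-generated with
    # a single leading space; the continuation line is kept verbatim.
    paragraph["Depends"] = "libfoo,\n libbar,\n"
    assert paragraph["Depends"] == "libfoo,\n libbar,"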

class AbstractDeb822ParagraphWrapper(AutoResolvingMixin[T], ABC):

    def __init__(
        self,
        paragraph,  # type: Deb822ParagraphElement
        *,
        auto_resolve_ambiguous_fields=False,  # type: bool
        discard_comments_on_read=True,  # type: bool
    ):
        # type: (...) -> None
        self.__paragraph = paragraph
        self.__auto_resolve_ambiguous_fields = auto_resolve_ambiguous_fields
        self.__discard_comments_on_read = discard_comments_on_read

    @property
    def _paragraph(self):
        # type: () -> Deb822ParagraphElement
        return self.__paragraph

    @property
    def _discard_comments_on_read(self):
        # type: () -> bool
        return self.__discard_comments_on_read

    @property
    def _auto_resolve_ambiguous_fields(self):
        # type: () -> bool
        return self.__auto_resolve_ambiguous_fields


class Deb822InterpretingParagraphWrapper(AbstractDeb822ParagraphWrapper[T]):

    def __init__(
        self,
        paragraph,  # type: Deb822ParagraphElement
        interpretation,  # type: Interpretation[T]
        *,
        auto_resolve_ambiguous_fields=False,  # type: bool
        discard_comments_on_read=True,  # type: bool
    ):
        # type: (...) -> None
        super().__init__(
            paragraph,
            auto_resolve_ambiguous_fields=auto_resolve_ambiguous_fields,
            discard_comments_on_read=discard_comments_on_read,
        )
        self._interpretation = interpretation

    def _interpret_value(self, key, value):
        # type: (ParagraphKey, Deb822KeyValuePairElement) -> T
        return self._interpretation.interpret(value)


class Deb822DictishParagraphWrapper(
    AbstractDeb822ParagraphWrapper[str], Deb822ParagraphToStrWrapperMixin
):

    def __init__(
        self,
        paragraph,  # type: Deb822ParagraphElement
        *,
        discard_comments_on_read=True,  # type: bool
        auto_map_initial_line_whitespace=True,  # type: bool
        auto_resolve_ambiguous_fields=False,  # type: bool
        preserve_field_comments_on_field_updates=True,  # type: bool
        auto_map_final_newline_in_multiline_values=True,  # type: bool
    ):
        # type: (...) -> None
        super().__init__(
            paragraph,
            auto_resolve_ambiguous_fields=auto_resolve_ambiguous_fields,
            discard_comments_on_read=discard_comments_on_read,
        )
        self.__auto_map_initial_line_whitespace = auto_map_initial_line_whitespace
        self.__preserve_field_comments_on_field_updates = (
            preserve_field_comments_on_field_updates
        )
        self.__auto_map_final_newline_in_multiline_values = (
            auto_map_final_newline_in_multiline_values
        )

    @property
    def _auto_map_initial_line_whitespace(self):
        # type: () -> bool
        return self.__auto_map_initial_line_whitespace

    @property
    def _preserve_field_comments_on_field_updates(self):
        # type: () -> bool
        return self.__preserve_field_comments_on_field_updates

    @property
    def _auto_map_final_newline_in_multiline_values(self):
        # type: () -> bool
        return self.__auto_map_final_newline_in_multiline_values


class Deb822ParagraphElement(Deb822Element, Deb822ParagraphToStrWrapperMixin, ABC):

    @classmethod
    def new_empty_paragraph(cls):
        # type: () -> Deb822ParagraphElement
        return Deb822NoDuplicateFieldsParagraphElement([], OrderedSet())

    @classmethod
    def from_dict(cls, mapping):
        # type: (Mapping[str, str]) -> Deb822ParagraphElement
        paragraph = cls.new_empty_paragraph()
        for k, v in mapping.items():
            paragraph[k] = v
        return paragraph

    @classmethod
    def from_kvpairs(cls, kvpair_elements):
        # type: (List[Deb822KeyValuePairElement]) -> Deb822ParagraphElement
        if not kvpair_elements:
            raise ValueError(
                "A paragraph must consist of at least one field/value pair"
            )
        kvpair_order = OrderedSet(kv.field_name for kv in kvpair_elements)
        if len(kvpair_order) == len(kvpair_elements):
            # Each field occurs at most once, which is good because that
            # means it is a valid paragraph and we can use the optimized
            # implementation.
            return Deb822NoDuplicateFieldsParagraphElement(
                kvpair_elements, kvpair_order
            )
        # Fallback implementation that can cope with repeated field names,
        # at the cost of complexity.
        return Deb822DuplicateFieldsParagraphElement(kvpair_elements)

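    # Usage sketch (illustrative): from_kvpairs only picks the optimized
    # paragraph variant when every field name is unique; duplicated fields
    # fall back to the slower variant, which has_duplicate_fields (below)
    # reports:
    #
    #   >>> dfile = parse_deb822_file("Package: a\nPackage: b\n".splitlines(keepends=True))
    #   >>> paragraph = next(iter(dfile))
    #   >>> paragraph.has_duplicate_fields
    #   True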

    @property
    def has_duplicate_fields(self):
        # type: () -> bool
        """Tell whether this paragraph has duplicate fields"""
        return False

    def as_interpreted_dict_view(
        self,
        interpretation,  # type: Interpretation[T]
        *,
        auto_resolve_ambiguous_fields=True,  # type: bool
    ):
        # type: (...) -> Deb822InterpretingParagraphWrapper[T]
        r"""Provide a Dict-like view of the paragraph

        This method returns a dict-like object representing this paragraph and
        is useful for accessing fields in a given interpretation. It is possible
        to use multiple versions of this dict-like view with different interpretations
        on the same paragraph at the same time (for different fields).

        >>> example_deb822_paragraph = '''
        ... Package: foo
        ... # Field comment (because it comes just before a field)
        ... Architecture: amd64
        ... # Inline comment (associated with the next line)
        ...  i386
        ... # We also support arm
        ...  arm64
        ...  armel
        ... '''
        >>> dfile = parse_deb822_file(example_deb822_paragraph.splitlines())
        >>> paragraph = next(iter(dfile))
        >>> list_view = paragraph.as_interpreted_dict_view(LIST_SPACE_SEPARATED_INTERPRETATION)
        >>> # With the defaults, you only deal with the semantic values
        >>> # - no leading or trailing whitespace on the first part of the value
        >>> list(list_view["Package"])
        ['foo']
        >>> with list_view["Architecture"] as arch_list:
        ...     orig_arch_list = list(arch_list)
        ...     arch_list.replace('i386', 'kfreebsd-amd64')
        >>> orig_arch_list
        ['amd64', 'i386', 'arm64', 'armel']
        >>> list(list_view["Architecture"])
        ['amd64', 'kfreebsd-amd64', 'arm64', 'armel']
        >>> print(paragraph.dump(), end='')
        Package: foo
        # Field comment (because it comes just before a field)
        Architecture: amd64
        # Inline comment (associated with the next line)
         kfreebsd-amd64
        # We also support arm
         arm64
         armel
        >>> # Format preserved and architecture replaced
        >>> with list_view["Architecture"] as arch_list:
        ...     # Prettify the result as sorting will cause awkward whitespace
        ...     arch_list.reformat_when_finished()
        ...     arch_list.sort()
        >>> print(paragraph.dump(), end='')
        Package: foo
        # Field comment (because it comes just before a field)
        Architecture: amd64
        # We also support arm
         arm64
         armel
        # Inline comment (associated with the next line)
         kfreebsd-amd64
        >>> list(list_view["Architecture"])
        ['amd64', 'arm64', 'armel', 'kfreebsd-amd64']
        >>> # Format preserved and architecture values sorted

        :param interpretation: Decides how the field values are interpreted. As an example,
          use LIST_SPACE_SEPARATED_INTERPRETATION for fields such as Architecture in the
          debian/control file.
        :param auto_resolve_ambiguous_fields: This parameter is only relevant for paragraphs
          that contain the same field multiple times (these are generally invalid). If the
          caller requests an ambiguous field from an invalid paragraph via a plain field name,
          the returned dict-like object will refuse to resolve the field (not knowing which
          version to pick). This parameter (if set to True) instead changes the error into
          assuming the caller wants the *first* variant.
        """
        return Deb822InterpretingParagraphWrapper(
            self,
            interpretation,
            auto_resolve_ambiguous_fields=auto_resolve_ambiguous_fields,
        )


    def configured_view(
        self,
        *,
        discard_comments_on_read=True,  # type: bool
        auto_map_initial_line_whitespace=True,  # type: bool
        auto_resolve_ambiguous_fields=True,  # type: bool
        preserve_field_comments_on_field_updates=True,  # type: bool
        auto_map_final_newline_in_multiline_values=True,  # type: bool
    ):
        # type: (...) -> Deb822DictishParagraphWrapper
        r"""Provide a Dict[str, str]-like view of this paragraph with non-standard parameters

        This method returns a dict-like object representing this paragraph that is
        optionally configured differently from the default view.

        >>> example_deb822_paragraph = '''
        ... Package: foo
        ... # Field comment (because it comes just before a field)
        ... Depends: libfoo,
        ... # Inline comment (associated with the next line)
        ...  libbar,
        ... '''
        >>> dfile = parse_deb822_file(example_deb822_paragraph.splitlines())
        >>> paragraph = next(iter(dfile))
        >>> # With the defaults, you only deal with the semantic values
        >>> # - no leading or trailing whitespace on the first part of the value
        >>> paragraph["Package"]
        'foo'
        >>> # - no inline comments in multiline values (but whitespace will be
        >>> #   present on subsequent lines.)
        >>> print(paragraph["Depends"])
        libfoo,
         libbar,
        >>> paragraph['Foo'] = 'bar'
        >>> paragraph.get('Foo')
        'bar'
        >>> paragraph.get('Unknown-Field') is None
        True
        >>> # But you get asymmetric behaviour with set vs. get
        >>> paragraph['Foo'] = ' bar\n'
        >>> paragraph['Foo']
        'bar'
        >>> paragraph['Bar'] = ' bar\n#Comment\n another value\n'
        >>> # Note that the whitespace on the first line has been normalized.
        >>> print("Bar: " + paragraph['Bar'])
        Bar: bar
         another value
        >>> # The comment is present (in case you were wondering)
        >>> print(paragraph.get_kvpair_element('Bar').convert_to_text(), end='')
        Bar: bar
        #Comment
         another value
        >>> # On the other hand, you can choose to see the values as they are
        >>> # - We will just reset the paragraph as a "nothing up my sleeve"
        >>> dfile = parse_deb822_file(example_deb822_paragraph.splitlines())
        >>> paragraph = next(iter(dfile))
        >>> nonstd_dictview = paragraph.configured_view(
        ...     discard_comments_on_read=False,
        ...     auto_map_initial_line_whitespace=False,
        ...     # For paragraphs with duplicate fields, you can choose to get an error
        ...     # rather than the dict picking the first value available.
        ...     auto_resolve_ambiguous_fields=False,
        ...     auto_map_final_newline_in_multiline_values=False,
        ... )
        >>> # Because we have reset the state, Foo and Bar are no longer there.
        >>> 'Bar' not in paragraph and 'Foo' not in paragraph
        True
        >>> # We can now see the comments (discard_comments_on_read=False)
        >>> # (The leading whitespace in front of "libfoo" is due to
        >>> #  auto_map_initial_line_whitespace=False)
        >>> print(nonstd_dictview["Depends"], end='')
         libfoo,
        # Inline comment (associated with the next line)
         libbar,
        >>> # And all the optional whitespace on the first value line
        >>> # (auto_map_initial_line_whitespace=False)
        >>> nonstd_dictview["Package"] == ' foo\n'
        True
        >>> # ... which will give you symmetric behaviour with set vs. get
        >>> nonstd_dictview['Foo'] = ' bar \n'
        >>> nonstd_dictview['Foo']
        ' bar \n'
        >>> nonstd_dictview['Bar'] = ' bar \n#Comment\n another value\n'
        >>> nonstd_dictview['Bar']
        ' bar \n#Comment\n another value\n'
        >>> # But then you get no help either.
        >>> try:
        ...     nonstd_dictview["Baz"] = "foo"
        ... except ValueError:
        ...     print("Rejected")
        Rejected
        >>> # With auto_map_initial_line_whitespace=False, you have to include at minimum a newline
        >>> nonstd_dictview["Baz"] = "foo\n"
        >>> # The absence of leading whitespace gives you the terse variant at the
        >>> # expense of readability
        >>> paragraph.get_kvpair_element('Baz').convert_to_text()
        'Baz:foo\n'
        >>> # But because they are views, changes performed via one view are visible in the other
        >>> paragraph['Foo']
        'bar'
        >>> # The views show the values according to their own rules. Therefore, there is an
        >>> # asymmetry between paragraph['Foo'] and nonstd_dictview['Foo']
        >>> # Nevertheless, you can read or write the fields via either - enabling you to use
        >>> # the view that best suits your use-case for the given field.
        >>> 'Baz' in paragraph and nonstd_dictview.get('Baz') is not None
        True
        >>> # Deletion via the view also works
        >>> del nonstd_dictview['Baz']
        >>> 'Baz' not in paragraph and nonstd_dictview.get('Baz') is None
        True


        :param discard_comments_on_read: When getting a field value from the dict,
          this parameter decides how in-line comments are handled. When setting
          the value, inline comments are still allowed and will be retained.
          However, keep in mind that this option makes getter and setter asymmetric
          as a "get" following a "set" with inline comments will omit the comments
          even if they are there (see the code example).
        :param auto_map_initial_line_whitespace: Special-case the first value line
          by trimming unnecessary whitespace, leaving only the value. For single-line
          values, all space including the newline is pruned. For multi-line values, the
          newline is preserved / needed to distinguish the first line from the
          following lines. When setting a value, this option normalizes the
          whitespace of the initial line of the value field.
          Setting this option to True makes the dictionary behave more like the
          original Deb822 module.
        :param preserve_field_comments_on_field_updates: Whether to preserve the field
          comments when mutating the field.
        :param auto_resolve_ambiguous_fields: This parameter is only relevant for paragraphs
          that contain the same field multiple times (these are generally invalid). If the
          caller requests an ambiguous field from an invalid paragraph via a plain field name,
          the returned dict-like object will refuse to resolve the field (not knowing which
          version to pick). This parameter (if set to True) instead changes the error into
          assuming the caller wants the *first* variant.
        :param auto_map_final_newline_in_multiline_values: This parameter controls whether
          a multiline field will have / needs a trailing newline. If True, the trailing
          newline is hidden on get and automatically added on set (if missing).
          Setting this option to True makes the dictionary behave more like the
          original Deb822 module.
        """
        return Deb822DictishParagraphWrapper(
            self,
            discard_comments_on_read=discard_comments_on_read,
            auto_map_initial_line_whitespace=auto_map_initial_line_whitespace,
            auto_resolve_ambiguous_fields=auto_resolve_ambiguous_fields,
            preserve_field_comments_on_field_updates=preserve_field_comments_on_field_updates,
            auto_map_final_newline_in_multiline_values=auto_map_final_newline_in_multiline_values,
        )


    @property
    def _paragraph(self):
        # type: () -> Deb822ParagraphElement
        return self

    def order_last(self, field):
        # type: (ParagraphKey) -> None
        """Re-order the given field so it is "last" in the paragraph"""
        raise NotImplementedError  # pragma: no cover

    def order_first(self, field):
        # type: (ParagraphKey) -> None
        """Re-order the given field so it is "first" in the paragraph"""
        raise NotImplementedError  # pragma: no cover

    def order_before(self, field, reference_field):
        # type: (ParagraphKey, ParagraphKey) -> None
        """Re-order the given field so it appears directly before the reference field in the paragraph

        The reference field must be present."""
        raise NotImplementedError  # pragma: no cover

    def order_after(self, field, reference_field):
        # type: (ParagraphKey, ParagraphKey) -> None
        """Re-order the given field so it appears directly after the reference field in the paragraph

        The reference field must be present.
        """
        raise NotImplementedError  # pragma: no cover

    @property
    def kvpair_count(self):
        # type: () -> int
        raise NotImplementedError  # pragma: no cover

    def iter_keys(self):
        # type: () -> Iterable[ParagraphKey]
        raise NotImplementedError  # pragma: no cover

    def contains_kvpair_element(self, item):
        # type: (object) -> bool
        raise NotImplementedError  # pragma: no cover

    def get_kvpair_element(
        self,
        item,  # type: ParagraphKey
        use_get=False,  # type: bool
    ):
        # type: (...) -> Optional[Deb822KeyValuePairElement]
        raise NotImplementedError  # pragma: no cover

    def set_kvpair_element(self, key, value):
        # type: (ParagraphKey, Deb822KeyValuePairElement) -> None
        raise NotImplementedError  # pragma: no cover

    def remove_kvpair_element(self, key):
        # type: (ParagraphKey) -> None
        raise NotImplementedError  # pragma: no cover

    def sort_fields(
        self, key=None  # type: Optional[Callable[[str], Any]]
    ):
        # type: (...) -> None
        """Re-order all fields

        :param key: Provide a key function (same semantics as for sorted). Keep in mind that
          the module preserves the case of field names - in general, callers are recommended
          to use "lower()" to normalize the case.
        """
        raise NotImplementedError  # pragma: no cover

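    # Usage sketch (illustrative): sort_fields with a case-normalizing key,
    # as recommended above. The original case of the field names survives
    # the re-ordering:
    #
    #   >>> dfile = parse_deb822_file("b: 1\nA: 2\n".splitlines(keepends=True))
    #   >>> paragraph = next(iter(dfile))
    #   >>> paragraph.sort_fields(key=lambda name: name.lower())
    #   >>> [str(k) for k in paragraph.iter_keys()]
    #   ['A', 'b']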

    def set_field_to_simple_value(
        self,
        item,  # type: ParagraphKey
        simple_value,  # type: str
        *,
        preserve_original_field_comment=None,  # type: Optional[bool]
        field_comment=None,  # type: Optional[Commentish]
    ):
        # type: (...) -> None
        r"""Sets a field in this paragraph to a simple "word" or "phrase"

        In many cases, it is better for callers to just use the paragraph as
        if it was a dictionary. However, this method does enable you to choose
        the field comment (if any), which can be a reason for using it.

        This is suitable for "simple" fields like "Package". Example:

        >>> example_deb822_paragraph = '''
        ... Package: foo
        ... '''
        >>> dfile = parse_deb822_file(example_deb822_paragraph.splitlines())
        >>> p = next(iter(dfile))
        >>> p.set_field_to_simple_value("Package", "mscgen")
        >>> p.set_field_to_simple_value("Architecture", "linux-any kfreebsd-any",
        ...                             field_comment=['Only ported to linux and kfreebsd'])
        >>> p.set_field_to_simple_value("Priority", "optional")
        >>> print(p.dump(), end='')
        Package: mscgen
        # Only ported to linux and kfreebsd
        Architecture: linux-any kfreebsd-any
        Priority: optional
        >>> # Values are formatted nicely by default, but it does not work with
        >>> # multi-line values
        >>> p.set_field_to_simple_value("Foo", "bar\nbin\n")
        Traceback (most recent call last):
            ...
        ValueError: Cannot use set_field_to_simple_value for values with newlines

        :param item: Name of the field to set. If the paragraph already
          contains the field, then it will be replaced. If the field exists,
          then it will preserve its order in the paragraph. Otherwise, it is
          added to the end of the paragraph.
          Note this can be a "paragraph key", which enables you to control
          *which* instance of a field is being replaced (in case of duplicate
          fields).
        :param simple_value: The text to use as the value. The value must not
          contain newlines. Leading and trailing whitespace will be stripped,
          but space within the value is preserved. The value cannot contain
          comments (i.e. if the "#" token appears in the value, then it is
          considered a value rather than the "start of a comment").
        :param preserve_original_field_comment: See the description for the
          parameter with the same name in the set_field_from_raw_string method.
        :param field_comment: See the description for the parameter with the same
          name in the set_field_from_raw_string method.
        """
        if "\n" in simple_value:
            raise ValueError(
                "Cannot use set_field_to_simple_value for values with newlines"
            )

        # Reformat it with a leading space and trailing newline. The latter because it
        # is necessary if there are any fields after it, and the former because it looks
        # nicer with a single space after the field separator.
        stripped = simple_value.strip()
        if stripped:
            raw_value = " " + stripped + "\n"
        else:
            # Special-case for empty values
            raw_value = "\n"
        self.set_field_from_raw_string(
            item,
            raw_value,
            preserve_original_field_comment=preserve_original_field_comment,
            field_comment=field_comment,
        )


-

2260 def set_field_from_raw_string( 

-

2261 self, 

-

2262 item, # type: ParagraphKey 

-

2263 raw_string_value, # type: str 

-

2264 *, 

-

2265 preserve_original_field_comment=None, # type: Optional[bool] 

-

2266 field_comment=None, # type: Optional[Commentish] 

-

2267 ): 

-

2268 # type: (...) -> None 

-

2269 """Sets a field in this paragraph to a given text value 

-

2270 

-

2271 In many cases, it is better for callers to just use the paragraph as 

-

2272 if it was a dictionary. However, this method does enable to you choose 

-

2273 the field comment (if any) and lets to have a higher degree of control 

-

2274 over whitespace (on the first line), which can be a reason for using it. 

-

2275 

-

2276 Example usage: 

-

2277 

-

2278 >>> example_deb822_paragraph = ''' 

-

2279 ... Package: foo 

-

2280 ... ''' 

-

2281 >>> dfile = parse_deb822_file(example_deb822_paragraph.splitlines()) 

-

2282 >>> p = next(iter(dfile)) 

-

2283 >>> raw_value = ''' 

-

2284 ... Build-Depends: debhelper-compat (= 12), 

-

2285 ... some-other-bd, 

-

2286 ... # Comment 

-

2287 ... another-bd, 

-

2288 ... '''.lstrip() # Remove leading newline, but *not* the trailing newline 

-

2289 >>> fname, new_value = raw_value.split(':', 1) 

-

2290 >>> p.set_field_from_raw_string(fname, new_value) 

-

2291 >>> print(p.dump(), end='') 

-

2292 Package: foo 

-

2293 Build-Depends: debhelper-compat (= 12), 

-

2294 some-other-bd, 

-

2295 # Comment 

-

2296 another-bd, 

-

2297 >>> # Format preserved 

-

2298 

-

2299 :param item: Name of the field to set. If the paragraph already 

-

2300 contains the field, then it will be replaced. Otherwise, it is 

-

2301 added to the end of the paragraph. 

-

2302 Note this can be a "paragraph key", which enables you to control 

-

2303 *which* instance of a field is being replaced (in case of duplicate 

-

2304 fields). 

-

2305 :param raw_string_value: The text to use as the value. The text must 

-

2306 be valid deb822 syntax and is used *exactly* as it is given. 

-

2307 Accordingly, multi-line values must include mandatory leading space 

-

2308 on continuation lines, newlines after the value, etc. On the 

-

2309 flip-side, any optional space or comments will be included. 

-

2310 

-

2311 Note that the first line will *never* be read as a comment (if the 

-

2312 first line of the value starts with a "#" then it will result 

-

2313 in "Field-Name:#..." which is parsed as a value starting with "#" 

-

2314 rather than a comment). 

-

2315 :param preserve_original_field_comment: If True, then if there is an 

-

2316 existing field and that has a comment, then the comment will remain 

-

2317 after this operation. This is the default is the `field_comment` 

-

2318 parameter is omitted. 

-

2319 Note that if the parameter is True and the item is ambiguous, this 

-

2320 will raise an AmbiguousDeb822FieldKeyError. When the parameter is 

-

2321 omitted, the ambiguity is resolved automatically and if the resolved 

-

2322 field has a comment then that will be preserved (assuming 

-

2323 field_comment is None). 

-

2324 :param field_comment: If not None, add or replace the comment for 

-

2325 the field. Each string in the list will become one comment 

-

2326 line (inserted directly before the field name). Will appear in the 

-

2327 same order as they do in the list. 

-

2328 

-

2329 If you want complete control over the formatting of the comments, 

-

2330 then ensure that each line start with "#" and end with "\\n" before 

-

2331 the call. Otherwise, leading/trailing whitespace is normalized 

-

2332 and the missing "#"/"\\n" character is inserted. 

-

2333 """ 

-

2334 

-

2335 new_content = [] # type: List[str] 

-

2336 if preserve_original_field_comment is not None: 

-

2337 if field_comment is not None: 2337 ↛ 2338line 2337 didn't jump to line 2338, because the condition on line 2337 was never true

-

2338 raise ValueError( 

-

2339 'The "preserve_original_field_comment" conflicts with' 

-

2340 ' "field_comment" parameter' 

-

2341 ) 

-

2342 elif field_comment is not None: 

-

2343 if not isinstance(field_comment, Deb822CommentElement): 2343 ↛ 2346line 2343 didn't jump to line 2346, because the condition on line 2343 was never false

-

2344 new_content.extend(_format_comment(x) for x in field_comment) 

-

2345 field_comment = None 

-

2346 preserve_original_field_comment = False 

-

2347 

-

2348 field_name, _, _ = _unpack_key(item) 

-

2349 

-

2350 cased_field_name = field_name 

-

2351 try: 

-

2352 original = self.get_kvpair_element(item, use_get=True) 

-

2353 except AmbiguousDeb822FieldKeyError: 

-

2354 if preserve_original_field_comment: 

-

2355 # If we were asked to preserve the original comment, then we 

-

2356 # require a strict lookup 

-

2357 raise 

-

2358 original = self.get_kvpair_element((field_name, 0), use_get=True) 

-

2359 

-

2360 if preserve_original_field_comment is None: 

-

2361 # We simplify preserve_original_field_comment after the lookup of the field. 

-

2362 # Otherwise, we can get ambiguous key errors when updating an ambiguous field 

-

2363 # when the caller did not explicitly ask for that behaviour. 

-

2364 preserve_original_field_comment = True 

-

2365 

-

2366 if original: 

-

2367 # If we already have the field, then preserve the original case 

-

2368 cased_field_name = original.field_name 

-

2369 raw = ":".join((cased_field_name, raw_string_value)) 

-

2370 raw_lines = raw.splitlines(keepends=True) 

-

2371 for i, line in enumerate(raw_lines, start=1): 

-

2372 if not line.endswith("\n"): 2372 ↛ 2373line 2372 didn't jump to line 2373, because the condition on line 2372 was never true

-

2373 raise ValueError( 

-

2374 "Line {i} in new value was missing trailing newline".format(i=i) 

-

2375 ) 

-

2376 if i != 1 and line[0] not in (" ", "\t", "#"): 2376 ↛ 2377line 2376 didn't jump to line 2377

-

2377 msg = ( 

-

2378 "Line {i} in new value was invalid. It must either start" 

-

2379 ' with " " space (continuation line) or "#" (comment line).' 

-

2380 ' The line started with "{line}"' 

-

2381 ) 

-

2382 raise ValueError(msg.format(i=i, line=line[0])) 

-

2383 if len(raw_lines) > 1 and raw_lines[-1].startswith("#"): 2383 ↛ 2384line 2383 didn't jump to line 2384, because the condition on line 2383 was never true

-

2384 raise ValueError("The last line in a value field cannot be a comment") 

-

2385 new_content.extend(raw_lines) 

-

2386 # As absurd as it might seem, it is easier to just use the parser to 

-

2387 # construct the AST correctly 

-

2388 deb822_file = parse_deb822_file(iter(new_content)) 

-

2389 error_token = deb822_file.find_first_error_element() 

-

2390 if error_token: 2390 ↛ 2391line 2390 didn't jump to line 2391, because the condition on line 2390 was never true

-

2391 raise ValueError("Syntax error in new field value for " + field_name) 

-

2392 paragraph = next(iter(deb822_file)) 

-

2393 assert isinstance(paragraph, Deb822NoDuplicateFieldsParagraphElement) 

-

2394 value = paragraph.get_kvpair_element(field_name) 

-

2395 assert value is not None 

-

2396 if preserve_original_field_comment: 

-

2397 if original: 

-

2398 value.comment_element = original.comment_element 

-

2399 original.comment_element = None 

-

2400 elif field_comment is not None: 2400 ↛ 2401line 2400 didn't jump to line 2401, because the condition on line 2400 was never true

-

2401 value.comment_element = field_comment 

-

2402 self.set_kvpair_element(item, value) 

-

2403 
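As a hedged illustration of the `field_comment` parameter documented above (a minimal sketch, assuming the `parse_deb822_file` entry point defined later in this module; the control file content and the comment text are made up):

    dfile = parse_deb822_file("Package: foo\nArchitecture: any\n".splitlines(keepends=True))
    para = next(iter(dfile))
    # The raw value must carry its own mandatory whitespace (leading space,
    # trailing newline), exactly as the docstring above requires.
    para.set_field_from_raw_string(
        "Architecture",
        " all\n",
        field_comment=["switched to arch:all"],  # normalized to "# switched to arch:all\n"
    )
    print(dfile.dump(), end="")
    # Package: foo
    # # switched to arch:all
    # Architecture: all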

-

2404 @overload 

-

2405 def dump( 

-

2406 self, fd # type: IO[bytes] 

-

2407 ): 

-

2408 # type: (...) -> None 

-

2409 pass 

-

2410 

-

2411 @overload 

-

2412 def dump(self): 

-

2413 # type: () -> str 

-

2414 pass 

-

2415 

-

2416 def dump( 

-

2417 self, fd=None # type: Optional[IO[bytes]] 

-

2418 ): 

-

2419 # type: (...) -> Optional[str] 

-

2420 if fd is None: 2420 ↛ 2422line 2420 didn't jump to line 2422, because the condition on line 2420 was never false

-

2421 return "".join(t.text for t in self.iter_tokens()) 

-

2422 for token in self.iter_tokens(): 

-

2423 fd.write(token.text.encode("utf-8")) 

-

2424 return None 

-

2425 

-

2426 

-

2427class Deb822NoDuplicateFieldsParagraphElement(Deb822ParagraphElement): 

-

2428 """Paragraph implementation optimized for valid deb822 files 

-

2429 

-

2430 When there are no duplicated fields, we can use simpler and faster 

-

2431 data structures for common operations. 

-

2432 """ 

-

2433 

-

2434 def __init__( 

-

2435 self, 

-

2436 kvpair_elements, # type: List[Deb822KeyValuePairElement] 

-

2437 kvpair_order, # type: OrderedSet 

-

2438 ): 

-

2439 # type: (...) -> None 

-

2440 super().__init__() 

-

2441 self._kvpair_elements = {kv.field_name: kv for kv in kvpair_elements} 

-

2442 self._kvpair_order = kvpair_order 

-

2443 self._init_parent_of_parts() 

-

2444 

-

2445 @property 

-

2446 def kvpair_count(self): 

-

2447 # type: () -> int 

-

2448 return len(self._kvpair_elements) 

-

2449 

-

2450 def order_last(self, field): 

-

2451 # type: (ParagraphKey) -> None 

-

2452 """Re-order the given field so it is "last" in the paragraph""" 

-

2453 unpacked_field, _, _ = _unpack_key(field, raise_if_indexed=True) 

-

2454 self._kvpair_order.order_last(unpacked_field) 

-

2455 

-

2456 def order_first(self, field): 

-

2457 # type: (ParagraphKey) -> None 

-

2458 """Re-order the given field so it is "first" in the paragraph""" 

-

2459 unpacked_field, _, _ = _unpack_key(field, raise_if_indexed=True) 

-

2460 self._kvpair_order.order_first(unpacked_field) 

-

2461 

-

2462 def order_before(self, field, reference_field): 

-

2463 # type: (ParagraphKey, ParagraphKey) -> None 

-

2464 """Re-order the given field so appears directly after the reference field in the paragraph 

-

2465 

-

2466 The reference field must be present.""" 

-

2467 unpacked_field, _, _ = _unpack_key(field, raise_if_indexed=True) 

-

2468 unpacked_ref_field, _, _ = _unpack_key(reference_field, raise_if_indexed=True) 

-

2469 self._kvpair_order.order_before(unpacked_field, unpacked_ref_field) 

-

2470 

-

2471 def order_after(self, field, reference_field): 

-

2472 # type: (ParagraphKey, ParagraphKey) -> None 

-

2473 """Re-order the given field so appears directly before the reference field in the paragraph 

-

2474 

-

2475 The reference field must be present. 

-

2476 """ 

-

2477 unpacked_field, _, _ = _unpack_key(field, raise_if_indexed=True) 

-

2478 unpacked_ref_field, _, _ = _unpack_key(reference_field, raise_if_indexed=True) 

-

2479 self._kvpair_order.order_after(unpacked_field, unpacked_ref_field) 

-

2480 
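A minimal usage sketch of the order_* API above (assuming `parse_deb822_file` from later in this module; the field names are illustrative):

    para = next(iter(parse_deb822_file(
        "Package: foo\nArchitecture: any\nSection: misc\n".splitlines(keepends=True)
    )))
    para.order_first("Section")                  # -> Section, Package, Architecture
    para.order_after("Architecture", "Section")  # -> Section, Architecture, Package
    print(list(para))  # iteration yields the field names in their new order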

-

2481 # Overload to narrow the type to just str. 

-

2482 def __iter__(self): 

-

2483 # type: () -> Iterator[str] 

-

2484 return iter(str(k) for k in self._kvpair_order) 

-

2485 

-

2486 def iter_keys(self): 

-

2487 # type: () -> Iterable[str] 

-

2488 yield from (str(k) for k in self._kvpair_order) 

-

2489 

-

2490 def remove_kvpair_element(self, key): 

-

2491 # type: (ParagraphKey) -> None 

-

2492 self._full_size_cache = None 

-

2493 key, _, _ = _unpack_key(key, raise_if_indexed=True) 

-

2494 del self._kvpair_elements[key] 

-

2495 self._kvpair_order.remove(key) 

-

2496 

-

2497 def contains_kvpair_element(self, item): 

-

2498 # type: (object) -> bool 

-

2499 if not isinstance(item, (str, tuple, Deb822FieldNameToken)): 2499 ↛ 2500line 2499 didn't jump to line 2500, because the condition on line 2499 was never true

-

2500 return False 

-

2501 item = cast("ParagraphKey", item) 

-

2502 key, _, _ = _unpack_key(item, raise_if_indexed=True) 

-

2503 return key in self._kvpair_elements 

-

2504 

-

2505 def get_kvpair_element( 

-

2506 self, 

-

2507 item, # type: ParagraphKey 

-

2508 use_get=False, # type: bool 

-

2509 ): 

-

2510 # type: (...) -> Optional[Deb822KeyValuePairElement] 

-

2511 item, _, _ = _unpack_key(item, raise_if_indexed=True) 

-

2512 if use_get: 

-

2513 return self._kvpair_elements.get(item) 

-

2514 return self._kvpair_elements[item] 

-

2515 

-

2516 def set_kvpair_element(self, key, value): 

-

2517 # type: (ParagraphKey, Deb822KeyValuePairElement) -> None 

-

2518 key, _, _ = _unpack_key(key, raise_if_indexed=True) 

-

2519 if isinstance(key, Deb822FieldNameToken): 2519 ↛ 2520line 2519 didn't jump to line 2520, because the condition on line 2519 was never true

-

2520 if key is not value.field_token: 

-

2521 raise ValueError( 

-

2522 "Key is a Deb822FieldNameToken, but not *the* Deb822FieldNameToken" 

-

2523 " for the value" 

-

2524 ) 

-

2525 key = value.field_name 

-

2526 else: 

-

2527 if key != value.field_name: 2527 ↛ 2528line 2527 didn't jump to line 2528, because the condition on line 2527 was never true

-

2528 raise ValueError( 

-

2529 "Cannot insert value under a different field value than field name" 

-

2530 " from its Deb822FieldNameToken implies" 

-

2531 ) 

-

2532 # Use the string from the Deb822FieldNameToken as we need to keep that in memory either 

-

2533 # way 

-

2534 key = value.field_name 

-

2535 original_value = self._kvpair_elements.get(key) 

-

2536 self._full_size_cache = None 

-

2537 self._kvpair_elements[key] = value 

-

2538 self._kvpair_order.append(key) 

-

2539 if original_value is not None: 

-

2540 original_value.parent_element = None 

-

2541 value.parent_element = self 

-

2542 

-

2543 def sort_fields(self, key=None): 

-

2544 # type: (Optional[Callable[[str], Any]]) -> None 

-

2545 """Re-order all fields 

-

2546 

-

2547 :param key: Provide a key function (same semantics as for sorted). Keep in mind that 

-

2548 the module preserves the case of field names - in general, callers are recommended 

-

2549 to use "lower()" to normalize the case. 

-

2550 """ 

-

2551 for last_field_name in reversed(self._kvpair_order): 

-

2552 last_kvpair = self._kvpair_elements[cast("_strI", last_field_name)] 

-

2553 if last_kvpair.value_element.add_final_newline_if_missing(): 

-

2554 self._full_size_cache = None 

-

2555 break 

-

2556 

-

2557 if key is None: 

-

2558 key = default_field_sort_key 

-

2559 

-

2560 self._kvpair_order = OrderedSet(sorted(self._kvpair_order, key=key)) 

-

2561 

-

2562 def iter_parts(self): 

-

2563 # type: () -> Iterable[TokenOrElement] 

-

2564 yield from ( 

-

2565 self._kvpair_elements[x] 

-

2566 for x in cast("Iterable[_strI]", self._kvpair_order) 

-

2567 ) 

-

2568 
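For example, `sort_fields` with the case-normalizing key that its docstring recommends (a sketch; `para` is any paragraph parsed as in the doctests above):

    # Field names keep their original spelling; only their order changes.
    para.sort_fields(key=lambda field_name: field_name.lower())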

-

2569 

-

2570class Deb822DuplicateFieldsParagraphElement(Deb822ParagraphElement): 

-

2571 

-

2572 def __init__(self, kvpair_elements): 

-

2573 # type: (List[Deb822KeyValuePairElement]) -> None 

-

2574 super().__init__() 

-

2575 self._kvpair_order = LinkedList() # type: LinkedList[Deb822KeyValuePairElement] 

-

2576 self._kvpair_elements = {} # type: Dict[_strI, List[KVPNode]] 

-

2577 self._init_kvpair_fields(kvpair_elements) 

-

2578 self._init_parent_of_parts() 

-

2579 

-

2580 @property 

-

2581 def has_duplicate_fields(self): 

-

2582 # type: () -> bool 

-

2583 # Most likely, the answer is "True" but if the caller "fixes" the problem 

-

2584 # then this can return "False" 

-

2585 return len(self._kvpair_order) > len(self._kvpair_elements) 

-

2586 

-

2587 def _init_kvpair_fields(self, kvpairs): 

-

2588 # type: (Iterable[Deb822KeyValuePairElement]) -> None 

-

2589 assert not self._kvpair_order 

-

2590 assert not self._kvpair_elements 

-

2591 for kv in kvpairs: 

-

2592 field_name = kv.field_name 

-

2593 node = self._kvpair_order.append(kv) 

-

2594 if field_name not in self._kvpair_elements: 

-

2595 self._kvpair_elements[field_name] = [node] 

-

2596 else: 

-

2597 self._kvpair_elements[field_name].append(node) 

-

2598 

-

2599 def _nodes_being_relocated(self, field): 

-

2600 # type: (ParagraphKey) -> Tuple[List[KVPNode], List[KVPNode]] 

-

2601 key, index, name_token = _unpack_key(field) 

-

2602 nodes = self._kvpair_elements[key] 

-

2603 nodes_being_relocated = [] 

-

2604 

-

2605 if name_token is not None or index is not None: 

-

2606 single_node = self._resolve_to_single_node(nodes, key, index, name_token) 

-

2607 assert single_node is not None 

-

2608 nodes_being_relocated.append(single_node) 

-

2609 else: 

-

2610 nodes_being_relocated = nodes 

-

2611 return nodes, nodes_being_relocated 

-

2612 

-

2613 def order_last(self, field): 

-

2614 # type: (ParagraphKey) -> None 

-

2615 """Re-order the given field so it is "last" in the paragraph""" 

-

2616 nodes, nodes_being_relocated = self._nodes_being_relocated(field) 

-

2617 assert len(nodes_being_relocated) == 1 or len(nodes) == len( 

-

2618 nodes_being_relocated 

-

2619 ) 

-

2620 

-

2621 kvpair_order = self._kvpair_order 

-

2622 for node in nodes_being_relocated: 

-

2623 if kvpair_order.tail_node is node: 

-

2624 # Special case for relocating a single node that happens to be the last. 

-

2625 continue 

-

2626 kvpair_order.remove_node(node) 

-

2627 # assertion for mypy 

-

2628 assert kvpair_order.tail_node is not None 

-

2629 kvpair_order.insert_node_after(node, kvpair_order.tail_node) 

-

2630 

-

2631 if ( 

-

2632 len(nodes_being_relocated) == 1 

-

2633 and nodes_being_relocated[0] is not nodes[-1] 

-

2634 ): 

-

2635 single_node = nodes_being_relocated[0] 

-

2636 nodes.remove(single_node) 

-

2637 nodes.append(single_node) 

-

2638 

-

2639 def order_first(self, field): 

-

2640 # type: (ParagraphKey) -> None 

-

2641 """Re-order the given field so it is "first" in the paragraph""" 

-

2642 nodes, nodes_being_relocated = self._nodes_being_relocated(field) 

-

2643 assert len(nodes_being_relocated) == 1 or len(nodes) == len( 

-

2644 nodes_being_relocated 

-

2645 ) 

-

2646 

-

2647 kvpair_order = self._kvpair_order 

-

2648 for node in nodes_being_relocated: 

-

2649 if kvpair_order.head_node is node: 

-

2650 # Special case for relocating a single node that happens to be the first. 

-

2651 continue 

-

2652 kvpair_order.remove_node(node) 

-

2653 # assertion for mypy 

-

2654 assert kvpair_order.head_node is not None 

-

2655 kvpair_order.insert_node_before(node, kvpair_order.head_node) 

-

2656 

-

2657 if len(nodes_being_relocated) == 1 and nodes_being_relocated[0] is not nodes[0]: 

-

2658 single_node = nodes_being_relocated[0] 

-

2659 nodes.remove(single_node) 

-

2660 nodes.insert(0, single_node) 

-

2661 

-

2662 def order_before(self, field, reference_field): 

-

2663 # type: (ParagraphKey, ParagraphKey) -> None 

-

2664 """Re-order the given field so appears directly after the reference field in the paragraph 

-

2665 

-

2666 The reference field must be present.""" 

-

2667 nodes, nodes_being_relocated = self._nodes_being_relocated(field) 

-

2668 assert len(nodes_being_relocated) == 1 or len(nodes) == len( 

-

2669 nodes_being_relocated 

-

2670 ) 

-

2671 # For "before" we always use the "first" variant as reference in case of doubt 

-

2672 _, reference_nodes = self._nodes_being_relocated(reference_field) 

-

2673 reference_node = reference_nodes[0] 

-

2674 if reference_node in nodes_being_relocated: 

-

2675 raise ValueError("Cannot re-order a field relative to itself") 

-

2676 

-

2677 kvpair_order = self._kvpair_order 

-

2678 for node in nodes_being_relocated: 

-

2679 kvpair_order.remove_node(node) 

-

2680 kvpair_order.insert_node_before(node, reference_node) 

-

2681 

-

2682 if len(nodes_being_relocated) == 1 and len(nodes) > 1: 

-

2683 # Regenerate the (new) relative field order. 

-

2684 field_name = nodes_being_relocated[0].value.field_name 

-

2685 self._regenerate_relative_kvapir_order(field_name) 

-

2686 

-

2687 def order_after(self, field, reference_field): 

-

2688 # type: (ParagraphKey, ParagraphKey) -> None 

-

2689 """Re-order the given field so appears directly before the reference field in the paragraph 

-

2690 

-

2691 The reference field must be present. 

-

2692 """ 

-

2693 nodes, nodes_being_relocated = self._nodes_being_relocated(field) 

-

2694 assert len(nodes_being_relocated) == 1 or len(nodes) == len( 

-

2695 nodes_being_relocated 

-

2696 ) 

-

2697 _, reference_nodes = self._nodes_being_relocated(reference_field) 

-

2698 # For "after" we always use the "last" variant as reference in case of doubt 

-

2699 reference_node = reference_nodes[-1] 

-

2700 if reference_node in nodes_being_relocated: 

-

2701 raise ValueError("Cannot re-order a field relative to itself") 

-

2702 

-

2703 kvpair_order = self._kvpair_order 

-

2704 # Use "reversed" to preserve the relative order of the nodes assuming a bulk reorder 

-

2705 for node in reversed(nodes_being_relocated): 

-

2706 kvpair_order.remove_node(node) 

-

2707 kvpair_order.insert_node_after(node, reference_node) 

-

2708 

-

2709 if len(nodes_being_relocated) == 1 and len(nodes) > 1: 

-

2710 # Regenerate the (new) relative field order. 

-

2711 field_name = nodes_being_relocated[0].value.field_name 

-

2712 self._regenerate_relative_kvapir_order(field_name) 

-

2713 

-

2714 def _regenerate_relative_kvapir_order(self, field_name): 

-

2715 # type: (_strI) -> None 

-

2716 nodes = [] 

-

2717 for node in self._kvpair_order.iter_nodes(): 

-

2718 if node.value.field_name == field_name: 

-

2719 nodes.append(node) 

-

2720 self._kvpair_elements[field_name] = nodes 

-

2721 

-

2722 def iter_parts(self): 

-

2723 # type: () -> Iterable[TokenOrElement] 

-

2724 yield from self._kvpair_order 

-

2725 

-

2726 @property 

-

2727 def kvpair_count(self): 

-

2728 # type: () -> int 

-

2729 return len(self._kvpair_order) 

-

2730 

-

2731 def iter_keys(self): 

-

2732 # type: () -> Iterable[ParagraphKey] 

-

2733 yield from (kv.field_name for kv in self._kvpair_order) 

-

2734 

-

2735 def _resolve_to_single_node( 

-

2736 self, 

-

2737 nodes, # type: List[KVPNode] 

-

2738 key, # type: str 

-

2739 index, # type: Optional[int] 

-

2740 name_token, # type: Optional[Deb822FieldNameToken] 

-

2741 use_get=False, # type: bool 

-

2742 ): 

-

2743 # type: (...) -> Optional[KVPNode] 

-

2744 if index is None: 

-

2745 if len(nodes) != 1: 

-

2746 if name_token is not None: 

-

2747 node = self._find_node_via_name_token(name_token, nodes) 

-

2748 if node is not None: 

-

2749 return node 

-

2750 msg = ( 

-

2751 "Ambiguous key {key} - the field appears {res_len} times. Use" 

-

2752 " ({key}, index) to denote which instance of the field you want. (Index" 

-

2753 " can be 0..{res_len_1} or e.g. -1 to denote the last field)" 

-

2754 ) 

-

2755 raise AmbiguousDeb822FieldKeyError( 

-

2756 msg.format(key=key, res_len=len(nodes), res_len_1=len(nodes) - 1) 

-

2757 ) 

-

2758 index = 0 

-

2759 try: 

-

2760 return nodes[index] 

-

2761 except IndexError: 

-

2762 if use_get: 

-

2763 return None 

-

2764 msg = 'Field "{key}" was present but the index "{index}" was invalid.' 

-

2765 raise KeyError(msg.format(key=key, index=index)) 

-

2766 

-

2767 def get_kvpair_element( 

-

2768 self, 

-

2769 item, # type: ParagraphKey 

-

2770 use_get=False, # type: bool 

-

2771 ): 

-

2772 # type: (...) -> Optional[Deb822KeyValuePairElement] 

-

2773 key, index, name_token = _unpack_key(item) 

-

2774 if use_get: 

-

2775 nodes = self._kvpair_elements.get(key) 

-

2776 if nodes is None: 

-

2777 return None 

-

2778 else: 

-

2779 nodes = self._kvpair_elements[key] 

-

2780 node = self._resolve_to_single_node( 

-

2781 nodes, key, index, name_token, use_get=use_get 

-

2782 ) 

-

2783 if node is not None: 

-

2784 return node.value 

-

2785 return None 

-

2786 
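A hedged sketch of indexed lookups on a paragraph with duplicated fields, matching `_resolve_to_single_node` above (it relies on the `accept_files_with_duplicated_fields` flag of `parse_deb822_file`, defined later in this module):

    dfile = parse_deb822_file(
        ["Package: foo\n", "Package: bar\n"],
        accept_files_with_duplicated_fields=True,
    )
    para = next(iter(dfile))
    kvpair = para.get_kvpair_element(("Package", 1))  # the second instance
    print(kvpair.convert_to_text(), end="")           # Package: bar
    # An index-less lookup of "Package" raises AmbiguousDeb822FieldKeyError here.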

-

2787 @staticmethod 

-

2788 def _find_node_via_name_token( 

-

2789 name_token, # type: Deb822FieldNameToken 

-

2790 elements, # type: Iterable[KVPNode] 

-

2791 ): 

-

2792 # type: (...) -> Optional[KVPNode] 

-

2793 # if we are given a name token, then it is non-ambiguous if we have exactly 

-

2794 # that name token in our list of nodes. It will be an O(n) lookup but we 

-

2795 # probably do not have that many duplicate fields (and even if we do, it is not 

-

2796 # exactly a valid file, so there is little reason to optimize for it) 

-

2797 for node in elements: 

-

2798 if name_token is node.value.field_token: 

-

2799 return node 

-

2800 return None 

-

2801 

-

2802 def contains_kvpair_element(self, item): 

-

2803 # type: (object) -> bool 

-

2804 if not isinstance(item, (str, tuple, Deb822FieldNameToken)): 

-

2805 return False 

-

2806 item = cast("ParagraphKey", item) 

-

2807 try: 

-

2808 return self.get_kvpair_element(item, use_get=True) is not None 

-

2809 except AmbiguousDeb822FieldKeyError: 

-

2810 return True 

-

2811 

-

2812 def set_kvpair_element(self, key, value): 

-

2813 # type: (ParagraphKey, Deb822KeyValuePairElement) -> None 

-

2814 key, index, name_token = _unpack_key(key) 

-

2815 if name_token: 

-

2816 if name_token is not value.field_token: 

-

2817 original_nodes = self._kvpair_elements.get(value.field_name) 

-

2818 original_node = None 

-

2819 if original_nodes is not None: 

-

2820 original_node = self._find_node_via_name_token( 

-

2821 name_token, original_nodes 

-

2822 ) 

-

2823 

-

2824 if original_node is None: 

-

2825 raise ValueError( 

-

2826 "Key is a Deb822FieldNameToken, but not *the*" 

-

2827 " Deb822FieldNameToken for the value nor the" 

-

2828 " Deb822FieldNameToken for an existing field in the paragraph" 

-

2829 ) 

-

2830 # Primarily for mypy's sake 

-

2831 assert original_nodes is not None 

-

2832 # Rely on the index-based code below to handle update. 

-

2833 index = original_nodes.index(original_node) 

-

2834 key = value.field_name 

-

2835 else: 

-

2836 if key != value.field_name: 

-

2837 raise ValueError( 

-

2838 "Cannot insert value under a different field value than field name" 

-

2839 " from its Deb822FieldNameToken implies" 

-

2840 ) 

-

2841 # Use the string from the Deb822FieldNameToken as it is a _strI and has the same value 

-

2842 # (memory optimization) 

-

2843 key = value.field_name 

-

2844 self._full_size_cache = None 

-

2845 original_nodes = self._kvpair_elements.get(key) 

-

2846 if original_nodes is None or not original_nodes: 

-

2847 if index is not None and index != 0: 

-

2848 msg = ( 

-

2849 "Cannot replace field ({key}, {index}) as the field does not exist" 

-

2850 " in the first place. Please index-less key or ({key}, 0) if you" 

-

2851 " want to add the field." 

-

2852 ) 

-

2853 raise KeyError(msg.format(key=key, index=index)) 

-

2854 node = self._kvpair_order.append(value) 

-

2855 if key not in self._kvpair_elements: 

-

2856 self._kvpair_elements[key] = [node] 

-

2857 else: 

-

2858 self._kvpair_elements[key].append(node) 

-

2859 return 

-

2860 

-

2861 replace_all = False 

-

2862 if index is None: 

-

2863 replace_all = True 

-

2864 node = original_nodes[0] 

-

2865 if len(original_nodes) != 1: 

-

2866 self._kvpair_elements[key] = [node] 

-

2867 else: 

-

2868 # We insist on there being an original node, which as a side effect ensures 

-

2869 # you cannot add additional copies of the field. This means that you cannot 

-

2870 # make the problem worse. 

-

2871 node = original_nodes[index] 

-

2872 

-

2873 # Replace the value of the existing node plus do a little dance 

-

2874 # for the parent element part. 

-

2875 node.value.parent_element = None 

-

2876 value.parent_element = self 

-

2877 node.value = value 

-

2878 

-

2879 if replace_all and len(original_nodes) != 1: 

-

2880 # If we were in a replace-all mode, discard any remaining nodes 

-

2881 for n in original_nodes[1:]: 

-

2882 n.value.parent_element = None 

-

2883 self._kvpair_order.remove_node(n) 

-

2884 

-

2885 def remove_kvpair_element(self, key): 

-

2886 # type: (ParagraphKey) -> None 

-

2887 key, idx, name_token = _unpack_key(key) 

-

2888 field_list = self._kvpair_elements[key] 

-

2889 

-

2890 if name_token is None and idx is None: 

-

2891 self._full_size_cache = None 

-

2892 # Remove all case 

-

2893 for node in field_list: 

-

2894 node.value.parent_element = None 

-

2895 self._kvpair_order.remove_node(node) 

-

2896 del self._kvpair_elements[key] 

-

2897 return 

-

2898 

-

2899 if name_token is not None: 

-

2900 # Indirection between original_node and node for mypy's sake 

-

2901 original_node = self._find_node_via_name_token(name_token, field_list) 

-

2902 if original_node is None: 

-

2903 msg = 'The field "{key}" is present but key used to access it is not.' 

-

2904 raise KeyError(msg.format(key=key)) 

-

2905 node = original_node 

-

2906 else: 

-

2907 assert idx is not None 

-

2908 try: 

-

2909 node = field_list[idx] 

-

2910 except IndexError: 

-

2911 msg = 'The field "{key}" is present, but the index "{idx}" was invalid.' 

-

2912 raise KeyError(msg.format(key=key, idx=idx)) 

-

2913 

-

2914 self._full_size_cache = None 

-

2915 if len(field_list) == 1: 

-

2916 del self._kvpair_elements[key] 

-

2917 else: 

-

2918 field_list.remove(node) 

-

2919 node.value.parent_element = None 

-

2920 self._kvpair_order.remove_node(node) 

-

2921 

-

2922 def sort_fields(self, key=None): 

-

2923 # type: (Optional[Callable[[str], Any]]) -> None 

-

2924 """Re-order all fields 

-

2925 

-

2926 :param key: Provide a key function (same semantics as for sorted). Keep in mind that 

-

2927 the module preserves the case of field names - in general, callers are recommended 

-

2928 to use "lower()" to normalize the case. 

-

2929 """ 

-

2930 

-

2931 if key is None: 

-

2932 key = default_field_sort_key 

-

2933 

-

2934 # Work around mypy that cannot seem to shred the Optional notion 

-

2935 # without this little indirection 

-

2936 key_impl = key 

-

2937 

-

2938 def _actual_key(kvpair): 

-

2939 # type: (Deb822KeyValuePairElement) -> Any 

-

2940 return key_impl(kvpair.field_name) 

-

2941 

-

2942 for last_kvpair in reversed(self._kvpair_order): 

-

2943 if last_kvpair.value_element.add_final_newline_if_missing(): 

-

2944 self._full_size_cache = None 

-

2945 break 

-

2946 

-

2947 sorted_kvpair_list = sorted(self._kvpair_order, key=_actual_key) 

-

2948 self._kvpair_order = LinkedList() 

-

2949 self._kvpair_elements = {} 

-

2950 self._init_kvpair_fields(sorted_kvpair_list) 

-

2951 

-

2952 

-

2953class Deb822FileElement(Deb822Element): 

-

2954 """Represents the entire deb822 file""" 

-

2955 

-

2956 def __init__(self, token_and_elements): 

-

2957 # type: (LinkedList[TokenOrElement]) -> None 

-

2958 super().__init__() 

-

2959 self._token_and_elements = token_and_elements 

-

2960 self._init_parent_of_parts() 

-

2961 

-

2962 @classmethod 

-

2963 def new_empty_file(cls): 

-

2964 # type: () -> Deb822FileElement 

-

2965 """Creates a new Deb822FileElement with no contents 

-

2966 

-

2967 Note that a deb822 file must be non-empty to be considered valid 

-

2968 """ 

-

2969 return cls(LinkedList()) 

-

2970 

-

2971 @property 

-

2972 def is_valid_file(self): 

-

2973 # type: () -> bool 

-

2974 """Returns true if the file is valid 

-

2975 

-

2976 Invalid elements include error elements (Deb822ErrorElement) but also 

-

2977 issues such as paragraphs with duplicate fields or "empty" files 

-

2978 (a valid deb822 file contains at least one paragraph). 

-

2979 """ 

-

2980 had_paragraph = False 

-

2981 for paragraph in self: 

-

2982 had_paragraph = True 

-

2983 if not paragraph or paragraph.has_duplicate_fields: 

-

2984 return False 

-

2985 

-

2986 if not had_paragraph: 

-

2987 return False 

-

2988 

-

2989 return self.find_first_error_element() is None 

-

2990 

-

2991 def find_first_error_element(self): 

-

2992 # type: () -> Optional[Deb822ErrorElement] 

-

2993 """Returns the first Deb822ErrorElement (or None) in the file""" 

-

2994 return next( 

-

2995 iter(self.iter_recurse(only_element_or_token_type=Deb822ErrorElement)), None 

-

2996 ) 

-

2997 
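A small sketch of the validity checks above, using the `accept_files_with_error_tokens` flag of `parse_deb822_file` (defined later in this module):

    dfile = parse_deb822_file(
        ["Package foo\n"],                    # missing ":" separator -> parse error
        accept_files_with_error_tokens=True,  # keep error elements instead of raising
    )
    print(dfile.is_valid_file)               # False
    print(dfile.find_first_error_element())  # the offending Deb822ErrorElement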

-

2998 def __iter__(self): 

-

2999 # type: () -> Iterator[Deb822ParagraphElement] 

-

3000 return iter(self.iter_parts_of_type(Deb822ParagraphElement)) 

-

3001 

-

3002 def iter_parts(self): 

-

3003 # type: () -> Iterable[TokenOrElement] 

-

3004 yield from self._token_and_elements 

-

3005 

-

3006 def insert(self, idx, para): 

-

3007 # type: (int, Deb822ParagraphElement) -> None 

-

3008 """Inserts a paragraph into the file at the given "index" of paragraphs 

-

3009 

-

3010 Note that if the index is between two paragraphs containing a "free 

-

3011 floating" comment (e.g. paragraph/start-of-file, empty line, comment, 

-

3012 empty line, paragraph) then it is unspecified which "side" of the 

-

3013 comment the new paragraph will appear on, and this may change between 

-

3014 versions of python-debian. 

-

3015 

-

3016 

-

3017 >>> original = ''' 

-

3018 ... Package: libfoo-dev 

-

3019 ... Depends: libfoo1 (= ${binary:Version}), ${shlib:Depends}, ${misc:Depends} 

-

3020 ... '''.lstrip() 

-

3021 >>> deb822_file = parse_deb822_file(original.splitlines()) 

-

3022 >>> para1 = Deb822ParagraphElement.new_empty_paragraph() 

-

3023 >>> para1["Source"] = "foo" 

-

3024 >>> para1["Build-Depends"] = "debhelper-compat (= 13)" 

-

3025 >>> para2 = Deb822ParagraphElement.new_empty_paragraph() 

-

3026 >>> para2["Package"] = "libfoo1" 

-

3027 >>> para2["Depends"] = "${shlib:Depends}, ${misc:Depends}" 

-

3028 >>> deb822_file.insert(0, para1) 

-

3029 >>> deb822_file.insert(1, para2) 

-

3030 >>> expected = ''' 

-

3031 ... Source: foo 

-

3032 ... Build-Depends: debhelper-compat (= 13) 

-

3033 ... 

-

3034 ... Package: libfoo1 

-

3035 ... Depends: ${shlib:Depends}, ${misc:Depends} 

-

3036 ... 

-

3037 ... Package: libfoo-dev 

-

3038 ... Depends: libfoo1 (= ${binary:Version}), ${shlib:Depends}, ${misc:Depends} 

-

3039 ... '''.lstrip() 

-

3040 >>> deb822_file.dump() == expected 

-

3041 True 

-

3042 """ 

-

3043 

-

3044 anchor_node = None 

-

3045 needs_newline = True 

-

3046 self._full_size_cache = None 

-

3047 if idx == 0: 

-

3048 # Special-case, if idx is 0, then we insert it before everything else. 

-

3049 # This is mostly a cosmetic choice for corner cases involving free-floating 

-

3050 # comments in the file. 

-

3051 if not self._token_and_elements: 3051 ↛ 3052line 3051 didn't jump to line 3052, because the condition on line 3051 was never true

-

3052 self.append(para) 

-

3053 return 

-

3054 anchor_node = self._token_and_elements.head_node 

-

3055 needs_newline = bool(self._token_and_elements) 

-

3056 else: 

-

3057 i = 0 

-

3058 for node in self._token_and_elements.iter_nodes(): 3058 ↛ 3066line 3058 didn't jump to line 3066, because the loop on line 3058 didn't complete

-

3059 entry = node.value 

-

3060 if isinstance(entry, Deb822ParagraphElement): 

-

3061 i += 1 

-

3062 if idx == i - 1: 

-

3063 anchor_node = node 

-

3064 break 

-

3065 

-

3066 if anchor_node is None: 3066 ↛ 3068line 3066 didn't jump to line 3068, because the condition on line 3066 was never true

-

3067 # Empty list or idx after the last paragraph both degenerate into append 

-

3068 self.append(para) 

-

3069 else: 

-

3070 if needs_newline: 3070 ↛ 3076line 3070 didn't jump to line 3076, because the condition on line 3070 was never false

-

3071 # Remember to inject the "separating" newline between two paragraphs 

-

3072 nl_token = self._set_parent(Deb822WhitespaceToken("\n")) 

-

3073 anchor_node = self._token_and_elements.insert_before( 

-

3074 nl_token, anchor_node 

-

3075 ) 

-

3076 self._token_and_elements.insert_before(self._set_parent(para), anchor_node) 

-

3077 

-

3078 def append(self, paragraph): 

-

3079 # type: (Deb822ParagraphElement) -> None 

-

3080 """Appends a paragraph to the file 

-

3081 

-

3082 >>> deb822_file = Deb822FileElement.new_empty_file() 

-

3083 >>> para1 = Deb822ParagraphElement.new_empty_paragraph() 

-

3084 >>> para1["Source"] = "foo" 

-

3085 >>> para1["Build-Depends"] = "debhelper-compat (= 13)" 

-

3086 >>> para2 = Deb822ParagraphElement.new_empty_paragraph() 

-

3087 >>> para2["Package"] = "foo" 

-

3088 >>> para2["Depends"] = "${shlib:Depends}, ${misc:Depends}" 

-

3089 >>> deb822_file.append(para1) 

-

3090 >>> deb822_file.append(para2) 

-

3091 >>> expected = ''' 

-

3092 ... Source: foo 

-

3093 ... Build-Depends: debhelper-compat (= 13) 

-

3094 ... 

-

3095 ... Package: foo 

-

3096 ... Depends: ${shlib:Depends}, ${misc:Depends} 

-

3097 ... '''.lstrip() 

-

3098 >>> deb822_file.dump() == expected 

-

3099 True 

-

3100 """ 

-

3101 tail_element = self._token_and_elements.tail 

-

3102 if paragraph.parent_element is not None: 3102 ↛ 3103line 3102 didn't jump to line 3103, because the condition on line 3102 was never true

-

3103 if paragraph.parent_element is self: 

-

3104 raise ValueError("Paragraph is already a part of this file") 

-

3105 raise ValueError("Paragraph is already part of another Deb822File") 

-

3106 

-

3107 self._full_size_cache = None 

-

3108 # We need a separating newline if there is not a whitespace token at the end of the file. 

-

3109 # Note the special case where the file ends on a comment; here we insert a whitespace too 

-

3110 # to be sure. Otherwise, we would have to check that there is an empty line before that 

-

3111 # comment and that is too much effort. 

-

3112 if tail_element and not isinstance(tail_element, Deb822WhitespaceToken): 

-

3113 self._token_and_elements.append( 

-

3114 self._set_parent(Deb822WhitespaceToken("\n")) 

-

3115 ) 

-

3116 self._token_and_elements.append(self._set_parent(paragraph)) 

-

3117 

-

3118 def remove(self, paragraph): 

-

3119 # type: (Deb822ParagraphElement) -> None 

-

3120 if paragraph.parent_element is not self: 

-

3121 raise ValueError("Paragraph is part of a different file") 

-

3122 node = None 

-

3123 for node in self._token_and_elements.iter_nodes(): 

-

3124 if node.value is paragraph: 

-

3125 break 

-

3126 if node is None: 

-

3127 raise RuntimeError("unable to find paragraph") 

-

3128 self._full_size_cache = None 

-

3129 previous_node = node.previous_node 

-

3130 next_node = node.next_node 

-

3131 self._token_and_elements.remove_node(node) 

-

3132 if next_node is None: 

-

3133 if previous_node and isinstance(previous_node.value, Deb822WhitespaceToken): 

-

3134 self._token_and_elements.remove_node(previous_node) 

-

3135 else: 

-

3136 if isinstance(next_node.value, Deb822WhitespaceToken): 

-

3137 self._token_and_elements.remove_node(next_node) 

-

3138 paragraph.parent_element = None 

-

3139 
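A usage sketch for `remove` (hedged; it relies on the mapping-style paragraph access shown in the doctests above):

    dfile = parse_deb822_file(
        "Source: foo\n\nPackage: libfoo1\n".splitlines(keepends=True)
    )
    for para in list(dfile):
        if "Package" in para:   # drop every binary package paragraph
            dfile.remove(para)
    print(dfile.dump(), end="")  # Source: foo   (the separator newline is removed too)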

-

3140 def _set_parent(self, t): 

-

3141 # type: (TE) -> TE 

-

3142 t.parent_element = self 

-

3143 return t 

-

3144 

-

3145 def position_in_parent(self, *, skip_leading_comments: bool = True) -> Position: 

-

3146 # Recursive base-case 

-

3147 return START_POSITION 

-

3148 

-

3149 def position_in_file(self, *, skip_leading_comments: bool = True) -> Position: 

-

3150 # By definition 

-

3151 return START_POSITION 

-

3152 

-

3153 @overload 

-

3154 def dump( 

-

3155 self, fd # type: IO[bytes] 

-

3156 ): 

-

3157 # type: (...) -> None 

-

3158 pass 

-

3159 

-

3160 @overload 

-

3161 def dump(self): 

-

3162 # type: () -> str 

-

3163 pass 

-

3164 

-

3165 def dump( 

-

3166 self, fd=None # type: Optional[IO[bytes]] 

-

3167 ): 

-

3168 # type: (...) -> Optional[str] 

-

3169 if fd is None: 3169 ↛ 3171line 3169 didn't jump to line 3171, because the condition on line 3169 was never false

-

3170 return "".join(t.text for t in self.iter_tokens()) 

-

3171 for token in self.iter_tokens(): 

-

3172 fd.write(token.text.encode("utf-8")) 

-

3173 return None 

-

3174 
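Per the overloads above, `dump()` returns a str when called without arguments and writes UTF-8 encoded bytes when given a binary file object. A sketch (the output path is hypothetical):

    with open("debian/control.new", "wb") as out_fd:
        deb822_file.dump(out_fd)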

-

3175 

-

3176_combine_error_tokens_into_elements = combine_into_replacement( 

-

3177 Deb822ErrorToken, Deb822ErrorElement 

-

3178) 

-

3179_combine_comment_tokens_into_elements = combine_into_replacement( 

-

3180 Deb822CommentToken, Deb822CommentElement 

-

3181) 

-

3182_combine_vl_elements_into_value_elements = combine_into_replacement( 

-

3183 Deb822ValueLineElement, Deb822ValueElement 

-

3184) 

-

3185_combine_kvp_elements_into_paragraphs = combine_into_replacement( 

-

3186 Deb822KeyValuePairElement, 

-

3187 Deb822ParagraphElement, 

-

3188 constructor=Deb822ParagraphElement.from_kvpairs, 

-

3189) 

-

3190 

-

3191 

-

3192def _parsed_value_render_factory(discard_comments): 

-

3193 # type: (bool) -> Callable[[Deb822ParsedValueElement], str] 

-

3194 return ( 

-

3195 Deb822ParsedValueElement.convert_to_text_without_comments 

-

3196 if discard_comments 

-

3197 else Deb822ParsedValueElement.convert_to_text 

-

3198 ) 

-

3199 

-

3200 

-

3201LIST_SPACE_SEPARATED_INTERPRETATION = ListInterpretation( 

-

3202 whitespace_split_tokenizer, 

-

3203 _parse_whitespace_list_value, 

-

3204 Deb822ParsedValueElement, 

-

3205 Deb822SemanticallySignificantWhiteSpace, 

-

3206 lambda: Deb822SpaceSeparatorToken(" "), 

-

3207 _parsed_value_render_factory, 

-

3208) 

-

3209LIST_COMMA_SEPARATED_INTERPRETATION = ListInterpretation( 

-

3210 comma_split_tokenizer, 

-

3211 _parse_comma_list_value, 

-

3212 Deb822ParsedValueElement, 

-

3213 Deb822CommaToken, 

-

3214 Deb822CommaToken, 

-

3215 _parsed_value_render_factory, 

-

3216) 

-

3217LIST_UPLOADERS_INTERPRETATION = ListInterpretation( 

-

3218 comma_split_tokenizer, 

-

3219 _parse_uploaders_list_value, 

-

3220 Deb822ParsedValueElement, 

-

3221 Deb822CommaToken, 

-

3222 Deb822CommaToken, 

-

3223 _parsed_value_render_factory, 

-

3224) 

-

3225 
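A hedged sketch of how these interpretations are used, assuming the `as_interpreted_dict_view` accessor documented for python-debian's `debian._deb822_repro` (from which this module is vendored):

    view = para.as_interpreted_dict_view(LIST_COMMA_SEPARATED_INTERPRETATION)
    with view["Build-Depends"] as build_depends:
        # Mutations made inside the context manager are written back while
        # preserving the surrounding comments and whitespace.
        build_depends.replace("cdbs", "debhelper-compat (= 13)")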

-

3226 

-

3227def _non_end_of_line_token(v): 

-

3228 # type: (TokenOrElement) -> bool 

-

3229 # Consume tokens until the newline 

-

3230 return not isinstance(v, Deb822WhitespaceToken) or v.text != "\n" 

-

3231 

-

3232 

-

3233def _build_value_line( 

-

3234 token_stream, # type: Iterable[Union[TokenOrElement, Deb822CommentElement]] 

-

3235): 

-

3236 # type: (...) -> Iterable[Union[TokenOrElement, Deb822ValueLineElement]] 

-

3237 """Parser helper - consumes tokens part of a Deb822ValueEntryElement and turns them into one""" 

-

3238 buffered_stream = BufferingIterator(token_stream) 

-

3239 

-

3240 # Deb822ValueLineElement is a bit tricky because of how we handle whitespace 

-

3241 # and comments. 

-

3242 # 

-

3243 # In relation to comments, only continuation lines can have comments. 

-

3244 # If there is a comment before a "K: V" line, then the comment is associated 

-

3245 # with the field rather than the value. 

-

3246 # 

-

3247 # On the whitespace front, we separate syntactically mandatory whitespace 

-

3248 # from optional whitespace. As an example: 

-

3249 # 

-

3250 # """ 

-

3251 # # some comment associated with the Depends field 

-

3252 # Depends:_foo_$ 

-

3253 # # some comment associated with the line containing "bar" 

-

3254 # !________bar_$ 

-

3255 # """ 

-

3256 # 

-

3257 # Where "$" and "!" represents mandatory whitespace (the newline and the first 

-

3258 # space are required for the file to be parsed correctly), where as "_" is 

-

3259 # "optional" whitespace (from a syntactical point of view). 

-

3260 # 

-

3261 # This distinction enable us to facilitate APIs for easy removal/normalization 

-

3262 # of redundant whitespaces without having programmers worry about trashing 

-

3263 # the file. 

-

3264 # 

-

3265 # 

-

3266 

-

3267 comment_element = None 

-

3268 continuation_line_token = None 

-

3269 token = None # type: Optional[TokenOrElement] 

-

3270 

-

3271 for token in buffered_stream: 

-

3272 start_of_value_entry = False 

-

3273 if isinstance(token, Deb822ValueContinuationToken): 

-

3274 continuation_line_token = token 

-

3275 start_of_value_entry = True 

-

3276 token = None 

-

3277 elif isinstance(token, Deb822FieldSeparatorToken): 

-

3278 start_of_value_entry = True 

-

3279 elif isinstance(token, Deb822CommentElement): 

-

3280 next_token = buffered_stream.peek() 

-

3281 # If the next token is a continuation line token, then this comment 

-

3282 # belongs to a value, and we might as well just start the value 

-

3283 # parsing now. 

-

3284 # 

-

3285 # Note that we rely on this behaviour to avoid emitting the comment 

-

3286 # token (failing to do so would cause the comment to appear twice 

-

3287 # in the file). 

-

3288 if isinstance(next_token, Deb822ValueContinuationToken): 

-

3289 start_of_value_entry = True 

-

3290 comment_element = token 

-

3291 token = None 

-

3292 # Use next with None to avoid raising StopIteration inside a generator 

-

3293 # It won't happen, but pylint cannot see that, so we do this instead. 

-

3294 continuation_line_token = cast( 

-

3295 "Deb822ValueContinuationToken", next(buffered_stream, None) 

-

3296 ) 

-

3297 assert continuation_line_token is not None 

-

3298 

-

3299 if token is not None: 

-

3300 yield token 

-

3301 if start_of_value_entry: 

-

3302 tokens_in_value = list(buffered_stream.takewhile(_non_end_of_line_token)) 

-

3303 eol_token = cast("Deb822WhitespaceToken", next(buffered_stream, None)) 

-

3304 assert eol_token is None or eol_token.text == "\n" 

-

3305 leading_whitespace = None 

-

3306 trailing_whitespace = None 

-

3307 # "Depends:\n foo" would cause tokens_in_value to be empty for the 

-

3308 # first "value line" (the empty part between ":" and "\n") 

-

3309 if tokens_in_value: 3309 ↛ 3323line 3309 didn't jump to line 3323, because the condition on line 3309 was never false

-

3310 # Another special-case, "Depends: \n foo" (i.e. space after colon) 

-

3311 # should not introduce an IndexError 

-

3312 if isinstance(tokens_in_value[-1], Deb822WhitespaceToken): 

-

3313 trailing_whitespace = cast( 

-

3314 "Deb822WhitespaceToken", tokens_in_value.pop() 

-

3315 ) 

-

3316 if tokens_in_value and isinstance( 3316 ↛ 3319line 3316 didn't jump to line 3319, because the condition on line 3316 was never true

-

3317 tokens_in_value[0], Deb822WhitespaceToken 

-

3318 ): 

-

3319 leading_whitespace = cast( 

-

3320 "Deb822WhitespaceToken", tokens_in_value[0] 

-

3321 ) 

-

3322 tokens_in_value = tokens_in_value[1:] 

-

3323 yield Deb822ValueLineElement( 

-

3324 comment_element, 

-

3325 continuation_line_token, 

-

3326 leading_whitespace, 

-

3327 tokens_in_value, 

-

3328 trailing_whitespace, 

-

3329 eol_token, 

-

3330 ) 

-

3331 comment_element = None 

-

3332 continuation_line_token = None 

-

3333 

-

3334 

-

3335def _build_field_with_value( 

-

3336 token_stream, # type: Iterable[Union[TokenOrElement, Deb822ValueElement]] 

-

3337): 

-

3338 # type: (...) -> Iterable[Union[TokenOrElement, Deb822KeyValuePairElement]] 

-

3339 buffered_stream = BufferingIterator(token_stream) 

-

3340 for token_or_element in buffered_stream: 

-

3341 start_of_field = False 

-

3342 comment_element = None 

-

3343 if isinstance(token_or_element, Deb822FieldNameToken): 

-

3344 start_of_field = True 

-

3345 elif isinstance(token_or_element, Deb822CommentElement): 

-

3346 comment_element = token_or_element 

-

3347 next_token = buffered_stream.peek() 

-

3348 start_of_field = isinstance(next_token, Deb822FieldNameToken) 

-

3349 if start_of_field: 3349 ↛ 3356line 3349 didn't jump to line 3356, because the condition on line 3349 was never false

-

3350 # Remember to consume the field token 

-

3351 try: 

-

3352 token_or_element = next(buffered_stream) 

-

3353 except StopIteration: # pragma: no cover 

-

3354 raise AssertionError 

-

3355 

-

3356 if start_of_field: 

-

3357 field_name = token_or_element 

-

3358 separator = next(buffered_stream, None) 

-

3359 value_element = next(buffered_stream, None) 

-

3360 if separator is None or value_element is None: 3360 ↛ 3363line 3360 didn't jump to line 3363, because the condition on line 3360 was never true

-

3361 # Early EOF - should not be possible with how the tokenizer works 

-

3362 # right now, but handling it keeps the code future-proof. 

-

3363 if comment_element: 

-

3364 yield comment_element 

-

3365 error_elements = [field_name] 

-

3366 if separator is not None: 

-

3367 error_elements.append(separator) 

-

3368 yield Deb822ErrorElement(error_elements) 

-

3369 return 

-

3370 

-

3371 if isinstance(separator, Deb822FieldSeparatorToken) and isinstance( 3371 ↛ 3382line 3371 didn't jump to line 3382, because the condition on line 3371 was never false

-

3372 value_element, Deb822ValueElement 

-

3373 ): 

-

3374 yield Deb822KeyValuePairElement( 

-

3375 comment_element, 

-

3376 cast("Deb822FieldNameToken", field_name), 

-

3377 separator, 

-

3378 value_element, 

-

3379 ) 

-

3380 else: 

-

3381 # We had a parse error, consume until the newline. 

-

3382 error_tokens = [token_or_element] # type: List[TokenOrElement] 

-

3383 error_tokens.extend(buffered_stream.takewhile(_non_end_of_line_token)) 

-

3384 nl = buffered_stream.peek() 

-

3385 # Take the newline as well if present 

-

3386 if nl and isinstance(nl, Deb822NewlineAfterValueToken): 

-

3387 next(buffered_stream, None) 

-

3388 error_tokens.append(nl) 

-

3389 yield Deb822ErrorElement(error_tokens) 

-

3390 else: 

-

3391 # Token is not part of a field, emit it as-is 

-

3392 yield token_or_element 

-

3393 

-

3394 

-

3395def _abort_on_error_tokens(sequence): 

-

3396 # type: (Iterable[TokenOrElement]) -> Iterable[TokenOrElement] 

-

3397 line_no = 1 

-

3398 for token in sequence: 

-

3399 # We are always called while the sequence consists entirely of tokens 

-

3400 if token.is_error: 3400 ↛ 3401line 3400 didn't jump to line 3401, because the condition on line 3400 was never true

-

3401 error_as_text = token.convert_to_text().replace("\n", "\\n") 

-

3402 raise SyntaxOrParseError( 

-

3403 'Syntax or Parse error on or near line {line_no}: "{error_as_text}"'.format( 

-

3404 error_as_text=error_as_text, line_no=line_no 

-

3405 ) 

-

3406 ) 

-

3407 line_no += token.convert_to_text().count("\n") 

-

3408 yield token 

-

3409 

-

3410 

-

3411def parse_deb822_file( 

-

3412 sequence, # type: Union[Iterable[Union[str, bytes]], str] 

-

3413 *, 

-

3414 accept_files_with_error_tokens=False, # type: bool 

-

3415 accept_files_with_duplicated_fields=False, # type: bool 

-

3416 encoding="utf-8", # type: str 

-

3417): 

-

3418 # type: (...) -> Deb822FileElement 

-

3419 """ 

-

3420 

-

3421 :param sequence: An iterable over lines of str or bytes (an open file for 

-

3422 reading will do). If line endings are provided in the input, then they 

-

3423 must be present on every line (except the last) and will be preserved as-is. 

-

3424 If omitted and the content is at least 2 lines, then the parser will assume 

-

3425 implicit newlines. 

-

3426 :param accept_files_with_error_tokens: If True, files with critical syntax 

-

3427 or parse errors will be returned as "successfully" parsed. Usually, 

-

3428 working on files with this kind of error is not desirable as it is 

-

3429 hard to make sense of such files (and they might in fact not be a deb822 

-

3430 file at all). When set to False (the default) a ValueError is raised if 

-

3431 there is a critical syntax or parse error. 

-

3432 Note that duplicated fields in a paragraph are not considered a critical 

-

3433 parse error by this parser as the implementation can gracefully cope 

-

3434 with these. Use accept_files_with_duplicated_fields to determine if 

-

3435 such files should be accepted. 

-

3436 :param accept_files_with_duplicated_fields: If True, then 

-

3437 files containing paragraphs with duplicated fields will be returned as 

-

3438 "successfully" parsed even though they are invalid according to the 

-

3439 specification. The paragraphs will prefer the first appearance of the 

-

3440 field unless the caller explicitly requests otherwise (e.g., via 

-

3441 Deb822ParagraphElement.configured_view). If False, then this method 

-

3442 will raise a ValueError if any duplicated fields are seen inside any 

-

3443 paragraph. 

-

3444 :param encoding: The encoding to use (this is here to support Deb822-like 

-

3445 APIs, new code should not use this parameter). 

-

3446 """ 

-

3447 

-

3448 if isinstance(sequence, (str, bytes)): 3448 ↛ 3450line 3448 didn't jump to line 3450, because the condition on line 3448 was never true

-

3449 # Match the deb822 API. 

-

3450 sequence = sequence.splitlines(True) 

-

3451 

-

3452 # The order of operations is important here. As an example, 

-

3453 # _build_value_line assumes that all comment tokens have been merged 

-

3454 # into comment elements. Likewise, _build_field_and_value assumes 

-

3455 # that value tokens (along with their comments) have been combined 

-

3456 # into elements. 

-

3457 tokens = tokenize_deb822_file( 

-

3458 sequence, encoding=encoding 

-

3459 ) # type: Iterable[TokenOrElement] 

-

3460 if not accept_files_with_error_tokens: 

-

3461 tokens = _abort_on_error_tokens(tokens) 

-

3462 tokens = _combine_comment_tokens_into_elements(tokens) 

-

3463 tokens = _build_value_line(tokens) 

-

3464 tokens = _combine_vl_elements_into_value_elements(tokens) 

-

3465 tokens = _build_field_with_value(tokens) 

-

3466 tokens = _combine_kvp_elements_into_paragraphs(tokens) 

-

3467 # Combine any free-floating error tokens into error elements. We do 

-

3468 # this last as it enables other parts of the parser to include error 

-

3469 # tokens in their error elements if they discover something is wrong. 

-

3470 tokens = _combine_error_tokens_into_elements(tokens) 

-

3471 

-

3472 deb822_file = Deb822FileElement(LinkedList(tokens)) 

-

3473 

-

3474 if not accept_files_with_duplicated_fields: 

-

3475 for no, paragraph in enumerate(deb822_file): 

-

3476 if isinstance(paragraph, Deb822DuplicateFieldsParagraphElement): 3476 ↛ 3477line 3476 didn't jump to line 3477, because the condition on line 3476 was never true

-

3477 field_names = set() 

-

3478 dup_field = None 

-

3479 for field in paragraph.keys(): 

-

3480 field_name, _, _ = _unpack_key(field) 

-

3481 # assert for mypy 

-

3482 assert isinstance(field_name, str) 

-

3483 if field_name in field_names: 

-

3484 dup_field = field_name 

-

3485 break 

-

3486 field_names.add(field_name) 

-

3487 if dup_field is not None: 

-

3488 msg = 'Duplicate field "{dup_field}" in paragraph number {no}' 

-

3489 raise ValueError(msg.format(dup_field=dup_field, no=no)) 

-

3490 

-

3491 return deb822_file 

-

3492 
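Typical use, as a sketch (the file path is hypothetical; any iterable of str or bytes lines works, per the docstring above):

    with open("debian/control", "rb") as fd:
        deb822_file = parse_deb822_file(fd)
    for paragraph in deb822_file:
        print(paragraph.kvpair_count, "fields")
    print(deb822_file.is_valid_file)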

-

3493 

-

3494if __name__ == "__main__": # pragma: no cover 

-

3495 import doctest 

-

3496 

-

3497 doctest.testmod() 

-
diff --git a/coverage-report/d_e9c451f4ae334f76_tokens_py.html b/coverage-report/d_e9c451f4ae334f76_tokens_py.html
deleted file mode 100644
index 0df9eb4..0000000
--- a/coverage-report/d_e9c451f4ae334f76_tokens_py.html
+++ /dev/null
@@ -1,615 +0,0 @@
- Coverage for src/debputy/lsp/vendoring/_deb822_repro/tokens.py: 83% - 230 statements - coverage.py v7.2.7, created at 2024-04-07 12:14 +0200

1import re 

-

2import sys 

-

3import weakref 

-

4from weakref import ReferenceType 

-

5 

-

6from ._util import BufferingIterator 

-

7from .locatable import ( 

-

8 Locatable, 

-

9 START_POSITION, 

-

10 Range, 

-

11 ONE_CHAR_RANGE, 

-

12 ONE_LINE_RANGE, 

-

13 Position, 

-

14) 

-

15from debian._util import resolve_ref, _strI 

-

16 

-

17try: 

-

18 from typing import Optional, cast, TYPE_CHECKING, Iterable, Union, Dict, Callable 

-

19except ImportError: 

-

20 # pylint: disable=unnecessary-lambda-assignment 

-

21 TYPE_CHECKING = False 

-

22 cast = lambda t, v: v 

-

23 

-

24if TYPE_CHECKING: 

-

25 from .parsing import Deb822Element 

-

26 

-

27 

-

28# Consume whitespace and a single word. 

-

29_RE_WHITESPACE_SEPARATED_WORD_LIST = re.compile( 

-

30 r""" 

-

31 (?P<space_before>\s*) # Consume any whitespace before the word 

-

32 # The space only occurs in practice if the line starts 

-

33 # with space. 

-

34 

-

35 # Optionally consume a word (needed to handle the case 

-

36 # when there are no words left and someone applies this 

-

37 # pattern to the remaining text). This is mostly here as 

-

38 # a fail-safe. 

-

39 

-

40 (?P<word>\S+) # Consume the word (if present) 

-

41 (?P<trailing_whitespace>\s*) # Consume trailing whitespace 

-

42""", 

-

43 re.VERBOSE, 

-

44) 

-

45_RE_COMMA_SEPARATED_WORD_LIST = re.compile( 

-

46 r""" 

-

47 # This regex is slightly complicated by the fact that it should work with 

-

48 # finditer and consume the entire value. 

-

49 # 

-

50 # To do this, we structure the regex so it always starts on a comma (except 

-

51 # for the first iteration, where we permit the absence of a comma) 

-

52 

-

53 (?: # Optional space followed by a mandatory comma unless 

-

54 # it is the start of the "line" (in which case, we 

-

55 # allow the comma to be omitted) 

-

56 ^ 

-

57 | 

-

58 (?: 

-

59 (?P<space_before_comma>\s*) # This space only occurs in practice if the line 

-

60 # starts with space + comma. 

-

61 (?P<comma> ,) 

-

62 ) 

-

63 ) 

-

64 

-

65 # From here it is "optional space, maybe a word and then optional space" again. One reason why 

-

66 # all of it is optional is to gracefully cope with trailing commas. 

-

67 (?P<space_before_word>\s*) 

-

68 (?P<word> [^,\s] (?: [^,]*[^,\s])? )? # "Words" can contain spaces for comma separated list. 

-

69 # But surrounding whitespace is ignored 

-

70 (?P<space_after_word>\s*) 

-

71""", 

-

72 re.VERBOSE, 

-

73) 

-

74 

-

75# From Policy 5.1: 

-

76# 

-

77# The field name is composed of US-ASCII characters excluding control 

-

78# characters, space, and colon (i.e., characters in the ranges U+0021 

-

79# (!) through U+0039 (9), and U+003B (;) through U+007E (~), 

-

80# inclusive). Field names must not begin with the comment character 

-

81# (U+0023 #), nor with the hyphen character (U+002D -). 

-

82# 

-

83# That combines to this regex of questionable readability 

-

84_RE_FIELD_LINE = re.compile( 

-

85 r""" 

-

86 ^ # Start of line 

-

87 (?P<field_name> # Capture group for the field name 

-

88 [\x21\x22\x24-\x2C\x2F-\x39\x3B-\x7F] # First character 

-

89 [\x21-\x39\x3B-\x7F]* # Subsequent characters (if any) 

-

90 ) 

-

91 (?P<separator> : ) 

-

92 (?P<space_before_value> \s* ) 

-

93 (?: # Field values are not mandatory on the same line 

-

94 # as the field name. 

-

95 

-

96 (?P<value> \S(?:.*\S)? ) # Values must start and end on a "non-space" 

-

97 (?P<space_after_value> \s* ) # We can have optional space after the value 

-

98 )? 

-

99""", 

-

100 re.VERBOSE, 

-

101) 

-

102 
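As an illustration of what `_RE_FIELD_LINE` captures on a typical field line (a sketch exercising the named groups above):

    m = _RE_FIELD_LINE.match("Build-Depends: debhelper-compat (= 13)\n")
    assert m is not None
    print(m.group("field_name"))  # Build-Depends
    print(m.group("value"))       # debhelper-compat (= 13)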

-

103 

-

104class Deb822Token(Locatable): 

-

105 """A token is an atomic syntactical element from a deb822 file 

-

106 

-

107 A file is parsed into a series of tokens. If these tokens are converted to 

-

108 text in exactly the same order, you get exactly the same file - bit-for-bit. 

-

109 Accordingly, every bit of text in a file must be assigned to exactly one 

-

110 Deb822Token. 

-

111 """ 

-

112 

-

113 __slots__ = ("_text", "_parent_element", "_token_size", "__weakref__") 

-

114 

-

115 def __init__(self, text): 

-

116 # type: (str) -> None 

-

117 if text == "": # pragma: no cover 

-

118 raise ValueError("Tokens must have content") 

-

119 self._text = text # type: str 

-

120 self._parent_element = None # type: Optional[ReferenceType['Deb822Element']] 

-

121 self._token_size = None # type: Optional[Range] 

-

122 self._verify_token_text() 

-

123 

-

124 def __repr__(self): 

-

125 # type: () -> str 

-

126 return "{clsname}('{text}')".format( 

-

127 clsname=self.__class__.__name__, text=self._text.replace("\n", "\\n") 

-

128 ) 

-

129 

-

130 def _verify_token_text(self): 

-

131 # type: () -> None 

-

132 if "\n" in self._text: 

-

133 is_single_line_token = False 

-

134 if self.is_comment or self.is_error: 

-

135 is_single_line_token = True 

-

136 if not is_single_line_token and not self.is_whitespace: 136 ↛ 137line 136 didn't jump to line 137, because the condition on line 136 was never true

-

137 raise ValueError( 

-

138 "Only whitespace, error and comment tokens may contain newlines" 

-

139 ) 

-

140 if not self.text.endswith("\n"): 140 ↛ 141line 140 didn't jump to line 141, because the condition on line 140 was never true

-

141 raise ValueError("Tokens containing whitespace must end on a newline") 

-

142 if is_single_line_token and "\n" in self.text[:-1]: 142 ↛ 143line 142 didn't jump to line 143, because the condition on line 142 was never true

-

143 raise ValueError( 

-

144 "Comments and error tokens must not contain embedded newlines" 

-

145 " (only end on one)" 

-

146 ) 

-

147 

-

148 @property 

-

149 def is_whitespace(self): 

-

150 # type: () -> bool 

-

151 return False 

-

152 

-

153 @property 

-

154 def is_comment(self): 

-

155 # type: () -> bool 

-

156 return False 

-

157 

-

158 @property 

-

159 def is_error(self): 

-

160 # type: () -> bool 

-

161 return False 

-

162 

-

163 @property 

-

164 def text(self): 

-

165 # type: () -> str 

-

166 return self._text 

-

167 

-

168 # To support callers that want a simple interface for converting tokens and elements to text 

-

169 def convert_to_text(self): 

-

170 # type: () -> str 

-

171 return self._text 

-

172 

-

173 def size(self, *, skip_leading_comments: bool = False) -> Range: 

-

174 # As tokens are an atomic unit 

-

175 token_size = self._token_size 

-

176 if token_size is not None: 

-

177 return token_size 

-

178 token_len = len(self._text) 

-

179 if token_len == 1: 

-

180 # The indirection with `r` because mypy gets confused and thinks that `token_size` 

-

181 # cannot have any type at all. 

-

182 token_size = ONE_CHAR_RANGE if self._text != "\n" else ONE_LINE_RANGE 

-

183 else: 

-

184 new_lines = self._text.count("\n") 

-

185 assert not new_lines or self._text[-1] == "\n" 

-

186 end_pos = Position(new_lines, 0) if new_lines else Position(0, token_len) 

-

187 token_size = Range(START_POSITION, end_pos) 

-

188 self._token_size = token_size 

-

189 return token_size 

-

190 
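# (Illustrative sketch, not part of the original file.) size() measures a
# token in lines/columns; Position, Range, START_POSITION and ONE_LINE_RANGE
# come from the sibling locatable module. Assuming Range implements value
# equality (as the LSP-style types do):
#
#   >>> Deb822Token("foo").size() == Range(START_POSITION, Position(0, 3))
#   True
#
# A one-character token whose text is "\n" gets ONE_LINE_RANGE instead (the
# token_len == 1 branch above).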

191     @property
192     def parent_element(self):
193         # type: () -> Optional[Deb822Element]
194         return resolve_ref(self._parent_element)
195
196     @parent_element.setter
197     def parent_element(self, new_parent):
198         # type: (Optional[Deb822Element]) -> None
199         self._parent_element = (
200             weakref.ref(new_parent) if new_parent is not None else None
201         )
202
203     def clear_parent_if_parent(self, parent):
204         # type: (Deb822Element) -> None
205         if parent is self.parent_element:
206             self._parent_element = None
207
208
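# (Illustrative sketch, not part of the original file.) The invariant that
# _verify_token_text above enforces: only whitespace, comment and error
# tokens may contain a newline, and comment/error tokens may only end on one.
#
#   >>> Deb822Token("a\nb")
#   Traceback (most recent call last):
#     ...
#   ValueError: Only whitespace, error and comment tokens may contain newlines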

209 class Deb822WhitespaceToken(Deb822Token):
210     """The token is a kind of whitespace.
211
212     Some whitespace tokens are critical for the format (such as the Deb822ValueContinuationToken,
213     spaces that separate words in lists separated by spaces or newlines), while other whitespace
214     tokens are truly insignificant (space before a newline, space after a comma in a comma
215     list, etc.).
216     """
217
218     __slots__ = ()
219
220     @property
221     def is_whitespace(self):
222         # type: () -> bool
223         return True
224
225
226 class Deb822SemanticallySignificantWhiteSpace(Deb822WhitespaceToken):
227     """Whitespace that (if removed) would change the meaning of the file (or cause syntax errors)"""
228
229     __slots__ = ()
230
231
232 class Deb822NewlineAfterValueToken(Deb822SemanticallySignificantWhiteSpace):
233     """The newline after a value token.
234
235     If not followed by a continuation token, this also marks the end of the field.
236     """
237
238     __slots__ = ()
239
240     def __init__(self):
241         # type: () -> None
242         super().__init__("\n")
243
244
245 class Deb822ValueContinuationToken(Deb822SemanticallySignificantWhiteSpace):
246     """The whitespace denoting a value spanning an additional line (the first space on a line)"""
247
248     __slots__ = ()
249
250
251 class Deb822SpaceSeparatorToken(Deb822SemanticallySignificantWhiteSpace):
252     """Whitespace between values in a space list (e.g. "Architectures")"""
253
254     __slots__ = ()
255
256
257 class Deb822ErrorToken(Deb822Token):
258     """Token that represents a syntactical error"""
259
260     __slots__ = ()
261
262     @property
263     def is_error(self):
264         # type: () -> bool
265         return True
266
267
268 class Deb822CommentToken(Deb822Token):
269
270     __slots__ = ()
271
272     @property
273     def is_comment(self):
274         # type: () -> bool
275         return True
276
277
278 class Deb822FieldNameToken(Deb822Token):
279
280     __slots__ = ()
281
282     def __init__(self, text):
283         # type: (str) -> None
284         if not isinstance(text, _strI):  # coverage: 284 ↛ 285 (condition never true)
285             text = _strI(sys.intern(text))
286         super().__init__(text)
287
288     @property
289     def text(self):
290         # type: () -> _strI
291         return cast("_strI", self._text)
292
293
294 # The colon after the field name, parenthesis, etc.
295 class Deb822SeparatorToken(Deb822Token):
296
297     __slots__ = ()
298
299
300 class Deb822FieldSeparatorToken(Deb822Token):
301
302     __slots__ = ()
303
304     def __init__(self):
305         # type: () -> None
306         super().__init__(":")
307
308
309 class Deb822CommaToken(Deb822SeparatorToken):
310     """Used by the comma-separated list value parsers to denote a comma between two value tokens."""
311
312     __slots__ = ()
313
314     def __init__(self):
315         # type: () -> None
316         super().__init__(",")
317
318
319 class Deb822PipeToken(Deb822SeparatorToken):
320     """Used in some dependency fields as OR relation"""
321
322     __slots__ = ()
323
324     def __init__(self):
325         # type: () -> None
326         super().__init__("|")
327
328
329 class Deb822ValueToken(Deb822Token):
330     """A field value can be split into multiple "Deb822ValueToken"s (as well as separator tokens)"""
331
332     __slots__ = ()
333
334
335 class Deb822ValueDependencyToken(Deb822Token):
336     """Package name, architecture name, a version number, or a profile name in a dependency field"""
337
338     __slots__ = ()
339
340
341 class Deb822ValueDependencyVersionRelationOperatorToken(Deb822Token):
342
343     __slots__ = ()
344
345
346 def tokenize_deb822_file(sequence, encoding="utf-8"):
347     # type: (Iterable[Union[str, bytes]], str) -> Iterable[Deb822Token]
348     """Tokenize a deb822 file
349
350     :param sequence: An iterable of lines (a file open for reading will do)
351     :param encoding: The encoding to use (this is here to support Deb822-like
352        APIs, new code should not use this parameter).
353     """
354     current_field_name = None
355     field_name_cache = {}  # type: Dict[str, _strI]
356
357     def _normalize_input(s):
358         # type: (Iterable[Union[str, bytes]]) -> Iterable[str]
359         for x in s:
360             if isinstance(x, bytes):  # coverage: 360 ↛ 361 (condition never true)
361                 x = x.decode(encoding)
362             if not x.endswith("\n"):
363                 # We always end on a newline because it makes a lot of code simpler. The pain
364                 # points relate to mutations that add content after the last field. Sadly, these
365                 # mutations can happen via adding fields, reordering fields, etc. and are too hard
366                 # to track to make it worth supporting the special case of a missing newline at
367                 # the end of the file.
368                 x += "\n"
369             yield x
370
371     text_stream = BufferingIterator(
372         _normalize_input(sequence)
373     )  # type: BufferingIterator[str]
374
375     for line in text_stream:
376         if line.isspace():
377             if current_field_name:
378                 # Blank lines terminate fields
379                 current_field_name = None
380
381             # If there are multiple whitespace-only lines, we combine them
382             # into one token.
383             r = list(text_stream.takewhile(str.isspace))
384             if r:  # coverage: 384 ↛ 385 (condition never true)
385                 line += "".join(r)
386
387             # whitespace tokens are likely to have duplicate cases (like
388             # single newline tokens), so we intern the strings here.
389             yield Deb822WhitespaceToken(sys.intern(line))
390             continue
391
392         if line[0] == "#":
393             yield Deb822CommentToken(line)
394             continue
395
396         if line[0] in (" ", "\t"):
397             if current_field_name is not None:  # coverage: 397 ↛ 407 (condition never false)
398                 # We emit a separate whitespace token for the newline as it makes some
399                 # things easier later (see _build_value_line)
400                 leading = sys.intern(line[0])
401                 # Pull out the leading space and newline
402                 line = line[1:-1]
403                 yield Deb822ValueContinuationToken(leading)
404                 yield Deb822ValueToken(line)
405                 yield Deb822NewlineAfterValueToken()
406             else:
407                 yield Deb822ErrorToken(line)
408             continue
409
410         field_line_match = _RE_FIELD_LINE.match(line)
411         if field_line_match:  # coverage: 411 ↛ 455 (condition never false)
412             # The line is a field, which means there is a bit to unpack
413             # - note that by definition, leading and trailing whitespace is insignificant
414             # on the value part directly after the field separator
415             (field_name, _, space_before, value, space_after) = (
416                 field_line_match.groups()
417             )
418
419             current_field_name = field_name_cache.get(field_name)
420
421             if value is None or value == "":  # coverage: 421 ↛ 424
422                 # If there is no value, then merge the two space elements into space_after
423                 # as it makes it easier to handle the newline.
424                 space_after = (
425                     space_before + space_after if space_after else space_before
426                 )
427                 space_before = ""
428
429             if space_after:  # coverage: 429 ↛ 435 (condition never false)
430                 # We emit a separate whitespace token for the newline as it makes some
431                 # things easier later (see _build_value_line)
432                 if space_after.endswith("\n"):  # coverage: 432 ↛ 435 (condition never false)
433                     space_after = space_after[:-1]
434
435             if current_field_name is None:  # coverage: 435 ↛ 443 (condition never false)
436                 field_name = sys.intern(field_name)
437                 current_field_name = _strI(field_name)
438                 field_name_cache[field_name] = current_field_name
439
440             # We use current_field_name from here as it is a _strI.
441             # Delete field_name to avoid accidentally using it and getting bugs
442             # that should not happen.
443             del field_name
444
445             yield Deb822FieldNameToken(current_field_name)
446             yield Deb822FieldSeparatorToken()
447             if space_before:
448                 yield Deb822WhitespaceToken(sys.intern(space_before))
449             if value:  # coverage: 449 ↛ 451 (condition never false)
450                 yield Deb822ValueToken(value)
451             if space_after:
452                 yield Deb822WhitespaceToken(sys.intern(space_after))
453             yield Deb822NewlineAfterValueToken()
454         else:
455             yield Deb822ErrorToken(line)
456
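# (Illustrative sketch, not part of the original file.) Tokenizing a small
# field with a continuation line; every character of the input is covered by
# exactly one token, so "".join(t.text for t in tokens) round-trips the file:
#
#   >>> lines = ["Package: foo\n", " bar\n"]
#   >>> [type(t).__name__ for t in tokenize_deb822_file(lines)]
#   ['Deb822FieldNameToken', 'Deb822FieldSeparatorToken', 'Deb822WhitespaceToken',
#    'Deb822ValueToken', 'Deb822NewlineAfterValueToken', 'Deb822ValueContinuationToken',
#    'Deb822ValueToken', 'Deb822NewlineAfterValueToken']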

457
458 def _value_line_tokenizer(func):
459     # type: (Callable[[str], Iterable[Deb822Token]]) -> (Callable[[str], Iterable[Deb822Token]])
460     def impl(v):
461         # type: (str) -> Iterable[Deb822Token]
462         first_line = True
463         for no, line in enumerate(v.splitlines(keepends=True)):
464             assert not v.isspace() or no == 0
465             if line.startswith("#"):
466                 yield Deb822CommentToken(line)
467                 continue
468             has_newline = False
469             continuation_line_marker = None
470             if not first_line:
471                 continuation_line_marker = line[0]
472                 line = line[1:]
473             first_line = False
474             if line.endswith("\n"):
475                 has_newline = True
476                 line = line[:-1]
477             if continuation_line_marker is not None:
478                 yield Deb822ValueContinuationToken(sys.intern(continuation_line_marker))
479             yield from func(line)
480             if has_newline:
481                 yield Deb822NewlineAfterValueToken()
482
483     return impl
484
485
486 @_value_line_tokenizer
487 def whitespace_split_tokenizer(v):
488     # type: (str) -> Iterable[Deb822Token]
489     assert "\n" not in v
490     for match in _RE_WHITESPACE_SEPARATED_WORD_LIST.finditer(v):
491         space_before, word, space_after = match.groups()
492         if space_before:
493             yield Deb822SpaceSeparatorToken(sys.intern(space_before))
494         yield Deb822ValueToken(word)
495         if space_after:
496             yield Deb822SpaceSeparatorToken(sys.intern(space_after))
497
498
499 @_value_line_tokenizer
500 def comma_split_tokenizer(v):
501     # type: (str) -> Iterable[Deb822Token]
502     assert "\n" not in v
503     for match in _RE_COMMA_SEPARATED_WORD_LIST.finditer(v):
504         space_before_comma, comma, space_before_word, word, space_after_word = (
505             match.groups()
506         )
507         if space_before_comma:
508             yield Deb822WhitespaceToken(sys.intern(space_before_comma))
509         if comma:
510             yield Deb822CommaToken()
511         if space_before_word:
512             yield Deb822WhitespaceToken(sys.intern(space_before_word))
513         if word:
514             yield Deb822ValueToken(word)
515         if space_after_word:
516             yield Deb822WhitespaceToken(sys.intern(space_after_word))
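# (Illustrative sketch, not part of the original file.) The decorated
# tokenizers above take a whole field value, continuation lines included:
#
#   >>> [t.text for t in comma_split_tokenizer("foo, bar")]
#   ['foo', ',', ' ', 'bar']
#   >>> [type(t).__name__ for t in comma_split_tokenizer("foo,\n bar")]
#   ['Deb822ValueToken', 'Deb822CommaToken', 'Deb822NewlineAfterValueToken',
#    'Deb822ValueContinuationToken', 'Deb822ValueToken']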
diff --git a/coverage-report/d_e9c451f4ae334f76_types_py.html b/coverage-report/d_e9c451f4ae334f76_types_py.html
deleted file mode 100644
index f12c5ae..0000000
--- a/coverage-report/d_e9c451f4ae334f76_types_py.html
+++ /dev/null
@@ -1,192 +0,0 @@
Coverage for src/debputy/lsp/vendoring/_deb822_repro/types.py: 85% (26 statements)
coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1 try:
2     from typing import TypeVar, Union, Tuple, List, Callable, Iterator, TYPE_CHECKING
3
4     if TYPE_CHECKING:
5         from .tokens import Deb822Token, Deb822FieldNameToken
6         from .parsing import (
7             Deb822Element,
8             Deb822CommentElement,
9             Deb822ParsedValueElement,
10         )
11         from .formatter import FormatterContentToken
12
13     TokenOrElement = Union["Deb822Element", "Deb822Token"]
14     TE = TypeVar("TE", bound=TokenOrElement)
15
16     # Used as a resulting element for "mapping" functions that map TE -> R (see _combine_parts)
17     R = TypeVar("R", bound="Deb822Element")
18
19     VE = TypeVar("VE", bound="Deb822Element")
20
21     ST = TypeVar("ST", bound="Deb822Token")
22
23     # Internal type for part of the paragraph key. Used to facilitate _unpack_key.
24     ParagraphKeyBase = Union["Deb822FieldNameToken", str]
25
26     ParagraphKey = Union[ParagraphKeyBase, Tuple[str, int]]
27
28     Commentish = Union[List[str], "Deb822CommentElement"]
29
30     FormatterCallback = Callable[
31         [str, "FormatterContentToken", Iterator["FormatterContentToken"]],
32         Iterator[Union["FormatterContentToken", str]],
33     ]
34     try:
35         # Set __doc__ attributes if possible
36         TE.__doc__ = """
37         Generic "Token or Element" type
38         """
39         R.__doc__ = """
40         For internal usage in _deb822_repro
41         """
42         VE.__doc__ = """
43         Value type/element in a list interpretation of a field value
44         """
45         ST.__doc__ = """
46         Separator type/token in a list interpretation of a field value
47         """
48         ParagraphKeyBase.__doc__ = """
49         For internal usage in _deb822_repro
50         """
51         ParagraphKey.__doc__ = """
52         Anything accepted as a key for a paragraph field lookup. The simple case being
53         a str. Alternative variants are mostly interesting for paragraphs with repeated
54         fields (to enable unambiguous lookups)
55         """
56         Commentish.__doc__ = """
57         Anything accepted as input for a Comment. The simple case is the list
58         of string (each element being a line of comment). The alternative format is
59         there to enable reuse of an existing element (e.g. to avoid "unpacking"
60         only to "re-pack" an existing comment element).
61         """
62         FormatterCallback.__doc__ = """\
63         Formatter callback used with the round-trip safe parser
64
65         See debian._repro_deb822.formatter.format_field for details
66         """
67     except AttributeError:
68         # Python 3.5 does not allow update to the __doc__ attribute - ignore that
69         pass
70 except ImportError:
71     pass
72
73
74 class AmbiguousDeb822FieldKeyError(KeyError):
75     """Specialized version of KeyError to denote a valid but ambiguous field name
76
77     This exception occurs if:
78       * the field is accessed via a str on a configured view that does not automatically
79         resolve ambiguous field names (see Deb822ParagraphElement.configured_view), AND
80       * a concrete paragraph contains a repeated field (which is not valid in deb822,
81         but the module supports parsing it)
82
83     Note that the default is to automatically resolve ambiguous fields. Accordingly,
84     you will only see this exception if you have "opted in" on wanting to know that
85     the lookup was ambiguous.
86
87     The ambiguity can be resolved by using a tuple of (<field-name>, <field-index>)
88     instead of <field-name>.
89     """
90
91
92 class SyntaxOrParseError(ValueError):
93     """Specialized version of ValueError for syntax/parse errors."""
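# (Illustrative sketch, not part of the original file.) How ParagraphKey and
# AmbiguousDeb822FieldKeyError fit together, with the paragraph API assumed
# from the sibling parsing module: on a strict configured view, looking up a
# repeated field by plain name raises, while a (name, index) tuple is always
# unambiguous.
#
#   >>> # paragraph["Depends"]        may raise AmbiguousDeb822FieldKeyError
#   >>> # paragraph[("Depends", 1)]   selects the second "Depends" field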
diff --git a/coverage-report/deb_materialization_py.html b/coverage-report/deb_materialization_py.html
deleted file mode 100644
index bbd6295..0000000
--- a/coverage-report/deb_materialization_py.html
+++ /dev/null
@@ -1,110 +0,0 @@
Coverage for deb_materialization.py: 0% (3 statements)
coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1 #!/usr/bin/python3 -B
2 import pathlib
3 import sys
4
5 DEBPUTY_ROOT_DIR = pathlib.Path(__file__).parent  # TODO: Subst during install
6
7 if __name__ == '__main__':
8     # setup PYTHONPATH: add our installation directory.
9     sys.path.insert(0, str(DEBPUTY_ROOT_DIR))
10     from debputy.commands.deb_materialization import main
11     main()
diff --git a/coverage-report/deb_packer_py.html b/coverage-report/deb_packer_py.html
deleted file mode 100644
index 505e60e..0000000
--- a/coverage-report/deb_packer_py.html
+++ /dev/null
@@ -1,110 +0,0 @@
Coverage for deb_packer.py: 0% (3 statements)
coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
1 #!/usr/bin/python3 -B
2 import pathlib
3 import sys
4
5 DEBPUTY_ROOT_DIR = pathlib.Path(__file__).parent  # TODO: Subst during install
6
7 if __name__ == '__main__':
8     # setup PYTHONPATH: add our installation directory.
9     sys.path.insert(0, str(DEBPUTY_ROOT_DIR))
10     from debputy.commands.deb_packer import main
11     main()
diff --git a/coverage-report/favicon_32.png b/coverage-report/favicon_32.png
deleted file mode 100644
index 8649f04..0000000
Binary files a/coverage-report/favicon_32.png and /dev/null differ
diff --git a/coverage-report/index.html b/coverage-report/index.html
deleted file mode 100644
index ca54ec0..0000000
--- a/coverage-report/index.html
+++ /dev/null
@@ -1,1118 +0,0 @@
Coverage report: 62%
coverage.py v7.2.7, created at 2024-04-07 12:14 +0200
Module  statements  missing  excluded  branches  partial  coverage
deb_materialization.py  3  3  4  0  0  0%
deb_packer.py  3  3  4  0  0  0%
debputy/plugins/gnome.py  38  1  0  12  1  96%
debputy/plugins/numpy3.py  34  6  0  10  0  86%
debputy/plugins/perl_openssl.py  20  4  0  4  0  75%
self-hosting-plugins/debputy_self_hosting.py  13  3  0  2  1  73%
self-hosting-plugins/debputy_self_hosting_test.py  5  0  0  0  0  100%
src/debputy/__init__.py  7  1  0  2  1  78%
src/debputy/_deb_options_profiles.py  27  2  1  18  1  93%
src/debputy/_manifest_constants.py  37  0  0  0  0  100%
src/debputy/architecture_support.py  107  5  0  56  1  95%
src/debputy/builtin_manifest_rules.py  79  10  0  36  4  86%
src/debputy/commands/__init__.py  0  0  0  0  0  100%
src/debputy/commands/deb_materialization.py  236  207  2  104  0  9%
src/debputy/commands/deb_packer.py  197  81  2  54  7  58%
src/debputy/commands/debputy_cmd/__init__.py  0  0  0  0  0  100%
src/debputy/commands/debputy_cmd/context.py  283  152  4  97  2  42%
src/debputy/commands/debputy_cmd/dc_util.py  7  4  0  4  0  27%
src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py  65  50  0  24  0  26%
src/debputy/commands/debputy_cmd/output.py  191  150  0  86  0  17%
src/debputy/commands/debputy_cmd/plugin_cmds.py  541  463  0  287  1  13%
src/debputy/deb_packaging_support.py  799  666  1  476  1  13%
src/debputy/debhelper_emulation.py  143  36  0  69  3  73%
src/debputy/dh_migration/__init__.py  0  0  0  0  0  100%
src/debputy/dh_migration/migration.py  192  172  1  114  0  7%
src/debputy/dh_migration/migrators.py  7  0  0  0  0  100%
src/debputy/dh_migration/migrators_impl.py  669  114  0  395  39  81%
src/debputy/dh_migration/models.py  86  11  0  32  4  84%
src/debputy/elf_util.py  100  17  0  45  12  76%
src/debputy/exceptions.py  50  5  2  12  0  92%
src/debputy/filesystem_scan.py  1104  268  1  548  74  74%
src/debputy/highlevel_manifest.py  801  237  3  395  52  67%
src/debputy/highlevel_manifest_parser.py  253  71  2  120  13  68%
src/debputy/installations.py  499  157  5  291  36  65%
src/debputy/intermediate_manifest.py  172  52  0  98  22  62%
src/debputy/interpreter.py  82  2  6  40  3  96%
src/debputy/linting/__init__.py  0  0  0  0  0  100%
src/debputy/linting/lint_impl.py  152  125  0  64  0  12%
src/debputy/linting/lint_util.py  117  66  7  48  1  42%
src/debputy/lsp/__init__.py  0  0  0  0  0  100%
src/debputy/lsp/debputy_ls.py  48  15  0  18  2  74%
src/debputy/lsp/lsp_debian_changelog.py  108  81  0  28  0  21%
src/debputy/lsp/lsp_debian_control.py  216  53  0  102  14  72%
src/debputy/lsp/lsp_debian_control_reference_data.py  335  123  6  124  13  59%
src/debputy/lsp/lsp_debian_copyright.py  173  137  0  80  0  18%
src/debputy/lsp/lsp_debian_debputy_manifest.py  467  95  0  234  43  77%
src/debputy/lsp/lsp_debian_rules.py  188  146  0  82  0  18%
src/debputy/lsp/lsp_debian_tests_control.py  160  124  0  74  0  20%
src/debputy/lsp/lsp_dispatch.py  82  50  0  30  0  43%
src/debputy/lsp/lsp_features.py  107  38  2  42  6  57%
src/debputy/lsp/lsp_generic_deb822.py  203  101  0  84  11  46%
src/debputy/lsp/quickfixes.py  63  32  0  20  0  42%
src/debputy/lsp/spellchecking.py  152  42  2  70  14  71%
src/debputy/lsp/text_edit.py  66  57  0  28  0  10%
src/debputy/lsp/text_util.py  59  18  0  20  2  67%
src/debputy/lsp/vendoring/__init__.py  0  0  0  0  0  100%
src/debputy/lsp/vendoring/_deb822_repro/__init__.py  3  0  1  0  0  100%
src/debputy/lsp/vendoring/_deb822_repro/_util.py  154  62  3  70  5  57%
src/debputy/lsp/vendoring/_deb822_repro/formatter.py  128  24  0  78  12  80%
src/debputy/lsp/vendoring/_deb822_repro/locatable.py  122  11  5  58  5  90%
src/debputy/lsp/vendoring/_deb822_repro/parsing.py  1464  541  30  709  80  59%
src/debputy/lsp/vendoring/_deb822_repro/tokens.py  230  32  4  108  13  83%
src/debputy/lsp/vendoring/_deb822_repro/types.py  26  4  4  0  0  85%
src/debputy/maintscript_snippet.py  87  28  0  44  6  63%
src/debputy/manifest_conditions.py  134  43  2  38  3  65%
src/debputy/manifest_parser/__init__.py  0  0  0  0  0  100%
src/debputy/manifest_parser/base_types.py  213  33  9  104  7  84%
src/debputy/manifest_parser/declarative_parser.py  781  161  0  446  70  76%
src/debputy/manifest_parser/exceptions.py  5  0  0  0  0  100%
src/debputy/manifest_parser/mapper_code.py  32  4  0  10  2  86%
src/debputy/manifest_parser/parser_data.py  54  7  12  30  2  85%
src/debputy/manifest_parser/parser_doc.py  132  25  0  85  9  79%
src/debputy/manifest_parser/util.py  192  18  3  88  12  89%
src/debputy/package_build/__init__.py  0  0  0  0  0  100%
src/debputy/package_build/assemble_deb.py  98  79  0  37  0  14%
src/debputy/packager_provided_files.py  140  22  0  82  14  84%
src/debputy/packages.py  167  76  2  96  6  51%
src/debputy/packaging/__init__.py  0  0  0  0  0  100%
src/debputy/packaging/alternatives.py  75  15  0  36  12  74%
src/debputy/packaging/debconf_templates.py  32  19  0  8  0  32%
src/debputy/packaging/makeshlibs.py  182  139  2  80  2  18%
src/debputy/path_matcher.py  279  66  3  142  13  72%
src/debputy/plugin/__init__.py  0  0  0  0  0  100%
src/debputy/plugin/api/__init__.py  3  0  0  0  0  100%
src/debputy/plugin/api/example_processing.py  62  4  0  30  0  96%
src/debputy/plugin/api/feature_set.py  35  8  0  10  0  73%
src/debputy/plugin/api/impl.py  753  302  2  320  52  55%
src/debputy/plugin/api/impl_types.py  526  98  6  187  23  78%
src/debputy/plugin/api/plugin_parser.py  35  0  0  2  0  100%
src/debputy/plugin/api/spec.py  282  36  66  143  7  87%
src/debputy/plugin/api/test_api/__init__.py  3  0  0  0  0  100%
src/debputy/plugin/api/test_api/test_impl.py  296  43  0  132  25  82%
src/debputy/plugin/api/test_api/test_spec.py  79  0  11  16  0  100%
src/debputy/plugin/debputy/__init__.py  0  0  0  0  0  100%
src/debputy/plugin/debputy/binary_package_rules.py  173  23  0  58  6  82%
src/debputy/plugin/debputy/debputy_plugin.py  78  0  0  4  0  100%
src/debputy/plugin/debputy/discard_rules.py  34  1  0  12  1  96%
src/debputy/plugin/debputy/manifest_root_rules.py  57  9  2  16  2  79%
src/debputy/plugin/debputy/metadata_detectors.py  228  6  0  140  7  96%
src/debputy/plugin/debputy/package_processors.py  168  70  0  100  6  54%
src/debputy/plugin/debputy/paths.py  4  0  0  0  0  100%
src/debputy/plugin/debputy/private_api.py  541  72  2  138  42  82%
src/debputy/plugin/debputy/service_management.py  163  20  2  95  17  82%
src/debputy/plugin/debputy/shlib_metadata_detectors.py  17  0  0  6  0  100%
src/debputy/plugin/debputy/strip_non_determinism.py  109  31  1  48  9  68%
src/debputy/plugin/debputy/types.py  7  0  0  2  0  100%
src/debputy/substitution.py  153  20  6  62  8  85%
src/debputy/transformation_rules.py  271  57  2  119  24  73%
src/debputy/types.py  3  0  3  0  0  100%
src/debputy/util.py  426  136  9  192  31  65%
src/debputy/version.py  38  9  0  6  2  75%
src/debputy/yaml/__init__.py  3  0  0  0  0  100%
src/debputy/yaml/compat.py  9  4  0  0  0  56%
Total  18832  6614  247  8766  919  62%
diff --git a/coverage-report/keybd_closed.png b/coverage-report/keybd_closed.png
deleted file mode 100644
index 0a2e112..0000000
Binary files a/coverage-report/keybd_closed.png and /dev/null differ
diff --git a/coverage-report/keybd_open.png b/coverage-report/keybd_open.png
deleted file mode 100644
index 8bc77cc..0000000
Binary files a/coverage-report/keybd_open.png and /dev/null differ
diff --git a/coverage-report/status.json b/coverage-report/status.json
deleted file mode 100644
index 123f3f6..0000000
--- a/coverage-report/status.json
+++ /dev/null
@@ -1 +0,0 @@
[one line of generated JSON: coverage.py 7.2.7 status data ("format": 2) holding per-module file hashes and the same statement/branch counts as the index table above]
\ No newline at end of file
diff --git a/coverage-report/style.css b/coverage-report/style.css
deleted file mode 100644
index 11b24c4..0000000
--- a/coverage-report/style.css
+++ /dev/null
@@ -1,309 +0,0 @@
@charset "UTF-8";
/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */
/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */
/* Don't edit this .css file. Edit the .scss file instead! */
[the remaining lines are coverage.py's stock stylesheet for the generated report pages]
- -@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t:hover { background: #532323; } } - -#source p.run .t { border-left: 0.2em solid #00dd00; } - -#source p.run.show_run .t { background: #dfd; } - -@media (prefers-color-scheme: dark) { #source p.run.show_run .t { background: #373d29; } } - -#source p.run.show_run .t:hover { background: #d2f2d2; } - -@media (prefers-color-scheme: dark) { #source p.run.show_run .t:hover { background: #404633; } } - -#source p.exc .t { border-left: 0.2em solid #808080; } - -#source p.exc.show_exc .t { background: #eee; } - -@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t { background: #333; } } - -#source p.exc.show_exc .t:hover { background: #e2e2e2; } - -@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t:hover { background: #3c3c3c; } } - -#source p.par .t { border-left: 0.2em solid #bbbb00; } - -#source p.par.show_par .t { background: #ffa; } - -@media (prefers-color-scheme: dark) { #source p.par.show_par .t { background: #650; } } - -#source p.par.show_par .t:hover { background: #f2f2a2; } - -@media (prefers-color-scheme: dark) { #source p.par.show_par .t:hover { background: #6d5d0c; } } - -#source p .r { position: absolute; top: 0; right: 2.5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; } - -#source p .annotate { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; color: #666; padding-right: .5em; } - -@media (prefers-color-scheme: dark) { #source p .annotate { color: #ddd; } } - -#source p .annotate.short:hover ~ .long { display: block; } - -#source p .annotate.long { width: 30em; right: 2.5em; } - -#source p input { display: none; } - -#source p input ~ .r label.ctx { cursor: pointer; border-radius: .25em; } - -#source p input ~ .r label.ctx::before { content: "▶ "; } - -#source p input ~ .r label.ctx:hover { background: #e8f4ff; color: #666; } - -@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { background: #0f3a42; } } - -@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { color: #aaa; } } - -#source p input:checked ~ .r label.ctx { background: #d0e8ff; color: #666; border-radius: .75em .75em 0 0; padding: 0 .5em; margin: -.25em 0; } - -@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { background: #056; } } - -@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { color: #aaa; } } - -#source p input:checked ~ .r label.ctx::before { content: "▼ "; } - -#source p input:checked ~ .ctxs { padding: .25em .5em; overflow-y: scroll; max-height: 10.5em; } - -#source p label.ctx { color: #999; display: inline-block; padding: 0 .5em; font-size: .8333em; } - -@media (prefers-color-scheme: dark) { #source p label.ctx { color: #777; } } - -#source p .ctxs { display: block; max-height: 0; overflow-y: hidden; transition: all .2s; padding: 0 .5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; white-space: nowrap; background: #d0e8ff; border-radius: .25em; margin-right: 1.75em; text-align: right; } - -@media (prefers-color-scheme: dark) { #source p .ctxs { background: #056; } } - -#index { font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-size: 0.875em; } - -#index table.index { margin-left: -.5em; } - -#index td, #index th { text-align: right; width: 5em; padding: .25em .5em; border-bottom: 1px 
solid #eee; } - -@media (prefers-color-scheme: dark) { #index td, #index th { border-color: #333; } } - -#index td.name, #index th.name { text-align: left; width: auto; } - -#index th { font-style: italic; color: #333; cursor: pointer; } - -@media (prefers-color-scheme: dark) { #index th { color: #ddd; } } - -#index th:hover { background: #eee; } - -@media (prefers-color-scheme: dark) { #index th:hover { background: #333; } } - -#index th[aria-sort="ascending"], #index th[aria-sort="descending"] { white-space: nowrap; background: #eee; padding-left: .5em; } - -@media (prefers-color-scheme: dark) { #index th[aria-sort="ascending"], #index th[aria-sort="descending"] { background: #333; } } - -#index th[aria-sort="ascending"]::after { font-family: sans-serif; content: " ↑"; } - -#index th[aria-sort="descending"]::after { font-family: sans-serif; content: " ↓"; } - -#index td.name a { text-decoration: none; color: inherit; } - -#index tr.total td, #index tr.total_dynamic td { font-weight: bold; border-top: 1px solid #ccc; border-bottom: none; } - -#index tr.file:hover { background: #eee; } - -@media (prefers-color-scheme: dark) { #index tr.file:hover { background: #333; } } - -#index tr.file:hover td.name { text-decoration: underline; color: inherit; } - -#scroll_marker { position: fixed; z-index: 3; right: 0; top: 0; width: 16px; height: 100%; background: #fff; border-left: 1px solid #eee; will-change: transform; } - -@media (prefers-color-scheme: dark) { #scroll_marker { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { #scroll_marker { border-color: #333; } } - -#scroll_marker .marker { background: #ccc; position: absolute; min-height: 3px; width: 100%; } - -@media (prefers-color-scheme: dark) { #scroll_marker .marker { background: #444; } } diff --git a/debputy.pod b/debputy.pod index 6017bf5..933b6d0 100644 --- a/debputy.pod +++ b/debputy.pod @@ -274,6 +274,54 @@ Folding ranges (multi-line comments). Note these features are subject to the editor supporting them, correct language IDs being passed to B<debputy>, etc. +Options for this subcommand + +=over 4 + +=item B<--ignore-language-ids> + +When provided, B<debputy> will ignore any language ID that the editor provides for any file. Instead, B<debputy> +will only rely on the file name for determining how to interpret the file content. + +Since B<debputy> supports multiple file formats, it needs to know what kind of file it is working with. The +editor is supposed to provide this via a "Language ID" attribute. This enables you as a user in the editor +to override the file format and have proper editor support no matter the filename. Unfortunately, most Debian +packaging files do not have a language ID assigned in the LSP specification, so editors either provide a +custom language ID or no custom language ID at all (that is, an empty string). + +When the editor does not provide a language ID for a file, B<debputy> will, since 0.1.25, automatically attempt +to derive the language from the filename. With this option (introduced in 0.1.29), B<debputy> will always +derive the language from the filename even if the editor provided a language ID. This can be helpful if your +editor is providing language IDs that B<debputy> does not recognize. + +As an example, in B<emacs> with B<eglot> the language ID is derived from the name of the buffer's major mode. If +you tried to use B<eglot> with a major mode that B<debputy> does not recognize, then without this +option B<debputy> would "silently" do nothing. With this option, it would have worked provided the filename +matched B<debputy>'s expectation, no matter the major mode.
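(Editorial aside, not part of the patch: a minimal sketch of the filename-based derivation, mirroring the determine_language_id logic added to src/debputy/lsp/debputy_ls.py later in this patch. Everything from the last "debian/" path component onward becomes the language hint; otherwise the basename is used.)

    # Minimal sketch of filename-based language derivation, mirroring
    # determine_language_id() from this patch.
    import os

    def guess_language_id(path: str) -> str:
        try:
            last_idx = path.rindex("debian/")
        except ValueError:
            # No "debian/" component; fall back to the basename.
            return os.path.basename(path)
        return path[last_idx:]

    assert guess_language_id("/src/pkg/debian/control") == "debian/control"
    assert guess_language_id("/tmp/scratch/control") == "control"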
+ +On the downside, B<debputy> will not provide correct advice unless the path matches F<< .../debian/I<filename> >>. +This can cause issues with some setups where the debian directory is implicit, such as some "packaging-only" repos +or some editor scratch pads. + +=item B<--tcp> or B<--ws> + +By default, the B<debputy> language server will use B<stdio> for communication with the editor. These options provide +either the TCP integration mode (B<--tcp>) or the websocket integration mode (B<--ws>). In these modes, the B<--host> +and B<--port> options can be used to choose the bind address. + +These options are mutually exclusive. + +The B<--ws> option requires the B Debian package. + +=item B<--host> I, B<--port> I + +With B<--tcp> or B<--ws>, these options determine the bind address. The default is 127.0.0.1 for the host and 2087 for +the port. + +In integration modes that do not need a bind address (such as the B<stdio> mode), these options are ignored. + +=back + =item lsp editor-config B Provide an example configuration glue for using the B<debputy> LSP server with the given editor diff --git a/pyproject.toml b/pyproject.toml index 73119d7..f0e8427 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,3 +43,6 @@ testpaths = [ "tests", "self-hosting-plugins", ] + +[tool.mypy] +mypy_path = "typing-stubs" diff --git a/src/debputy/commands/debputy_cmd/__main__.py b/src/debputy/commands/debputy_cmd/__main__.py index 27edf49..1a7a737 100644 --- a/src/debputy/commands/debputy_cmd/__main__.py +++ b/src/debputy/commands/debputy_cmd/__main__.py @@ -71,6 +71,7 @@ except ImportError: from debputy.version import __version__ from debputy.filesystem_scan import ( FSROOverlay, + FSRootDir, ) from debputy.plugin.api.impl_types import ( PackagerProvidedFileClassSpec, @@ -754,7 +755,8 @@ def _dh_integration_generate_debs(context: CommandContext) -> None: continue # Ensure all fs's are read-only before we enable cross package checks. # This ensures that no metadata detector will ever see a read-write FS - cast("FSRootDir", binary_data.fs_root).is_read_write = False + pkg_fs_root: "FSRootDir" = cast("FSRootDir", binary_data.fs_root) + pkg_fs_root.is_read_write = False package_data_table.enable_cross_package_checks = True assemble_debs( @@ -799,7 +801,7 @@ _POST_FORMATTING_REWRITE = { def _fake_PPFClassSpec( debputy_plugin_metadata: DebputyPluginMetadata, stem: str, - doc_uris: Sequence[str], + doc_uris: Optional[Sequence[str]], install_pattern: Optional[str], *, default_priority: Optional[int] = None, @@ -978,7 +980,7 @@ def _resolve_debhelper_config_files( post_formatting_rewrite=post_formatting_rewrite, packageless_is_fallback_for_all_packages=packageless_is_fallback_for_all_packages, ) - dh_ppfs = list( + all_dh_ppfs = list( flatten_ppfs( detect_all_packager_provided_files( dh_ppfs, @@ -988,13 +990,13 @@ def _resolve_debhelper_config_files( ) ) ) - return dh_ppfs, issues, exit_code + return all_dh_ppfs, issues, exit_code def _merge_list( existing_table: Dict[str, Any], key: str, - new_data: Optional[List[str]], + new_data: Optional[Sequence[str]], ) -> None: if not new_data: return @@ -1368,13 +1370,11 @@ def _annotate_debian_directory(context: CommandContext) -> None: def _json_output(data: Any) -> None: - format_options = {} if sys.stdout.isatty(): - format_options = { - "indent": 4, - # sort_keys might be tempting but generally insert order makes more sense in practice. - } - json.dump(data, sys.stdout, **format_options) + # sort_keys might be tempting but generally insert order makes more sense in practice.
+ json.dump(data, sys.stdout, indent=4) + else: + json.dump(data, sys.stdout) if sys.stdout.isatty(): # Looks better with a final newline. print() diff --git a/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py b/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py index 3eecb14..2f283e8 100644 --- a/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py +++ b/src/debputy/commands/debputy_cmd/lint_and_lsp_cmds.py @@ -21,19 +21,19 @@ _EDITOR_SNIPPETS = { ;; Inform eglot about the debputy LSP (with-eval-after-load 'eglot (add-to-list 'eglot-server-programs - '(debian-control-mode . ("debputy" "lsp" "server"))) + '(debian-control-mode . ("debputy" "lsp" "server" "--ignore-language-ids"))) (add-to-list 'eglot-server-programs - '(debian-changelog-mode . ("debputy" "lsp" "server"))) + '(debian-changelog-mode . ("debputy" "lsp" "server" "--ignore-language-ids"))) (add-to-list 'eglot-server-programs - '(debian-copyright-mode . ("debputy" "lsp" "server"))) + '(debian-copyright-mode . ("debputy" "lsp" "server" "--ignore-language-ids"))) ;; Requires elpa-dpkg-dev-el (>> 37.11) ;; (add-to-list 'eglot-server-programs - ;; '(debian-autopkgtest-control-mode . ("debputy" "lsp" "server"))) + ;; '(debian-autopkgtest-control-mode . ("debputy" "lsp" "server" "--ignore-language-ids"))) ;; The debian/rules file uses the qmake mode. (add-to-list 'eglot-server-programs - '(makefile-gmake-mode . ("debputy" "lsp" "server"))) + '(makefile-gmake-mode . ("debputy" "lsp" "server" "--ignore-language-ids"))) (add-to-list 'eglot-server-programs - '(yaml-mode . ("debputy" "lsp" "server"))) + '(yaml-mode . ("debputy" "lsp" "server" "--ignore-language-ids"))) ) ;; Auto-start eglot for the relevant modes. @@ -64,7 +64,7 @@ _EDITOR_SNIPPETS = { let g:ycm_language_server = [ \\ { 'name': 'debputy', \\ 'filetypes': [ 'debcontrol', 'debcopyright', 'debchangelog', 'make', 'yaml'], - \\ 'cmdline': [ 'debputy', 'lsp', 'server' ] + \\ 'cmdline': [ 'debputy', 'lsp', 'server', '--ignore-language-ids' ] \\ }, \\ ] @@ -92,7 +92,7 @@ _EDITOR_SNIPPETS = { lspServers->add({ filetype: ['debcontrol', 'debcopyright', 'debchangelog', 'make', 'yaml'], path: 'debputy', - args: ['lsp', 'server'] + args: ['lsp', 'server', '--ignore-language-ids'] }) endif @@ -100,6 +100,19 @@ _EDITOR_SNIPPETS = { autocmd User LspSetup g:LspAddServer(lspServers) """ ), + "neovim": "neovim+nvim-lspconfig", + "neovim+nvim-lspconfig": textwrap.dedent( + """\ + -- debputy lsp server glue for neovim with nvim-lspconfig.
Add to ~/.config/nvim/init.lua + -- + -- Requires https://github.com/neovim/nvim-lspconfig to be in your packages path + + require("lspconfig").debputy.setup {capabilities = capabilities} + + -- Make vim recognize debputy.manifest as a YAML file + vim.filetype.add({filename = {["debputy.manifest"] = "yaml"}}) + """ + ), } @@ -136,6 +149,13 @@ lsp_command = ROOT_COMMAND.add_dispatching_subcommand( default=2087, help="Bind to this port (Use with --tcp / --ws)", ), + add_arg( + "--ignore-language-ids", + dest="trust_language_ids", + default=True, + action="store_false", + help="Disregard language IDs from the editor (rely solely on filename instead)", + ), ], ) def lsp_server_cmd(context: CommandContext) -> None: @@ -156,6 +176,10 @@ def lsp_server_cmd(context: CommandContext) -> None: debputy_language_server = DEBPUTY_LANGUAGE_SERVER debputy_language_server.plugin_feature_set = feature_set debputy_language_server.dctrl_parser = context.dctrl_parser + debputy_language_server.trust_language_ids = parsed_args.trust_language_ids + + if parsed_args.tcp and parsed_args.ws: + _error("Sorry, --tcp and --ws are mutually exclusive") if parsed_args.tcp: debputy_language_server.start_tcp(parsed_args.host, parsed_args.port) diff --git a/src/debputy/commands/debputy_cmd/output.py b/src/debputy/commands/debputy_cmd/output.py index df8e6eb..2e117ba 100644 --- a/src/debputy/commands/debputy_cmd/output.py +++ b/src/debputy/commands/debputy_cmd/output.py @@ -133,10 +133,10 @@ class OutputStylingBase: row_format = f"| {row_format_inner} |" if self.supports_colors: - c = self._color_support - assert c is not None - header_color = c.Style.bold - header_color_reset = c.Style.reset + cs = self._color_support + assert cs is not None + header_color = cs.Style.bold + header_color_reset = cs.Style.reset else: header_color = "" header_color_reset = "" @@ -218,9 +218,9 @@ class ANSIOutputStylingBase(OutputStylingBase): self._check_color(fg) self._check_color(bg) self._check_text_style(style) - if not self.supports_colors: - return text _colored = self._color_support + if not self.supports_colors or _colored is None: + return text codes = [] if style is not None: code = getattr(_colored.Style, style)
feature_set.service_managers.values(): - service_registry = ServiceRegistryImpl(service_manager_details) + service_registry: ServiceRegistryImpl = ServiceRegistryImpl( + service_manager_details + ) service_manager_details.service_detector( fs_root, service_registry, @@ -1652,6 +1654,7 @@ def _generate_control_files( dctrl_file = "debian/control" if has_dbgsym: + assert dbgsym_root_fs is not None # mypy hint _generate_dbgsym_control_file_if_relevant( binary_package, dbgsym_root_fs, diff --git a/src/debputy/debhelper_emulation.py b/src/debputy/debhelper_emulation.py index 38d9a15..65a26f8 100644 --- a/src/debputy/debhelper_emulation.py +++ b/src/debputy/debhelper_emulation.py @@ -17,6 +17,8 @@ from typing import ( List, ) +from debian.deb822 import Deb822 + from debputy.packages import BinaryPackage from debputy.plugin.api import VirtualPath from debputy.substitution import Substitution @@ -251,7 +253,7 @@ def parse_drules_for_addons(lines: Iterable[str], sequences: Set[str]) -> None: def extract_dh_addons_from_control( - source_paragraph: Mapping[str, str], + source_paragraph: Union[Mapping[str, str], Deb822], sequences: Set[str], ) -> None: for f in ("Build-Depends", "Build-Depends-Indep", "Build-Depends-Arch"): diff --git a/src/debputy/dh_migration/migrators_impl.py b/src/debputy/dh_migration/migrators_impl.py index d7aa252..2ceefd5 100644 --- a/src/debputy/dh_migration/migrators_impl.py +++ b/src/debputy/dh_migration/migrators_impl.py @@ -432,7 +432,7 @@ def migrate_bash_completion( install_as_rules.append((source, dest_basename)) if install_dest_sources: - sources = ( + sources: Union[List[str], str] = ( install_dest_sources if len(install_dest_sources) > 1 else install_dest_sources[0] @@ -1502,7 +1502,7 @@ def read_dh_addon_sequences( ctrl_file = debian_dir.get("control") if ctrl_file: dr_sequences: Set[str] = set() - bd_sequences = set() + bd_sequences: Set[str] = set() drules = debian_dir.get("rules") if drules and drules.is_file: diff --git a/src/debputy/filesystem_scan.py b/src/debputy/filesystem_scan.py index dec123c..0a18899 100644 --- a/src/debputy/filesystem_scan.py +++ b/src/debputy/filesystem_scan.py @@ -1603,9 +1603,9 @@ class FSROOverlay(VirtualPathBase): continue if dir_part == "..": p = current.parent_dir - if current is None: + if p is None: raise ValueError(f'The path "{path}" escapes the root dir') - current = p + current = cast("FSROOverlay", p) continue try: current = current[dir_part] diff --git a/src/debputy/highlevel_manifest.py b/src/debputy/highlevel_manifest.py index 30440f1..1fea1a2 100644 --- a/src/debputy/highlevel_manifest.py +++ b/src/debputy/highlevel_manifest.py @@ -1199,7 +1199,7 @@ class HighLevelManifest: dtmp_dir = None search_dirs = install_request_context.search_dirs into = frozenset(self._binary_packages.values()) - seen = set() + seen: Set[BinaryPackage] = set() for search_dir in search_dirs: seen.update(search_dir.applies_to) diff --git a/src/debputy/highlevel_manifest_parser.py b/src/debputy/highlevel_manifest_parser.py index 28a3f80..c5fb410 100644 --- a/src/debputy/highlevel_manifest_parser.py +++ b/src/debputy/highlevel_manifest_parser.py @@ -444,13 +444,10 @@ class YAMLManifestParser(HighLevelManifestParser): parser_generator = self._plugin_provided_feature_set.manifest_parser_generator dispatchable_object_parsers = parser_generator.dispatchable_object_parsers manifest_root_parser = dispatchable_object_parsers[OPARSER_MANIFEST_ROOT] - parsed_data = cast( - "ManifestRootRule", - manifest_root_parser.parse_input( - yaml_data, - 
attribute_path, - parser_context=self, - ), + parsed_data = manifest_root_parser.parse_input( + yaml_data, + attribute_path, + parser_context=self, ) packages_dict: Mapping[str, PackageContextData[Mapping[str, Any]]] = cast( diff --git a/src/debputy/installations.py b/src/debputy/installations.py index e1e8f3a..b781757 100644 --- a/src/debputy/installations.py +++ b/src/debputy/installations.py @@ -546,6 +546,7 @@ def _resolve_matches( dest_paths: Union[Sequence[Tuple[str, bool]], Callable[[PathMatch], str]], install_context: "InstallRuleContext", ) -> Iterator[Tuple[PathMatch, Sequence[Tuple[str, "FSPath"]]]]: + dest_and_roots: Sequence[Tuple[str, "FSPath"]] if callable(dest_paths): compute_dest_path = dest_paths for match in matches: diff --git a/src/debputy/interpreter.py b/src/debputy/interpreter.py index 0d986e1..5a933fc 100644 --- a/src/debputy/interpreter.py +++ b/src/debputy/interpreter.py @@ -147,6 +147,10 @@ class DetectedInterpreter(Interpreter): def replace_shebang_line(self, path: "VirtualPath") -> None: new_shebang_line = self.corrected_shebang_line + if new_shebang_line is None: + raise RuntimeError( + "Please do not call replace_shebang_line when fixup_needed returns False" + ) assert new_shebang_line.startswith("#!") if not new_shebang_line.endswith("\n"): new_shebang_line += "\n" diff --git a/src/debputy/linting/lint_impl.py b/src/debputy/linting/lint_impl.py index a6f493e..ec13d53 100644 --- a/src/debputy/linting/lint_impl.py +++ b/src/debputy/linting/lint_impl.py @@ -13,6 +13,7 @@ from lsprotocol.types import ( TextEdit, Position, DiagnosticSeverity, + Diagnostic, ) from debputy.commands.debputy_cmd.context import CommandContext @@ -185,9 +186,9 @@ def _auto_fix_run( lint_report: LintReport, ) -> None: another_round = True - unfixed_diagnostics = [] + unfixed_diagnostics: List[Diagnostic] = [] remaining_rounds = 10 - fixed_count = False + fixed_count = 0 too_many_rounds = False lines = text.splitlines(keepends=True) lint_state = lint_context.state_for( diff --git a/src/debputy/lsp/debputy_ls.py b/src/debputy/lsp/debputy_ls.py index f375992..cc3f00e 100644 --- a/src/debputy/lsp/debputy_ls.py +++ b/src/debputy/lsp/debputy_ls.py @@ -1,6 +1,17 @@ import dataclasses import os -from typing import Optional, List, Any, Mapping +from typing import ( + Optional, + List, + Any, + Mapping, + Container, + TYPE_CHECKING, + Tuple, + Literal, +) + +from lsprotocol.types import MarkupKind from debputy.linting.lint_util import LintState from debputy.lsp.text_util import LintCapablePositionCodec @@ -11,17 +22,23 @@ from debputy.packages import ( ) from debputy.plugin.api.feature_set import PluginProvidedFeatureSet -try: +if TYPE_CHECKING: from pygls.server import LanguageServer from pygls.workspace import TextDocument from pygls.uris import from_fs_path -except ImportError as e: - class LanguageServer: - def __init__(self, *args, **kwargs) -> None: - """Placeholder to work if pygls is not installed""" - # Should not be called - raise e # pragma: no cover +else: + try: + from pygls.server import LanguageServer + from pygls.workspace import TextDocument + from pygls.uris import from_fs_path + except ImportError as e: + + class LanguageServer: + def __init__(self, *args, **kwargs) -> None: + """Placeholder to work if pygls is not installed""" + # Should not be called + raise e # pragma: no cover @dataclasses.dataclass(slots=True) @@ -86,10 +103,13 @@ class LSProvidedLintState(LintState): dctrl_doc = self._ls.workspace.get_text_document(dctrl_cache.doc_uri) re_parse_lines: Optional[List[str]] 
= None if is_open: + last_doc_version = dctrl_cache.last_doc_version + dctrl_doc_version = dctrl_doc.version if ( not dctrl_cache.is_open_in_editor - or dctrl_cache.last_doc_version is None - or dctrl_cache.last_doc_version < dctrl_doc.version + or last_doc_version is None + or dctrl_doc_version is None + or last_doc_version < dctrl_doc_version ): re_parse_lines = doc.lines @@ -127,6 +147,19 @@ class LSProvidedLintState(LintState): return dctrl.binary_packages if dctrl is not None else None +def _preference( + client_preference: Optional[List[MarkupKind]], + options: Container[MarkupKind], + fallback_kind: MarkupKind, +) -> MarkupKind: + if not client_preference: + return fallback_kind + for markdown_kind in client_preference: + if markdown_kind in options: + return markdown_kind + return fallback_kind + + class DebputyLanguageServer(LanguageServer): def __init__( @@ -137,6 +170,7 @@ class DebputyLanguageServer(LanguageServer): super().__init__(*args, **kwargs) self._dctrl_parser: Optional[DctrlParser] = None self._plugin_feature_set: Optional[PluginProvidedFeatureSet] = None + self._trust_language_ids: Optional[bool] = None @property def plugin_feature_set(self) -> PluginProvidedFeatureSet: @@ -177,3 +211,82 @@ class DebputyLanguageServer(LanguageServer): dir_path = os.path.dirname(dir_path) return LSProvidedLintState(self, doc, dir_path, self.dctrl_parser) + + @property + def _client_hover_markup_formats(self) -> Optional[List[MarkupKind]]: + try: + return ( + self.client_capabilities.text_document.hover.content_format + ) # type : ignore + except AttributeError: + return None + + def hover_markup_format( + self, + *options: MarkupKind, + fallback_kind: MarkupKind = MarkupKind.PlainText, + ) -> MarkupKind: + """Pick the client preferred hover markup format from a set of options + + :param options: The markup kinds possible. + :param fallback_kind: If no overlapping option was found in the client preferences + (or client did not announce a value at all), this parameter is returned instead. + :returns: The client's preferred markup format from the provided options, or, + (if there is no overlap), the `fallback_kind` value is returned. + """ + client_preference = self._client_hover_markup_formats + return _preference(client_preference, frozenset(options), fallback_kind) + + @property + def _client_completion_item_document_markup_formats( + self, + ) -> Optional[List[MarkupKind]]: + try: + return ( + self.client_capabilities.text_document.completion.completion_item.documentation_format # type : ignore + ) + except AttributeError: + return None + + def completion_item_document_markup( + self, + *options: MarkupKind, + fallback_kind: MarkupKind = MarkupKind.PlainText, + ) -> MarkupKind: + """Pick the client preferred completion item documentation markup format from a set of options + + :param options: The markup kinds possible. + :param fallback_kind: If no overlapping option was found in the client preferences + (or client did not announce a value at all), this parameter is returned instead. + :returns: The client's preferred markup format from the provided options, or, + (if there is no overlap), the `fallback_kind` value is returned. 
+ """ + + client_preference = self._client_completion_item_document_markup_formats + return _preference(client_preference, frozenset(options), fallback_kind) + + @property + def trust_language_ids(self) -> bool: + v = self._trust_language_ids + if v is None: + return True + return v + + @trust_language_ids.setter + def trust_language_ids(self, new_value: bool) -> None: + self._trust_language_ids = new_value + + def determine_language_id( + self, + doc: "TextDocument", + ) -> Tuple[Literal["editor-provided", "filename"], str]: + lang_id = doc.language_id + if self.trust_language_ids and lang_id and not lang_id.isspace(): + return "editor-provided", lang_id + path = doc.path + try: + last_idx = path.rindex("debian/") + except ValueError: + return "filename", os.path.basename(path) + guess_language_id = path[last_idx:] + return "filename", guess_language_id diff --git a/src/debputy/lsp/lsp_debian_changelog.py b/src/debputy/lsp/lsp_debian_changelog.py index 89604e4..ecff192 100644 --- a/src/debputy/lsp/lsp_debian_changelog.py +++ b/src/debputy/lsp/lsp_debian_changelog.py @@ -262,7 +262,7 @@ def _scan_debian_changelog_for_diagnostics( *, max_line_length: int = _MAXIMUM_WIDTH, ) -> Iterator[List[Diagnostic]]: - diagnostics = [] + diagnostics: List[Diagnostic] = [] diagnostics_at_last_update = 0 lines_since_last_update = 0 lines = lint_state.lines diff --git a/src/debputy/lsp/lsp_debian_control.py b/src/debputy/lsp/lsp_debian_control.py index 8c246d8..b44e8f9 100644 --- a/src/debputy/lsp/lsp_debian_control.py +++ b/src/debputy/lsp/lsp_debian_control.py @@ -1,5 +1,7 @@ +import dataclasses import re import textwrap +from functools import lru_cache from typing import ( Union, Sequence, @@ -9,15 +11,16 @@ from typing import ( Iterable, Mapping, List, + FrozenSet, + Dict, ) +from debputy.lsp.debputy_ls import DebputyLanguageServer from lsprotocol.types import ( DiagnosticSeverity, Range, Diagnostic, Position, - DidOpenTextDocumentParams, - DidChangeTextDocumentParams, FoldingRange, FoldingRangeParams, CompletionItem, @@ -39,6 +42,7 @@ from debputy.lsp.lsp_debian_control_reference_data import ( BINARY_FIELDS, SOURCE_FIELDS, DctrlFileMetadata, + package_name_to_section, ) from debputy.lsp.lsp_features import ( lint_diagnostics, @@ -53,11 +57,13 @@ from debputy.lsp.lsp_generic_deb822 import ( deb822_hover, deb822_folding_ranges, deb822_semantic_tokens_full, + deb822_token_iter, ) from debputy.lsp.quickfixes import ( propose_remove_line_quick_fix, range_compatible_with_remove_line_fix, propose_correct_text_quick_fix, + propose_insert_text_on_line_after_diagnostic_quick_fix, ) from debputy.lsp.spellchecking import default_spellchecker from debputy.lsp.text_util import ( @@ -100,123 +106,182 @@ _LANGUAGE_IDS = [ # vim's name "debcontrol", ] -_SUBSTVAR_RE = re.compile(r"[$][{][a-zA-Z0-9][a-zA-Z0-9-:]*[}]") -_SUBSTVARS_DOC = { - "${}": textwrap.dedent( - """\ - This is a substvar for a literal `$`. This form will never recurse - into another substvar. As an example, `${}{binary:Version}` will result - literal `${binary:Version}` (which will not be replaced). - - Defined by: `dpkg-gencontrol` - DH Sequence: - Source: - """ - ), - "${binary:Version}": textwrap.dedent( - """\ - The version of the current binary package including binNMU version. - Often used with `Depends: dep (= ${binary:Version})` relations - where: - * The `dep` package is from the same source (listed in the same - `debian/control` file) - * The current package and `dep` are both `arch:any` (or both `arch:all`) - packages. 
+@dataclasses.dataclass(slots=True, frozen=True) +class SubstvarMetadata: + name: str + defined_by: str + dh_sequence: Optional[str] + source: Optional[str] + description: str - Defined by: `dpkg-gencontrol` - DH Sequence: - Source: - """ - ), - "${source:Version}": textwrap.dedent( - """\ - The version of the current source package excluding binNMU version. + def render_metadata_fields(self) -> str: + def_by = f"Defined by: {self.defined_by}" + dh_seq = ( + f"DH Sequence: {self.dh_sequence}" if self.dh_sequence is not None else None + ) + source = f"Source: {self.source}" if self.source is not None else None + return "\n".join(filter(None, (def_by, dh_seq, source))) + + +def relationship_substvar_for_field(substvar: str) -> Optional[str]: + relationship_fields = _relationship_fields() + try: + col_idx = substvar.rindex(":") + except ValueError: + return None + return relationship_fields.get(substvar[col_idx + 1 : -1].lower()) - Often used with `Depends: dep (= ${source:Version})` relations - where: - * The `dep` package is from the same source (listed in the same - `debian/control` file) - * The `dep` is `arch:all`. +def _substvars_metadata(*args: SubstvarMetadata) -> Mapping[str, SubstvarMetadata]: + r = {s.name: s for s in args} + assert len(r) == len(args) + return r - Defined by: `dpkg-gencontrol` - DH Sequence: - Source: + +_SUBSTVAR_RE = re.compile(r"[$][{][a-zA-Z0-9][a-zA-Z0-9-:]*[}]") +_SUBSTVARS_DOC = _substvars_metadata( + SubstvarMetadata( + "${}", + "`dpkg-gencontrol`", + "(default)", + "", + textwrap.dedent( + """\ + This is a substvar for a literal `$`. This form will never recurse + into another substvar. As an example, `${}{binary:Version}` will result + literal `${binary:Version}` (which will not be replaced). + """ + ), + ), + SubstvarMetadata( + "${binary:Version}", + "`dpkg-gencontrol`", + "(default)", + "", + textwrap.dedent( + """\ + The version of the current binary package including binNMU version. + + Often used with `Depends: dep (= ${binary:Version})` relations + where: + + * The `dep` package is from the same source (listed in the same + `debian/control` file) + * The current package and `dep` are both `arch:any` (or both `arch:all`) + packages. """ + ), ), - "${misc:Depends}": textwrap.dedent( - """\ - Some debhelper commands may make the generated package need to depend on some other packages. - For example, if you use `dh_installdebconf(1)`, your package will generally need to depend on - debconf. Or if you use `dh_installxfonts(1)`, your package will generally need to depend on a - particular version of xutils. Keeping track of these miscellaneous dependencies can be - annoying since they are dependent on how debhelper does things, so debhelper offers a way to - automate it. - - All commands of this type, besides documenting what dependencies may be needed on their man - pages, will automatically generate a substvar called ${misc:Depends}. If you put that token - into your `debian/control` file, it will be expanded to the dependencies debhelper figures - you need. - - This is entirely independent of the standard `${shlibs:Depends}` generated by `dh_makeshlibs(1)`, - and the `${perl:Depends}` generated by `dh_perl(1)`. - - Defined by: `debhelper` - DH Sequence: - Source: + SubstvarMetadata( + "${source:Version}", + "`dpkg-gencontrol`", + "(default)", + "", + textwrap.dedent( + """\ + The version of the current source package excluding binNMU version. 
+ + Often used with `Depends: dep (= ${source:Version})` relations + where: + + * The `dep` package is from the same source (listed in the same + `debian/control` file) + * The `dep` is `arch:all`. """ + ), ), - "${misc:Pre-Depends}": textwrap.dedent( - """\ - This is the moral equivalent to `${misc:Depends}` but for `Pre-Depends`. - - Defined by: `debhelper` - DH Sequence: + SubstvarMetadata( + "${misc:Depends}", + "`debhelper`", + "(default)", + "", + textwrap.dedent( + """\ + Some debhelper commands may make the generated package need to depend on some other packages. + For example, if you use `dh_installdebconf(1)`, your package will generally need to depend on + debconf. Or if you use `dh_installxfonts(1)`, your package will generally need to depend on a + particular version of xutils. Keeping track of these miscellaneous dependencies can be + annoying since they are dependent on how debhelper does things, so debhelper offers a way to + automate it. + + All commands of this type, besides documenting what dependencies may be needed on their man + pages, will automatically generate a substvar called ${misc:Depends}. If you put that token + into your `debian/control` file, it will be expanded to the dependencies debhelper figures + you need. + + This is entirely independent of the standard `${shlibs:Depends}` generated by `dh_makeshlibs(1)`, + and the `${perl:Depends}` generated by `dh_perl(1)`. """ + ), ), - "${perl:Depends}": textwrap.dedent( - """\ - The dependency on perl as determined by `dh_perl`. Note this only covers the relationship - with the Perl interpreter and not perl modules. - - Defined by: `dh_perl` - DH Sequence: - Source: + SubstvarMetadata( + "${misc:Pre-Depends}", + "`debhelper`", + "(default)", + None, + textwrap.dedent( + """\ + This is the moral equivalent to `${misc:Depends}` but for `Pre-Depends`. """ + ), ), - "${gir:Depends}": textwrap.dedent( - """\ - Dependencies related to GObject introspection data. + SubstvarMetadata( + "${perl:Depends}", + "`dh_perl`", + "(default)", + "", + textwrap.dedent( + """\ + The dependency on perl as determined by `dh_perl`. Note this only covers the relationship + with the Perl interpreter and not perl modules. - Defined by: `dh_girepository` - DH Sequence: `gir` - Source: """ + ), ), - "${shlibs:Depends}": textwrap.dedent( - """\ - Dependencies related to ELF dependencies. - - Defined by: `dpkg-shlibdeps` (often via `dh_shlibdeps`) - DH Sequence: - Source: + SubstvarMetadata( + "${gir:Depends}", + "`dh_girepository`", + "gir", + "", + textwrap.dedent( + """\ + Dependencies related to GObject introspection data. """ + ), ), - "${shlibs:Pre-Depends}": textwrap.dedent( - """\ - Dependencies related to ELF dependencies. The `Pre-Depends` - version is often only seen in `Essential: yes` packages - or packages that manually request the `Pre-Depends` - relation via `dpkg-shlibdeps`. - - Defined by: `dpkg-shlibdeps` (often via `dh_shlibdeps`) - DH Sequence: - Source: + SubstvarMetadata( + "${shlibs:Depends}", + "`dpkg-shlibdeps` (often via `dh_shlibdeps`)", + "(default)", + "", + textwrap.dedent( + """\ + Dependencies related to ELF dependencies. """ + ), ), -} + SubstvarMetadata( + "${shlibs:Pre-Depends}", + "`dpkg-shlibdeps` (often via `dh_shlibdeps`)", + "(default)", + "", + textwrap.dedent( + """\ + Dependencies related to ELF dependencies. The `Pre-Depends` + version is often only seen in `Essential: yes` packages + or packages that manually request the `Pre-Depends` + relation via `dpkg-shlibdeps`. 
+ + Note: This substvar only appears in `debhelper-compat (= 14)`, or + with use of `debputy` (at an integration level, where `debputy` + runs `dpkg-shlibdeps`), or when passing relevant options to + `dpkg-shlibdeps` (often via `dh_shlibdeps`) such as `-dPre-Depends`. + """ + ), + ), +) _DCTRL_FILE_METADATA = DctrlFileMetadata() @@ -225,9 +290,30 @@ lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_CODE_ACTION) lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL) +@lru_cache +def _relationship_fields() -> Mapping[str, str]: + # TODO: Pull from `dpkg-dev` when possible fallback only to the static list. + return { + f.lower(): f + for f in ( + "Pre-Depends", + "Depends", + "Recommends", + "Suggests", + "Enhances", + "Conflicts", + "Breaks", + "Replaces", + "Provides", + "Built-Using", + "Static-Built-Using", + ) + } + + @lsp_hover(_LANGUAGE_IDS) def _debian_control_hover( - ls: "LanguageServer", + ls: "DebputyLanguageServer", params: HoverParams, ) -> Optional[Hover]: return deb822_hover(ls, params, _DCTRL_FILE_METADATA, custom_handler=_custom_hover) @@ -248,26 +334,40 @@ def _custom_hover( line_no = server_position.line line = lines[line_no] substvar_search_ref = server_position.character - if line[substvar_search_ref] in ("$", "{"): - substvar_search_ref += 2 substvar = "" try: + if line and line[substvar_search_ref] in ("$", "{"): + substvar_search_ref += 2 substvar_start = line.rindex("${", 0, substvar_search_ref) substvar_end = line.index("}", substvar_start) if server_position.character <= substvar_end: - _info( - f"Range {substvar_start} <= {server_position.character} <= {substvar_end}" - ) substvar = line[substvar_start : substvar_end + 1] - except ValueError: + except (ValueError, IndexError): pass if substvar == "${}" or _SUBSTVAR_RE.fullmatch(substvar): - doc = _SUBSTVARS_DOC.get(substvar) + substvar_md = _SUBSTVARS_DOC.get(substvar) + + computed_doc = "" + for_field = relationship_substvar_for_field(substvar) + if for_field: + # Leading empty line is intentional! + computed_doc = textwrap.dedent( + f""" + This substvar is a relationship substvar for the field {for_field}. + Relationship substvars are automatically added in the field they + are named after in `debhelper-compat (= 14)` or later, or with + `debputy` (any integration mode after 0.1.21). + """ + ) - if doc is None: - doc = "No documentation for {substvar}." 
- return f"# Substvar `{substvar}`\n\n{doc}" + if substvar_md is None: + doc = f"No documentation for {substvar}.\n" + md_fields = "" + else: + doc = substvar_md.description + md_fields = "\n" + substvar_md.render_metadata_fields() + return f"# Substvar `{substvar}`\n\n{doc}{computed_doc}{md_fields}" if known_field is None or known_field.name != "Description": return None @@ -318,7 +418,7 @@ def _custom_hover( @lsp_completer(_LANGUAGE_IDS) def _debian_control_completions( - ls: "LanguageServer", + ls: "DebputyLanguageServer", params: CompletionParams, ) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]: return deb822_completer(ls, params, _DCTRL_FILE_METADATA) @@ -326,37 +426,12 @@ def _debian_control_completions( @lsp_folding_ranges(_LANGUAGE_IDS) def _debian_control_folding_ranges( - ls: "LanguageServer", + ls: "DebputyLanguageServer", params: FoldingRangeParams, ) -> Optional[Sequence[FoldingRange]]: return deb822_folding_ranges(ls, params, _DCTRL_FILE_METADATA) -def _deb822_token_iter( - tokens: Iterable[Deb822Token], -) -> Iterator[Tuple[Deb822Token, int, int, int, int, int]]: - line_no = 0 - line_offset = 0 - - for token in tokens: - start_line = line_no - start_line_offset = line_offset - - newlines = token.text.count("\n") - line_no += newlines - text_len = len(token.text) - if newlines: - if token.text.endswith("\n"): - line_offset = 0 - else: - # -2, one to remove the "\n" and one to get 0-offset - line_offset = text_len - token.text.rindex("\n") - 2 - else: - line_offset += text_len - - yield token, start_line, start_line_offset, line_no, line_offset - - def _paragraph_representation_field( paragraph: Deb822ParagraphElement, ) -> Deb822KeyValuePairElement: @@ -441,23 +516,32 @@ def _binary_package_checks( source="debputy", ) ) - if effective_section != "debian-installer": - quickfix_data = None - if section is not None: - quickfix_data = [ - propose_correct_text_quick_fix( - f"{component_prefix}debian-installer" - ) - ] - diagnostics.append( - Diagnostic( - section_range, - f'The Section should be "{component_prefix}debian-installer" for udebs', - severity=DiagnosticSeverity.Warning, - source="debputy", - data=quickfix_data, + guessed_section = "debian-installer" + section_diagnostic_rationale = " since it is an udeb" + else: + guessed_section = package_name_to_section(package_name) + section_diagnostic_rationale = " based on the package name" + if guessed_section is not None and guessed_section != effective_section: + if section is not None: + quickfix_data = [ + propose_correct_text_quick_fix(f"{component_prefix}{guessed_section}") + ] + else: + quickfix_data = [ + propose_insert_text_on_line_after_diagnostic_quick_fix( + f"Section: {component_prefix}{guessed_section}\n" ) + ] + assert section_range is not None # mypy hint + diagnostics.append( + Diagnostic( + section_range, + f'The Section should be "{component_prefix}{guessed_section}"{section_diagnostic_rationale}', + severity=DiagnosticSeverity.Warning, + source="debputy", + data=quickfix_data, ) + ) def _diagnostics_for_paragraph( @@ -513,7 +597,7 @@ def _diagnostics_for_paragraph( diagnostics, ) - seen_fields = {} + seen_fields: Dict[str, Tuple[str, str, Range, List[Range]]] = {} for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement): field_name_token = kvpair.field_token @@ -621,12 +705,12 @@ def _diagnostics_for_paragraph( ) if pos: word_pos_te = TEPosition(0, pos).relative_to(word_pos_te) - word_range = TERange( + word_range_te = TERange( START_POSITION, TEPosition(0, endpos - pos), ) 
word_range_server_units = te_range_to_lsp( - TERange.from_position_and_size(word_pos_te, word_range) + TERange.from_position_and_size(word_pos_te, word_range_te) ) word_range = position_codec.range_to_client_units( lines, @@ -718,7 +802,7 @@ def _scan_for_syntax_errors_and_token_level_diagnostics( start_offset, end_line, end_offset, - ) in _deb822_token_iter(deb822_file.iter_tokens()): + ) in deb822_token_iter(deb822_file.iter_tokens()): if token.is_error: first_error = min(first_error, start_line) start_pos = Position( @@ -741,17 +825,17 @@ def _scan_for_syntax_errors_and_token_level_diagnostics( ) ) elif token.is_comment: - for word, pos, end_pos in spell_checker.iter_words(token.text): + for word, col_pos, end_col_pos in spell_checker.iter_words(token.text): corrections = spell_checker.provide_corrections_for(word) if not corrections: continue start_pos = Position( start_line, - pos, + col_pos, ) end_pos = Position( start_line, - end_pos, + end_col_pos, ) word_range = position_codec.range_to_client_units( lines, Range(start_pos, end_pos) @@ -820,8 +904,8 @@ def _lint_debian_control( @lsp_semantic_tokens_full(_LANGUAGE_IDS) -def _semantic_tokens_full( - ls: "LanguageServer", +def _debian_control_semantic_tokens_full( + ls: "DebputyLanguageServer", request: SemanticTokensParams, ) -> Optional[SemanticTokens]: return deb822_semantic_tokens_full( diff --git a/src/debputy/lsp/lsp_debian_control_reference_data.py b/src/debputy/lsp/lsp_debian_control_reference_data.py index e65ab86..898faab 100644 --- a/src/debputy/lsp/lsp_debian_control_reference_data.py +++ b/src/debputy/lsp/lsp_debian_control_reference_data.py @@ -22,8 +22,6 @@ from typing import ( ) from debian.debian_support import DpkgArchTable -from lsprotocol.types import DiagnosticSeverity, Diagnostic, DiagnosticTag, Range - from debputy.lsp.quickfixes import ( propose_correct_text_quick_fix, propose_remove_line_quick_fix, @@ -56,6 +54,7 @@ from debputy.lsp.vendoring._deb822_repro.tokens import ( Deb822SpaceSeparatorToken, ) from debputy.util import PKGNAME_REGEX +from lsprotocol.types import DiagnosticSeverity, Diagnostic, DiagnosticTag, Range try: from debputy.lsp.vendoring._deb822_repro.locatable import ( @@ -330,7 +329,7 @@ def all_architectures_and_wildcards(arch2table) -> Iterable[Union[str, Keyword]] @functools.lru_cache -def dpkg_arch_and_wildcards() -> FrozenSet[str]: +def dpkg_arch_and_wildcards() -> FrozenSet[Union[str, Keyword]]: dpkg_arch_table = DpkgArchTable.load_arch_table() return frozenset(all_architectures_and_wildcards(dpkg_arch_table._arch2table)) @@ -505,6 +504,180 @@ def _combined_custom_field_check(*checks: CustomFieldCheck) -> CustomFieldCheck: return _validator +@dataclasses.dataclass(slots=True, frozen=True) +class PackageNameSectionRule: + section: str + check: Callable[[str], bool] + + +def _package_name_section_rule( + section: str, + check: Union[Callable[[str], bool], re.Pattern], + *, + confirm_re: Optional[re.Pattern] = None, +) -> PackageNameSectionRule: + if confirm_re is not None: + assert callable(check) + + def _impl(v: str) -> bool: + return check(v) and confirm_re.search(v) + + elif isinstance(check, re.Pattern): + + def _impl(v: str) -> bool: + return check.search(v) is not None + + else: + _impl = check + + return PackageNameSectionRule(section, _impl) + + +# rules: order is important (first match wins in case of a conflict) +_PKGNAME_VS_SECTION_RULES = [ + _package_name_section_rule("debian-installer", lambda n: n.endswith("-udeb")), + _package_name_section_rule("doc", lambda n: 
n.endswith(("-doc", "-docs"))), + _package_name_section_rule("debug", lambda n: n.endswith(("-dbg", "-dbgsym"))), + _package_name_section_rule( + "httpd", + lambda n: n.startswith(("lighttpd-mod", "libapache2-mod-", "libnginx-mod-")), + ), + _package_name_section_rule("gnustep", lambda n: n.startswith("gnustep-")), + _package_name_section_rule( + "gnustep", + lambda n: n.endswith( + ( + ".framework", + ".framework-common", + ".tool", + ".tool-common", + ".app", + ".app-common", + ) + ), + ), + _package_name_section_rule("embedded", lambda n: n.startswith("moblin-")), + _package_name_section_rule("javascript", lambda n: n.startswith("node-")), + _package_name_section_rule("zope", lambda n: n.startswith(("python-zope", "zope"))), + _package_name_section_rule( + "python", + lambda n: n.startswith(("python-", "python3-")), + ), + _package_name_section_rule( + "gnu-r", + lambda n: n.startswith(("r-cran-", "r-bioc-", "r-other-")), + ), + _package_name_section_rule("editors", lambda n: n.startswith("elpa-")), + _package_name_section_rule("lisp", lambda n: n.startswith("cl-")), + _package_name_section_rule( + "lisp", + lambda n: "-elisp-" in n or n.endswith("-elisp"), + ), + _package_name_section_rule( + "lisp", + lambda n: n.startswith("lib") and n.endswith("-guile"), + ), + _package_name_section_rule("lisp", lambda n: n.startswith("guile-")), + _package_name_section_rule("golang", lambda n: n.startswith("golang-")), + _package_name_section_rule( + "perl", + lambda n: n.startswith("lib") and n.endswith("-perl"), + ), + _package_name_section_rule( + "cli-mono", + lambda n: n.startswith("lib") and n.endswith(("-cil", "-cil-dev")), + ), + _package_name_section_rule( + "java", + lambda n: n.startswith("lib") and n.endswith(("-java", "-gcj", "-jni")), + ), + _package_name_section_rule( + "php", + lambda n: n.startswith(("libphp", "php")), + confirm_re=re.compile(r"^(?:lib)?php(?:\d(?:\.\d)?)?-"), + ), + _package_name_section_rule( + "php", lambda n: n.startswith("lib-") and n.endswith("-php") + ), + _package_name_section_rule( + "haskell", + lambda n: n.startswith(("haskell-", "libhugs-", "libghc-", "libghc6-")), + ), + _package_name_section_rule( + "ruby", + lambda n: "-ruby" in n, + confirm_re=re.compile(r"^lib.*-ruby(?:1\.\d)?$"), + ), + _package_name_section_rule("ruby", lambda n: n.startswith("ruby-")), + _package_name_section_rule( + "rust", + lambda n: n.startswith("librust-") and n.endswith("-dev"), + ), + _package_name_section_rule("rust", lambda n: n.startswith("rust-")), + _package_name_section_rule( + "ocaml", + lambda n: n.startswith("lib-") and n.endswith(("-ocaml-dev", "-camlp4-dev")), + ), + _package_name_section_rule("javascript", lambda n: n.startswith("libjs-")), + _package_name_section_rule( + "interpreters", + lambda n: n.startswith("lib-") and n.endswith(("-tcl", "-lua", "-gst")), + ), + _package_name_section_rule( + "introspection", + lambda n: n.startswith("gir-"), + confirm_re=re.compile(r"^gir\d+\.\d+-.*-\d+\.\d+$"), + ), + _package_name_section_rule( + "fonts", + lambda n: n.startswith(("xfonts-", "fonts-", "ttf-")), + ), + _package_name_section_rule("admin", lambda n: n.startswith(("libnss-", "libpam-"))), + _package_name_section_rule( + "localization", + lambda n: n.startswith( + ( + "aspell-", + "hunspell-", + "myspell-", + "mythes-", + "dict-freedict-", + "gcompris-sound-", + ) + ), + ), + _package_name_section_rule( + "localization", + lambda n: n.startswith("hypen-"), + confirm_re=re.compile(r"^hyphen-[a-z]{2}(?:-[a-z]{2})?$"), + ), + _package_name_section_rule( + 
"localization", + lambda n: "-l10n-" in n or n.endswith("-l10n"), + ), + _package_name_section_rule("kernel", lambda n: n.endswith(("-dkms", "-firmware"))), + _package_name_section_rule( + "libdevel", + lambda n: n.startswith("lib") and n.endswith(("-dev", "-headers")), + ), + _package_name_section_rule( + "libs", + lambda n: n.startswith("lib"), + confirm_re=re.compile(r"^lib.*\d[ad]?$"), + ), +] + + +# Fiddling with the package name can cause a lot of changes (diagnostic scans), so we have an upper bound +# on the cache. The number is currently just taken out of a hat. +@functools.lru_cache(64) +def package_name_to_section(name: str) -> Optional[str]: + for rule in _PKGNAME_VS_SECTION_RULES: + if rule.check(name): + return rule.section + return None + + class FieldValueClass(Enum): SINGLE_VALUE = auto(), LIST_SPACE_SEPARATED_INTERPRETATION SPACE_SEPARATED_LIST = auto(), LIST_SPACE_SEPARATED_INTERPRETATION @@ -576,6 +749,8 @@ class Deb822KnownField: unknown_value_diagnostic_severity: Optional[DiagnosticSeverity] = ( DiagnosticSeverity.Error ) + # One-line description for space-constrained docs (such as completion docs) + synopsis_doc: Optional[str] = None hover_text: Optional[str] = None spellcheck_value: bool = False is_stanza_name: bool = False @@ -812,6 +987,7 @@ SOURCE_FIELDS = _fields( custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX), missing_field_severity=DiagnosticSeverity.Error, is_stanza_name=True, + synopsis_doc="Name of source package", hover_text=textwrap.dedent( """\ Declares the name of the source package. @@ -824,6 +1000,7 @@ SOURCE_FIELDS = _fields( "Standards-Version", FieldValueClass.SINGLE_VALUE, missing_field_severity=DiagnosticSeverity.Error, + synopsis_doc="Debian Policy version this package complies with", hover_text=textwrap.dedent( """\ Declares the last semantic version of the Debian Policy this package as last checked against. @@ -843,6 +1020,7 @@ SOURCE_FIELDS = _fields( FieldValueClass.SINGLE_VALUE, known_values=ALL_SECTIONS, unknown_value_diagnostic_severity=DiagnosticSeverity.Warning, + synopsis_doc="Default section", hover_text=textwrap.dedent( """\ Define the default section for packages in this source package. @@ -862,6 +1040,7 @@ SOURCE_FIELDS = _fields( default_value="optional", warn_if_default=False, known_values=ALL_PRIORITIES, + synopsis_doc="Default priority", hover_text=textwrap.dedent( """\ Define the default priority for packages in this source package. @@ -881,6 +1060,7 @@ SOURCE_FIELDS = _fields( "Maintainer", FieldValueClass.SINGLE_VALUE, missing_field_severity=DiagnosticSeverity.Error, + synopsis_doc="Name and email of maintainer / maintenance team", hover_text=textwrap.dedent( """\ The maintainer of the package. @@ -897,6 +1077,7 @@ SOURCE_FIELDS = _fields( DctrlKnownField( "Uploaders", FieldValueClass.COMMA_SEPARATED_EMAIL_LIST, + synopsis_doc="Names and emails of co-maintainers", hover_text=textwrap.dedent( """\ Comma separated list of uploaders associated with the package. @@ -922,6 +1103,7 @@ SOURCE_FIELDS = _fields( DctrlKnownField( "Vcs-Browser", FieldValueClass.SINGLE_VALUE, + synopsis_doc="URL for browsers to interact with packaging VCS", hover_text=textwrap.dedent( """\ URL to the Version control system repo used for the packaging. The URL should be usable with a @@ -934,6 +1116,7 @@ SOURCE_FIELDS = _fields( DctrlKnownField( "Vcs-Git", FieldValueClass.SPACE_SEPARATED_LIST, + synopsis_doc="URL and options for cloning the packaging VCS", hover_text=textwrap.dedent( """\ URL to the git repo used for the packaging. 
@@ -952,6 +1135,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Vcs-Svn",
         FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
+        synopsis_doc="URL for checking out the packaging VCS",
         hover_text=textwrap.dedent(
             """\
             URL to the Subversion repo used for the packaging. The URL should be usable with `svn checkout`
@@ -965,6 +1149,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Vcs-Arch",
         FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
+        synopsis_doc="URL for checking out the packaging VCS",
         hover_text=textwrap.dedent(
             """\
             URL to the GNU Arch repo used for the packaging. The URL should be usable for getting a copy of the
@@ -977,6 +1162,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Vcs-Cvs",
         FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
+        synopsis_doc="URL for checking out the packaging VCS",
         hover_text=textwrap.dedent(
             """\
             URL to the CVS repo used for the packaging. The URL should be usable for getting a copy of the
@@ -989,6 +1175,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Vcs-Darcs",
         FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
+        synopsis_doc="URL for checking out the packaging VCS",
         hover_text=textwrap.dedent(
             """\
             URL to the Darcs repo used for the packaging. The URL should be usable for getting a copy of the
@@ -1001,6 +1188,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Vcs-Hg",
         FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
+        synopsis_doc="URL for checking out the packaging VCS",
         hover_text=textwrap.dedent(
             """\
             URL to the Mercurial (hg) repo used for the packaging. The URL should be usable for getting a copy of the
@@ -1013,6 +1201,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Vcs-Mtn",
         FieldValueClass.SPACE_SEPARATED_LIST,  # TODO: Might be a single value
+        synopsis_doc="URL for checking out the packaging VCS",
         hover_text=textwrap.dedent(
             """\
             URL to the Monotone repo used for the packaging. The URL should be usable for getting a copy of the
@@ -1028,6 +1217,7 @@ SOURCE_FIELDS = _fields(
         deprecated_with_no_replacement=True,
         default_value="no",
         known_values=_allowed_values("yes", "no"),
+        synopsis_doc="**Obsolete**: Old ACL mechanism for Debian Maintainers",
         hover_text=textwrap.dedent(
             """\
             Obsolete field
@@ -1044,6 +1234,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Build-Depends",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Dependencies required for clean and full build actions",
         hover_text=textwrap.dedent(
             """\
             All minimum build-dependencies for this source package. Needed for any target including **clean**.
@@ -1053,6 +1244,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Build-Depends-Arch",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Dependencies required for arch:any actions (build-arch/binary-arch)",
         hover_text=textwrap.dedent(
             """\
             Build-dependencies required for building the architecture dependent binary packages of this source
@@ -1068,6 +1260,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Build-Depends-Indep",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Dependencies required for arch:all actions (build-indep/binary-indep)",
         hover_text=textwrap.dedent(
             """\
             Build-dependencies required for building the architecture independent binary packages of this source
@@ -1083,6 +1276,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Build-Conflicts",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Package versions that will break the build or the clean target (use sparingly)",
         hover_text=textwrap.dedent(
             """\
             Packages that must **not** be installed during **any** part of the build, including the **clean**
@@ -1097,6 +1291,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Build-Conflicts-Arch",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Package versions that will break an arch:any build (use sparingly)",
         hover_text=textwrap.dedent(
             """\
             Packages that must **not** be installed during the **build-arch** or **binary-arch** targets.
@@ -1111,6 +1306,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Build-Conflicts-Indep",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Package versions that will break an arch:all build (use sparingly)",
         hover_text=textwrap.dedent(
             """\
             Packages that must **not** be installed during the **build-indep** or **binary-indep** targets.
@@ -1125,6 +1321,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Testsuite",
         FieldValueClass.SPACE_SEPARATED_LIST,
+        synopsis_doc="Announce **autodep8** tests",
         hover_text=textwrap.dedent(
             """\
             Declares that this package provides or should run install time tests via `autopkgtest`.
@@ -1142,6 +1339,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Homepage",
         FieldValueClass.SINGLE_VALUE,
+        synopsis_doc="Upstream homepage",
         hover_text=textwrap.dedent(
             """\
             Link to the upstream homepage for this source package.
@@ -1196,6 +1394,7 @@ SOURCE_FIELDS = _fields(
                 ),
             ),
         ),
+        synopsis_doc="Declare (fake)root requirements for the package",
         hover_text=textwrap.dedent(
             """\
             Declare if and when the package build assumes it is run as root or fakeroot.
@@ -1225,6 +1424,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Bugs",
         FieldValueClass.SINGLE_VALUE,
+        synopsis_doc="Custom bugtracker URL (for third-party packages)",
         hover_text=textwrap.dedent(
             """\
             Provide a custom bug tracker URL
@@ -1238,6 +1438,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "Origin",
         FieldValueClass.SINGLE_VALUE,
+        synopsis_doc="Custom origin (for third-party packages)",
         hover_text=textwrap.dedent(
             """\
             Declare the origin of the package.
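
The known_values metadata attached to fields such as Section and Priority is what powers the "unknown value" diagnostics. The same check can be sketched standalone with python-debian's deb822 parser (the section list below is a stand-in subset, not the real ALL_SECTIONS table):

    from debian.deb822 import Deb822

    KNOWN_SECTIONS = {"admin", "devel", "doc", "libs", "libdevel", "utils"}  # stand-in subset

    with open("debian/control") as fd:
        for stanza in Deb822.iter_paragraphs(fd):
            section = stanza.get("Section")
            # Sections may carry an archive area prefix such as "non-free/libs"
            if section is not None and section.split("/")[-1] not in KNOWN_SECTIONS:
                name = stanza.get("Package") or stanza.get("Source") or "?"
                print(f"{name}: unknown section {section!r}")
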
@@ -1252,6 +1453,7 @@ SOURCE_FIELDS = _fields(
         "X-Python-Version",
         FieldValueClass.COMMA_SEPARATED_LIST,
         replaced_by="X-Python3-Version",
+        synopsis_doc="**Obsolete**: Supported Python2 versions (`dh-python` specific)",
         hover_text=textwrap.dedent(
             """\
             Obsolete field for declaring the supported Python2 versions
@@ -1264,6 +1466,7 @@ SOURCE_FIELDS = _fields(
     DctrlKnownField(
         "X-Python3-Version",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Supported Python3 versions (`dh-python` specific)",
         hover_text=textwrap.dedent(
             # Too lazy to provide a better description
             """\
@@ -1278,6 +1481,7 @@ SOURCE_FIELDS = _fields(
         "XS-Autobuild",
         FieldValueClass.SINGLE_VALUE,
         known_values=_allowed_values("yes"),
+        synopsis_doc="Whether this non-free package is auto-buildable on the buildds",
         hover_text=textwrap.dedent(
             """\
             Used for non-free packages to denote that they may be auto-built on the Debian build infrastructure
@@ -1291,6 +1495,7 @@ SOURCE_FIELDS = _fields(
         "Description",
         FieldValueClass.FREE_TEXT_FIELD,
         spellcheck_value=True,
+        synopsis_doc="Common base description for all packages via substvar",
         hover_text=textwrap.dedent(
             """\
             This field contains a human-readable description of the package. However, it is not used directly.
@@ -1343,6 +1548,7 @@ BINARY_FIELDS = _fields(
         custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX),
         is_stanza_name=True,
         missing_field_severity=DiagnosticSeverity.Error,
+        synopsis_doc="Declares the name of a binary package",
         hover_text="Declares the name of a binary package",
     ),
     DctrlKnownField(
@@ -1356,6 +1562,7 @@ BINARY_FIELDS = _fields(
                 hover_text="The package will be built as a micro-deb (also known as a udeb). These are solely used by the debian-installer.",
             ),
         ),
+        synopsis_doc="Non-standard package type (such as `udeb`)",
         hover_text=textwrap.dedent(
             """\
             **Special-purpose only**. *This field is a special purpose field and is rarely needed.*
@@ -1373,6 +1580,7 @@ BINARY_FIELDS = _fields(
         missing_field_severity=DiagnosticSeverity.Error,
         unknown_value_diagnostic_severity=None,
         known_values=_allowed_values(*dpkg_arch_and_wildcards()),
+        synopsis_doc="Architecture of the package",
         hover_text=textwrap.dedent(
             """\
             Determines which architectures this package can be compiled for or if it is an architecture-independent
@@ -1424,6 +1632,7 @@ BINARY_FIELDS = _fields(
                 ),
             ),
         ),
+        synopsis_doc="Whether the package is essential (Policy term)",
         hover_text=textwrap.dedent(
             """\
             **Special-purpose only**. *This field is a special purpose field and is rarely needed.*
@@ -1451,6 +1660,7 @@ BINARY_FIELDS = _fields(
         FieldValueClass.SINGLE_VALUE,
         replaced_by="Protected",
         default_value="no",
+        synopsis_doc="**Deprecated**: Use Protected instead",
         known_values=_allowed_values(
             Keyword(
                 "yes",
@@ -1469,6 +1679,13 @@ BINARY_FIELDS = _fields(
                 ),
             ),
         ),
+        hover_text=textwrap.dedent(
+            """\
+            This is the prototype field that led to `Protected`, which should be used instead.
+
+            It makes `apt` (but not `dpkg`) require extra confirmation before removing the package.
+            """
+        ),
     ),
     DctrlKnownField(
         "Protected",
@@ -1492,10 +1709,24 @@ BINARY_FIELDS = _fields(
                 ),
             ),
         ),
+        synopsis_doc="Mark as protected (uninstall protection)",
+        hover_text=textwrap.dedent(
+            """\
+            Declare this package as a potential system critical package. When set to `yes`, both `apt`
+            and `dpkg` will assume that removing the package *may* break the system. As a consequence,
+            they will require extra confirmation (or "force" options) before removing the package.
+
+            This field basically provides an "uninstall" protection similar to that of `Essential` packages
+            without the other benefits and requirements that come with `Essential` packages. This option
+            is generally applicable to packages like bootloaders, kernels, and other packages that might
+            be necessary for booting the system.
+            """
+        ),
     ),
     DctrlKnownField(
         "Pre-Depends",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Very strong dependencies; prefer Depends when applicable",
         hover_text=textwrap.dedent(
             """\
             **Advanced field**. *This field covers an advanced topic. If you are new to packaging, you are*
@@ -1522,6 +1753,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Depends",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Dependencies required to install and use this package",
         hover_text=textwrap.dedent(
             """\
             Lists the packages that must be installed, before this package is installed.
@@ -1550,6 +1782,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Recommends",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Optional dependencies **most** people should have",
         hover_text=textwrap.dedent(
             """\
             Lists the packages that *should* be installed when this package is installed in all but
@@ -1573,6 +1806,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Suggests",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Optional dependencies that some people might want",
         hover_text=textwrap.dedent(
             """\
             Lists the packages that may make this package more useful but not installing them is perfectly
@@ -1589,6 +1823,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Enhances",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Packages enhanced by installing this package",
         hover_text=textwrap.dedent(
             """\
             This field is similar to Suggests but works in the opposite direction. It is used to declare that
@@ -1606,6 +1841,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Provides",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Additional packages/versions this package dependency-wise satisfies",
         hover_text=textwrap.dedent(
             """\
             Declare that this package also provides one or more other packages. This means that this package can
@@ -1648,6 +1884,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Conflicts",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Packages that this package is not co-installable with",
         hover_text=textwrap.dedent(
             """\
             **Warning**: *You may be looking for Breaks instead of Conflicts*.
@@ -1675,6 +1912,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Breaks",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="Packages/versions that do not work with this package",
         hover_text=textwrap.dedent(
             """\
             This package cannot be installed together with the packages listed in the `Breaks` field.
@@ -1719,6 +1957,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Replaces",
         FieldValueClass.COMMA_SEPARATED_LIST,
+        synopsis_doc="This package replaces content from these packages/versions",
         hover_text=textwrap.dedent(
             """\
             This package either replaces another package or overwrites files that used to be provided by
@@ -1745,6 +1984,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Build-Profiles",
         FieldValueClass.BUILD_PROFILES_LIST,
+        synopsis_doc="Conditionally build this package",
         hover_text=textwrap.dedent(
             """\
             **Advanced field**. *This field covers an advanced topic. If you are new to packaging, you are*
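
All of the relationship fields in this stretch of the table (Pre-Depends, Depends, Recommends, ..., Replaces) share the same comma-separated relation syntax with "|" alternatives, version constraints, and architecture qualifiers. A short parsing sketch, assuming python-debian's PkgRelation helper:

    from debian.deb822 import PkgRelation

    groups = PkgRelation.parse_relations("libc6 (>= 2.36), foo | bar [amd64]")
    for group in groups:  # one group per comma; members are "|" alternatives
        for alt in group:
            print(alt["name"], alt.get("version"), alt.get("arch"))
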
@@ -1780,6 +2020,7 @@ BINARY_FIELDS = _fields(
         inherits_from_source=True,
         known_values=ALL_SECTIONS,
         unknown_value_diagnostic_severity=DiagnosticSeverity.Warning,
+        synopsis_doc="Which section this package should be in",
         hover_text=textwrap.dedent(
             """\
             Define the section for this package.
@@ -1801,6 +2042,7 @@ BINARY_FIELDS = _fields(
         missing_field_severity=DiagnosticSeverity.Error,
         inherits_from_source=True,
         known_values=ALL_PRIORITIES,
+        synopsis_doc="The package's priority (Policy term)",
         hover_text=textwrap.dedent(
             """\
             Define the priority of this package.
@@ -1892,6 +2134,7 @@ BINARY_FIELDS = _fields(
                 ),
             ),
         ),
+        synopsis_doc="**Advanced field**: How this package interacts with multi-arch",
         hover_text=textwrap.dedent(
             """\
             **Advanced field**. *This field covers an advanced topic. If you are new to packaging, you are*
@@ -1921,7 +2164,8 @@ BINARY_FIELDS = _fields(
             * If you have an architecture dependent package, where everything is installed in
               `/usr/lib/${DEB_HOST_MULTIARCH}` (plus a bit of standard documentation in `/usr/share/doc`), then
-              you *probably* want `Multi-Arch: same`
+              you *probably* want `Multi-Arch: same`. Note that `debputy` automatically detects the most common
+              variants of this case and sets the field for you.
 
             * If none of the above applies, then omit the field unless you know what you are doing or you are
               receiving advice from a Multi-Arch expert.
@@ -2001,6 +2245,7 @@ BINARY_FIELDS = _fields(
             _udeb_only_field_validation,
             _each_value_match_regex_validation(re.compile(r"^[1-9]\d{3,4}$")),
         ),
+        synopsis_doc="(udeb-only) Package's order in the d-i menu",
         hover_text=textwrap.dedent(
             """\
             This field is only relevant for `udeb` packages (debian-installer).
@@ -2034,6 +2279,7 @@ BINARY_FIELDS = _fields(
                 hover_text="The package should be compiled for `DEB_TARGET_ARCH`.",
             ),
         ),
+        synopsis_doc="(Special purpose) For cross-compiling cross-compilers",
         hover_text=textwrap.dedent(
             """\
             **Special-purpose only**. *This field is a special purpose field and is rarely needed.*
@@ -2064,6 +2310,7 @@ BINARY_FIELDS = _fields(
         "X-Time64-Compat",
         FieldValueClass.SINGLE_VALUE,
         custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX),
+        synopsis_doc="(Special purpose) Compat name for time64_t transition",
         hover_text=textwrap.dedent(
             """\
             Special purpose field related to the 64-bit time transition.
@@ -2077,6 +2324,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "Homepage",
         FieldValueClass.SINGLE_VALUE,
+        synopsis_doc="(Special purpose) Upstream homepage URL for this binary package",
         hover_text=textwrap.dedent(
             """\
             Link to the upstream homepage for this binary package.
@@ -2095,6 +2343,7 @@ BINARY_FIELDS = _fields(
         spellcheck_value=True,
         # It will build just fine. But no one will know what it is for, so it probably won't be installed
         missing_field_severity=DiagnosticSeverity.Warning,
+        synopsis_doc="Package synopsis and description",
         hover_text=textwrap.dedent(
             """\
             A human-readable description of the package. This field consists of two related but distinct parts.
@@ -2140,6 +2389,7 @@ BINARY_FIELDS = _fields(
         "XB-Cnf-Visible-Pkgname",
         FieldValueClass.SINGLE_VALUE,
         custom_field_check=_each_value_match_regex_validation(PKGNAME_REGEX),
+        synopsis_doc="(Special purpose) Hint for `command-not-found`",
         hover_text=textwrap.dedent(
             """\
             **Special-case field**: *This field is only useful in very special circumstances.*
@@ -2168,6 +2418,7 @@ BINARY_FIELDS = _fields(
     DctrlKnownField(
         "X-DhRuby-Root",
         FieldValueClass.SINGLE_VALUE,
+        synopsis_doc="For multi-binary layout with `dh_ruby`",
         hover_text=textwrap.dedent(
             """\
             Used by `dh_ruby` to request "multi-binary" layout and where the root for the given
@@ -2624,6 +2875,7 @@ _DTESTSCTRL_FIELDS = _fields(
         FieldValueClass.SPACE_SEPARATED_LIST,
         unknown_value_diagnostic_severity=None,
         known_values=_allowed_values(*dpkg_arch_and_wildcards()),
+        synopsis_doc="Only run these tests on specific architectures",
         hover_text=textwrap.dedent(
             """\
             When package tests are only supported on a limited set of
@@ -2641,6 +2893,7 @@ _DTESTSCTRL_FIELDS = _fields(
     Deb822KnownField(
         "Classes",
         FieldValueClass.FREE_TEXT_FIELD,
+        synopsis_doc="Hardware-related tagging",
         hover_text=textwrap.dedent(
             """\
             Most package tests should work in a minimal environment and are
@@ -2663,6 +2916,7 @@ _DTESTSCTRL_FIELDS = _fields(
         "Depends",
         FieldValueClass.COMMA_SEPARATED_LIST,
         default_value="@",
+        synopsis_doc="Dependencies for running the tests",
         hover_text="""\
 Declares that the specified packages must be installed for the test
 to go ahead. This supports all features of dpkg dependencies, including
@@ -3019,6 +3273,7 @@ _DTESTSCTRL_FIELDS = _fields(
                 ),
             ),
         ),
+        synopsis_doc="Test restrictions and requirements",
         hover_text=textwrap.dedent(
             """\
             Declares some restrictions or problems with the tests defined in
@@ -3035,6 +3290,7 @@ _DTESTSCTRL_FIELDS = _fields(
     Deb822KnownField(
         "Tests",
         FieldValueClass.COMMA_OR_SPACE_SEPARATED_LIST,
+        synopsis_doc="List of test scripts to run",
         hover_text=textwrap.dedent(
             """\
             This field names the tests which are defined by this stanza, and map
@@ -3051,6 +3307,7 @@ _DTESTSCTRL_FIELDS = _fields(
     Deb822KnownField(
         "Test-Command",
         FieldValueClass.FREE_TEXT_FIELD,
+        synopsis_doc="Single test command",
         hover_text=textwrap.dedent(
             """\
             If your test only contains a shell command or two, or you want to
@@ -3069,6 +3326,8 @@ _DTESTSCTRL_FIELDS = _fields(
     Deb822KnownField(
         "Test-Directory",
         FieldValueClass.FREE_TEXT_FIELD,  # TODO: Single path
+        default_value="debian/tests",
+        synopsis_doc="The directory containing the tests listed in `Tests`",
         hover_text=textwrap.dedent(
             """\
             Replaces the path segment `debian/tests` in the filenames of the
@@ -3190,7 +3449,9 @@ _DTESTSCTRL_STANZA = DTestsCtrlStanzaMetadata("Tests", _DTESTSCTRL_FIELDS)
 
 
 class Dep5FileMetadata(Deb822FileMetadata[Dep5StanzaMetadata]):
-    def classify_stanza(self, stanza: Deb822ParagraphElement, stanza_idx: int) -> S:
+    def classify_stanza(
+        self, stanza: Deb822ParagraphElement, stanza_idx: int
+    ) -> Dep5StanzaMetadata:
         if stanza_idx == 0:
             return _DEP5_HEADER_STANZA
         if stanza_idx > 0:
@@ -3199,19 +3460,19 @@ class Dep5FileMetadata(Deb822FileMetadata[Dep5StanzaMetadata]):
             return _DEP5_LICENSE_STANZA
         raise ValueError("The stanza_idx must be 0 or greater")
 
-    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> S:
+    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> Dep5StanzaMetadata:
         if stanza_idx == 0:
             return _DEP5_HEADER_STANZA
         if stanza_idx > 0:
             return _DEP5_FILES_STANZA
         raise ValueError("The stanza_idx must be 0 or greater")
 
-    def stanza_types(self) -> Iterable[S]:
+    def stanza_types(self) -> Iterable[Dep5StanzaMetadata]:
         yield _DEP5_HEADER_STANZA
         yield _DEP5_FILES_STANZA
         yield _DEP5_LICENSE_STANZA
 
-    def __getitem__(self, item: str) -> S:
+    def __getitem__(self, item: str) -> Dep5StanzaMetadata:
         if item == "Header":
             return _DEP5_FILES_STANZA
         if item == "Files":
@@ -3222,18 +3483,20 @@ class Dep5FileMetadata(Deb822FileMetadata[Dep5StanzaMetadata]):
 
 
 class DctrlFileMetadata(Deb822FileMetadata[DctrlStanzaMetadata]):
-    def guess_stanza_classification_by_idx(self, stanza_idx: int) -> S:
+    def guess_stanza_classification_by_idx(
+        self, stanza_idx: int
+    ) -> DctrlStanzaMetadata:
         if stanza_idx == 0:
             return _DCTRL_SOURCE_STANZA
         if stanza_idx > 0:
             return _DCTRL_PACKAGE_STANZA
         raise ValueError("The stanza_idx must be 0 or greater")
 
-    def stanza_types(self) -> Iterable[S]:
+    def stanza_types(self) -> Iterable[DctrlStanzaMetadata]:
         yield _DCTRL_SOURCE_STANZA
         yield _DCTRL_PACKAGE_STANZA
 
-    def __getitem__(self, item: str) -> S:
+    def __getitem__(self, item: str) -> DctrlStanzaMetadata:
         if item == "Source":
             return _DCTRL_SOURCE_STANZA
         if item == "Package":
diff --git a/src/debputy/lsp/lsp_debian_copyright.py b/src/debputy/lsp/lsp_debian_copyright.py
index b21cc79..b037792 100644
--- a/src/debputy/lsp/lsp_debian_copyright.py
+++ b/src/debputy/lsp/lsp_debian_copyright.py
@@ -8,8 +8,10 @@ from typing import (
     Iterable,
     Mapping,
     List,
+    Dict,
 )
 
+from debputy.lsp.debputy_ls import DebputyLanguageServer
 from lsprotocol.types import (
     DiagnosticSeverity,
     Range,
@@ -51,6 +53,7 @@ from debputy.lsp.lsp_generic_deb822 import (
     deb822_hover,
     deb822_folding_ranges,
     deb822_semantic_tokens_full,
+    deb822_token_iter,
 )
 from debputy.lsp.quickfixes import (
     propose_correct_text_quick_fix,
@@ -105,7 +108,7 @@ lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)
 
 @lsp_hover(_LANGUAGE_IDS)
 def _debian_copyright_hover(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: HoverParams,
 ) -> Optional[Hover]:
     return deb822_hover(ls, params, _DEP5_FILE_METADATA)
@@ -113,7 +116,7 @@ def _debian_copyright_hover(
 
 @lsp_completer(_LANGUAGE_IDS)
 def _debian_copyright_completions(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: CompletionParams,
 ) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
     return deb822_completer(ls, params, _DEP5_FILE_METADATA)
@@ -121,37 +124,12 @@ def _debian_copyright_completions(
 
 @lsp_folding_ranges(_LANGUAGE_IDS)
 def _debian_copyright_folding_ranges(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: FoldingRangeParams,
 ) -> Optional[Sequence[FoldingRange]]:
     return deb822_folding_ranges(ls, params, _DEP5_FILE_METADATA)
 
 
-def _deb822_token_iter(
-    tokens: Iterable[Deb822Token],
-) -> Iterator[Tuple[Deb822Token, int, int, int, int, int]]:
-    line_no = 0
-    line_offset = 0
-
-    for token in tokens:
-        start_line = line_no
-        start_line_offset = line_offset
-
-        newlines = token.text.count("\n")
-        line_no += newlines
-        text_len = len(token.text)
-        if newlines:
-            if token.text.endswith("\n"):
-                line_offset = 0
-            else:
-                # -2, one to remove the "\n" and one to get 0-offset
-                line_offset = text_len - token.text.rindex("\n") - 2
-        else:
-            line_offset += text_len
-
-        yield token, start_line, start_line_offset, line_no, line_offset
-
-
 def _paragraph_representation_field(
     paragraph: Deb822ParagraphElement,
 ) -> Deb822KeyValuePairElement:
@@ -196,7 +174,7 @@ def _diagnostics_for_paragraph(
             )
         )
 
-    seen_fields = {}
+    seen_fields: Dict[str, Tuple[str, str, Range, List[Range]]] = {}
     for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement):
         field_name_token = kvpair.field_token
@@ -306,12 +284,12 @@ def _diagnostics_for_paragraph(
             )
             if pos:
                 word_pos_te = TEPosition(0, pos).relative_to(word_pos_te)
-            word_range = TERange(
+            word_range_te = TERange(
                 START_POSITION,
                 TEPosition(0, endpos - pos),
             )
             word_range_server_units = te_range_to_lsp(
-                TERange.from_position_and_size(word_pos_te, word_range)
+                TERange.from_position_and_size(word_pos_te, word_range_te)
             )
             word_range = position_codec.range_to_client_units(
                 lines,
@@ -387,7 +365,7 @@ def _scan_for_syntax_errors_and_token_level_diagnostics(
         start_offset,
         end_line,
         end_offset,
-    ) in _deb822_token_iter(deb822_file.iter_tokens()):
+    ) in deb822_token_iter(deb822_file.iter_tokens()):
         if token.is_error:
             first_error = min(first_error, start_line)
             start_pos = Position(
@@ -444,7 +422,7 @@ def _lint_debian_copyright(
     lines = lint_state.lines
     position_codec = lint_state.position_codec
     doc_reference = lint_state.doc_uri
-    diagnostics = []
+    diagnostics: List[Diagnostic] = []
     deb822_file = parse_deb822_file(
         lines,
         accept_files_with_duplicated_fields=True,
@@ -494,8 +472,8 @@ def _lint_debian_copyright(
 
 @lsp_semantic_tokens_full(_LANGUAGE_IDS)
-def _semantic_tokens_full(
-    ls: "LanguageServer",
+def _debian_copyright_semantic_tokens_full(
+    ls: "DebputyLanguageServer",
     request: SemanticTokensParams,
 ) -> Optional[SemanticTokens]:
     return deb822_semantic_tokens_full(
diff --git a/src/debputy/lsp/lsp_debian_debputy_manifest.py b/src/debputy/lsp/lsp_debian_debputy_manifest.py
index 03581be..74b5d7b 100644
--- a/src/debputy/lsp/lsp_debian_debputy_manifest.py
+++ b/src/debputy/lsp/lsp_debian_debputy_manifest.py
@@ -134,7 +134,7 @@ def _lint_debian_debputy_manifest(
     path = lint_state.path
     if not is_valid_file(path):
         return None
-    diagnostics = []
+    diagnostics: List[Diagnostic] = []
     try:
         content = MANIFEST_YAML.load("".join(lines))
     except MarkedYAMLError as e:
@@ -922,7 +922,7 @@ def debputy_manifest_hover(
     )
     if km is None:
         _info("No keyword match")
-        return
+        return None
     parser, plugin_metadata, at_depth_idx = km
     _info(f"Match leaf parser {at_depth_idx}/{len(segments)} -- {parser.__class__}")
     hover_doc_text = resolve_hover_text(
@@ -1020,19 +1020,14 @@ def resolve_hover_text(
     return hover_doc_text
 
 
-def _hover_doc(ls: "LanguageServer", hover_doc_text: Optional[str]) -> Optional[Hover]:
+def _hover_doc(
+    ls: "DebputyLanguageServer", hover_doc_text: Optional[str]
+) -> Optional[Hover]:
     if hover_doc_text is None:
         return None
-    try:
-        supported_formats = ls.client_capabilities.text_document.hover.content_format
-    except AttributeError:
-        supported_formats = []
-    markup_kind = MarkupKind.Markdown
-    if markup_kind not in supported_formats:
-        markup_kind = MarkupKind.PlainText
     return Hover(
         contents=MarkupContent(
-            kind=markup_kind,
+            kind=ls.hover_markup_format(MarkupKind.Markdown, MarkupKind.PlainText),
             value=hover_doc_text,
         ),
     )
diff --git a/src/debputy/lsp/lsp_debian_rules.py b/src/debputy/lsp/lsp_debian_rules.py
index b44fad4..7f5aef9 100644
--- a/src/debputy/lsp/lsp_debian_rules.py
+++ b/src/debputy/lsp/lsp_debian_rules.py
@@ -12,6 +12,7 @@ from typing import (
     List,
     Iterator,
     Tuple,
+    Set,
 )
 
 from lsprotocol.types import (
@@ -238,7 +239,7 @@ def _lint_debian_rules_impl(
     source_root = os.path.dirname(os.path.dirname(path))
     if source_root == "":
         source_root = "."
-    diagnostics = []
+    diagnostics: List[Diagnostic] = []
 
     make_error = _run_make_dryrun(source_root, lines)
     if make_error is not None:
@@ -316,7 +317,7 @@ def _lint_debian_rules_impl(
 
 def _all_dh_commands(source_root: str, lines: List[str]) -> Optional[Sequence[str]]:
-    drules_sequences = set()
+    drules_sequences: Set[str] = set()
     parse_drules_for_addons(lines, drules_sequences)
     cmd = ["dh_assistant", "list-commands", "--output-format=json"]
     if drules_sequences:
@@ -369,6 +370,8 @@ def _debian_rules_completions(
     source_root = os.path.dirname(os.path.dirname(doc.path))
 
     all_commands = _all_dh_commands(source_root, lines)
+    if all_commands is None:
+        return None
     items = [CompletionItem(ht) for c in all_commands for ht in _as_hook_targets(c)]
     return items
diff --git a/src/debputy/lsp/lsp_debian_tests_control.py b/src/debputy/lsp/lsp_debian_tests_control.py
index 27221f6..cc27579 100644
--- a/src/debputy/lsp/lsp_debian_tests_control.py
+++ b/src/debputy/lsp/lsp_debian_tests_control.py
@@ -8,8 +8,11 @@ from typing import (
     Iterable,
     Mapping,
     List,
+    Set,
+    Dict,
 )
 
+from debputy.lsp.debputy_ls import DebputyLanguageServer
 from lsprotocol.types import (
     DiagnosticSeverity,
     Range,
@@ -49,6 +52,7 @@ from debputy.lsp.lsp_generic_deb822 import (
     deb822_hover,
     deb822_folding_ranges,
     deb822_semantic_tokens_full,
+    deb822_token_iter,
 )
 from debputy.lsp.quickfixes import (
     propose_correct_text_quick_fix,
@@ -103,7 +107,7 @@ lsp_standard_handler(_LANGUAGE_IDS, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL)
 
 @lsp_hover(_LANGUAGE_IDS)
 def debian_tests_control_hover(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: HoverParams,
 ) -> Optional[Hover]:
     return deb822_hover(ls, params, _DEP5_FILE_METADATA)
@@ -111,7 +115,7 @@ def debian_tests_control_hover(
 
 @lsp_completer(_LANGUAGE_IDS)
 def debian_tests_control_completions(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: CompletionParams,
 ) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
     return deb822_completer(ls, params, _DEP5_FILE_METADATA)
@@ -119,37 +123,12 @@ def debian_tests_control_completions(
 
 @lsp_folding_ranges(_LANGUAGE_IDS)
 def debian_tests_control_folding_ranges(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: FoldingRangeParams,
 ) -> Optional[Sequence[FoldingRange]]:
     return deb822_folding_ranges(ls, params, _DEP5_FILE_METADATA)
 
 
-def _deb822_token_iter(
-    tokens: Iterable[Deb822Token],
-) -> Iterator[Tuple[Deb822Token, int, int, int, int, int]]:
-    line_no = 0
-    line_offset = 0
-
-    for token in tokens:
-        start_line = line_no
-        start_line_offset = line_offset
-
-        newlines = token.text.count("\n")
-        line_no += newlines
-        text_len = len(token.text)
-        if newlines:
-            if token.text.endswith("\n"):
-                line_offset = 0
-            else:
-                # -2, one to remove the "\n" and one to get 0-offset
-                line_offset = text_len - token.text.rindex("\n") - 2
-        else:
-            line_offset += text_len
-
-        yield token, start_line, start_line_offset, line_no, line_offset
-
-
 def _paragraph_representation_field(
     paragraph: Deb822ParagraphElement,
 ) -> Deb822KeyValuePairElement:
@@ -211,7 +190,7 @@ def _diagnostics_for_paragraph(
             )
         )
 
-    seen_fields = {}
+    seen_fields: Dict[str, Tuple[str, str, Range, List[Range]]] = {}
     for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement):
         field_name_token = kvpair.field_token
@@ -384,7 +363,7 @@ def _scan_for_syntax_errors_and_token_level_diagnostics(
         start_offset,
         end_line,
         end_offset,
-    ) in _deb822_token_iter(deb822_file.iter_tokens()):
+    ) in deb822_token_iter(deb822_file.iter_tokens()):
         if token.is_error:
             first_error = min(first_error, start_line)
             start_pos = Position(
@@ -441,7 +420,7 @@ def _lint_debian_tests_control(
     lines = lint_state.lines
     position_codec = lint_state.position_codec
     doc_reference = lint_state.doc_uri
-    diagnostics = []
+    diagnostics: List[Diagnostic] = []
     deb822_file = parse_deb822_file(
         lines,
         accept_files_with_duplicated_fields=True,
@@ -475,8 +454,8 @@ def _lint_debian_tests_control(
 
 @lsp_semantic_tokens_full(_LANGUAGE_IDS)
-def _semantic_tokens_full(
-    ls: "LanguageServer",
+def _debian_tests_control_semantic_tokens_full(
+    ls: "DebputyLanguageServer",
     request: SemanticTokensParams,
 ) -> Optional[SemanticTokens]:
     return deb822_semantic_tokens_full(
diff --git a/src/debputy/lsp/lsp_dispatch.py b/src/debputy/lsp/lsp_dispatch.py
index b63f30c..5d09a44 100644
--- a/src/debputy/lsp/lsp_dispatch.py
+++ b/src/debputy/lsp/lsp_dispatch.py
@@ -10,6 +10,7 @@ from typing import (
     Mapping,
     List,
     Tuple,
+    Literal,
 )
 
 from lsprotocol.types import (
@@ -75,21 +76,22 @@ def is_doc_at_version(uri: str, version: int) -> bool:
     return dv == version
 
 
-def determine_language_id(doc: "TextDocument") -> Tuple[str, str]:
-    lang_id = doc.language_id
-    if lang_id and not lang_id.isspace():
-        return "declared", lang_id
-    path = doc.path
-    try:
-        last_idx = path.rindex("debian/")
-    except ValueError:
-        return "filename", os.path.basename(path)
-    guess_language_id = path[last_idx:]
-    return "filename", guess_language_id
+@DEBPUTY_LANGUAGE_SERVER.feature(TEXT_DOCUMENT_DID_OPEN)
+async def _open_document(
+    ls: "DebputyLanguageServer",
+    params: DidOpenTextDocumentParams,
+) -> None:
+    await _open_or_changed_document(ls, params)
 
 
-@DEBPUTY_LANGUAGE_SERVER.feature(TEXT_DOCUMENT_DID_OPEN)
 @DEBPUTY_LANGUAGE_SERVER.feature(TEXT_DOCUMENT_DID_CHANGE)
+async def _changed_document(
+    ls: "DebputyLanguageServer",
+    params: DidChangeTextDocumentParams,
+) -> None:
+    await _open_or_changed_document(ls, params)
+
+
 async def _open_or_changed_document(
     ls: "DebputyLanguageServer",
     params: Union[DidOpenTextDocumentParams, DidChangeTextDocumentParams],
@@ -99,7 +101,7 @@ async def _open_or_changed_document(
     doc = ls.workspace.get_text_document(doc_uri)
     _DOCUMENT_VERSION_TABLE[doc_uri] = version
 
-    id_source, language_id = determine_language_id(doc)
+    id_source, language_id = ls.determine_language_id(doc)
     handler = DIAGNOSTIC_HANDLERS.get(language_id)
     if handler is None:
         _info(
@@ -214,7 +216,7 @@ def _dispatch_standard_handler(
 ) -> R:
     doc = ls.workspace.get_text_document(doc_uri)
 
-    id_source, language_id = determine_language_id(doc)
+    id_source, language_id = ls.determine_language_id(doc)
     handler = handler_table.get(language_id)
     if handler is None:
         _info(
diff --git a/src/debputy/lsp/lsp_features.py b/src/debputy/lsp/lsp_features.py
index 7a1110d..e7b4445 100644
--- a/src/debputy/lsp/lsp_features.py
+++ b/src/debputy/lsp/lsp_features.py
@@ -1,7 +1,16 @@
 import collections
 import inspect
 import sys
-from typing import Callable, TypeVar, Sequence, Union, Dict, List, Optional
+from typing import (
+    Callable,
+    TypeVar,
+    Sequence,
+    Union,
+    Dict,
+    List,
+    Optional,
+    AsyncIterator,
+)
 
 from lsprotocol.types import (
     TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL,
@@ -29,14 +38,23 @@ from debputy.lsp.text_util import on_save_trim_end_of_line_whitespace
 
 C = TypeVar("C", bound=Callable)
 
 SEMANTIC_TOKENS_LEGEND = SemanticTokensLegend(
-    token_types=["keyword", "enumMember"],
+    token_types=["keyword", "enumMember", "comment"],
    token_modifiers=[],
 )
 SEMANTIC_TOKEN_TYPES_IDS = {
     t: idx for idx, t in enumerate(SEMANTIC_TOKENS_LEGEND.token_types)
 }
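
Background for the semantic-token changes in this file and in lsp_generic_deb822.py below: LSP clients receive semantic tokens as a flat list of integers, five per token, where the line and start column are deltas against the previous token (the column delta resets on a new line). A minimal encoder sketch of that wire format (hypothetical helper, mirroring what the new SemanticTokenState.emit_token does):

    from typing import List, Tuple

    def encode_semantic_tokens(tokens: List[Tuple[int, int, int, int]]) -> List[int]:
        # tokens: (line, start_col, length, token_type_id), sorted by position.
        data: List[int] = []
        prev_line = prev_col = 0
        for line, col, length, token_type in tokens:
            line_delta = line - prev_line
            col_delta = col - prev_col if line_delta == 0 else col
            data += [line_delta, col_delta, length, token_type, 0]  # 0 = no modifiers
            prev_line, prev_col = line, col
        return data

    # Two tokens on line 3 (columns 0 and 9) and one on line 4:
    assert encode_semantic_tokens([(3, 0, 8, 0), (3, 9, 4, 1), (4, 0, 6, 2)]) == [
        3, 0, 8, 0, 0,
        0, 9, 4, 1, 0,
        1, 0, 6, 2, 0,
    ]
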
-DIAGNOSTIC_HANDLERS = {}
+DIAGNOSTIC_HANDLERS: Dict[
+    str,
+    Callable[
+        [
+            "DebputyLanguageServer",
+            Union["DidOpenTextDocumentParams", "DidChangeTextDocumentParams"],
+        ],
+        AsyncIterator[Optional[List[Diagnostic]]],
+    ],
+] = {}
 COMPLETER_HANDLERS = {}
 HOVER_HANDLERS = {}
 CODE_ACTION_HANDLERS = {}
diff --git a/src/debputy/lsp/lsp_generic_deb822.py b/src/debputy/lsp/lsp_generic_deb822.py
index ec7b979..e2124e4 100644
--- a/src/debputy/lsp/lsp_generic_deb822.py
+++ b/src/debputy/lsp/lsp_generic_deb822.py
@@ -1,3 +1,4 @@
+import dataclasses
 import re
 from typing import (
     Optional,
@@ -13,6 +14,7 @@ from typing import (
     Callable,
 )
 
+from debputy.lsp.debputy_ls import DebputyLanguageServer
 from debputy.lsp.lsp_debian_control_reference_data import (
     Deb822FileMetadata,
     Deb822KnownField,
@@ -22,11 +24,13 @@ from debputy.lsp.lsp_debian_control_reference_data import (
     S,
 )
 from debputy.lsp.lsp_features import SEMANTIC_TOKEN_TYPES_IDS
-from debputy.lsp.text_util import normalize_dctrl_field_name
+from debputy.lsp.text_util import normalize_dctrl_field_name, te_position_to_lsp
 from debputy.lsp.vendoring._deb822_repro import parse_deb822_file
 from debputy.lsp.vendoring._deb822_repro.parsing import (
     Deb822KeyValuePairElement,
     LIST_SPACE_SEPARATED_INTERPRETATION,
+    Deb822ParagraphElement,
+    Deb822ValueLineElement,
 )
 from debputy.lsp.vendoring._deb822_repro.tokens import tokenize_deb822_file, Deb822Token
 from debputy.util import _info
@@ -64,7 +68,7 @@ def _at_cursor(
 ) -> Tuple[Position, Optional[str], str, bool, int, Set[str]]:
     paragraph_no = -1
     paragraph_started = False
-    seen_fields = set()
+    seen_fields: Set[str] = set()
     last_field_seen: Optional[str] = None
     current_field: Optional[str] = None
     server_position = doc.position_codec.position_from_client_units(
@@ -116,7 +120,7 @@ def _at_cursor(
 
 def deb822_completer(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: CompletionParams,
     file_metadata: Deb822FileMetadata[Any],
 ) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
@@ -142,6 +146,7 @@ def deb822_completer(
     else:
         _info("Completing field name")
         items = _complete_field_name(
+            ls,
             stanza_metadata,
             seen_fields,
         )
@@ -152,7 +157,7 @@ def deb822_completer(
 
 def deb822_hover(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: HoverParams,
     file_metadata: Deb822FileMetadata[S],
     *,
@@ -170,7 +175,7 @@ def deb822_hover(
             Optional[Hover],
         ]
     ] = None,
-) -> Optional[Union[Hover, str]]:
+) -> Optional[Hover]:
     doc = ls.workspace.get_text_document(params.text_document.uri)
     lines = doc.lines
     server_pos, current_field, word_at_position, in_value, paragraph_no, _ = _at_cursor(
@@ -220,27 +225,17 @@ def deb822_hover(
 
     if hover_text is None:
         return None
-
-    try:
-        supported_formats = ls.client_capabilities.text_document.hover.content_format
-    except AttributeError:
-        supported_formats = []
-
-    _info(f"Supported formats {supported_formats}")
-    markup_kind = MarkupKind.Markdown
-    if markup_kind not in supported_formats:
-        markup_kind = MarkupKind.PlainText
     return Hover(
         contents=MarkupContent(
-            kind=markup_kind,
+            kind=ls.hover_markup_format(MarkupKind.Markdown, MarkupKind.PlainText),
             value=hover_text,
         )
     )
 
 
-def _deb822_token_iter(
+def deb822_token_iter(
     tokens: Iterable[Deb822Token],
-) -> Iterator[Tuple[Deb822Token, int, int, int, int, int]]:
+) -> Iterator[Tuple[Deb822Token, int, int, int, int]]:
     line_no = 0
     line_offset = 0
 
@@ -264,7 +259,7 @@ def deb822_token_iter(
 
 def deb822_folding_ranges(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     params: FoldingRangeParams,
     # Unused for now: might be relevant for supporting folding for some fields
     _file_metadata: Deb822FileMetadata[Any],
@@ -278,7 +273,7 @@ def deb822_folding_ranges(
         start_offset,
         end_line,
         end_offset,
-    ) in _deb822_token_iter(tokenize_deb822_file(doc.lines)):
+    ) in deb822_token_iter(tokenize_deb822_file(doc.lines)):
         if token.is_comment:
             if comment_start < 0:
                 comment_start = start_line
@@ -295,90 +290,170 @@ def deb822_folding_ranges(
     return folding_ranges
 
 
+@dataclasses.dataclass(slots=True)
+class SemanticTokenState:
+    ls: "DebputyLanguageServer"
+    file_metadata: Deb822FileMetadata[Any]
+    doc: "TextDocument"
+    lines: List[str]
+    tokens: List[int]
+    keyword_token_code: int
+    known_value_token_code: int
+    comment_token_code: int
+    _previous_line: int = 0
+    _previous_col: int = 0
+
+    def emit_token(
+        self,
+        start_pos: Position,
+        len_client_units: int,
+        token_code: int,
+        *,
+        token_modifiers: int = 0,
+    ) -> None:
+        line_delta = start_pos.line - self._previous_line
+        self._previous_line = start_pos.line
+        previous_col = self._previous_col
+
+        if line_delta:
+            previous_col = 0
+
+        column_delta = start_pos.character - previous_col
+        self._previous_col = start_pos.character
+
+        tokens = self.tokens
+        tokens.append(line_delta)  # Line delta
+        tokens.append(column_delta)  # Token column delta
+        tokens.append(len_client_units)  # Token length
+        tokens.append(token_code)
+        tokens.append(token_modifiers)
+
+
+def _deb822_paragraph_semantic_tokens_full(
+    sem_token_state: SemanticTokenState,
+    stanza: Deb822ParagraphElement,
+    stanza_idx: int,
+) -> None:
+    doc = sem_token_state.doc
+    keyword_token_code = sem_token_state.keyword_token_code
+    known_value_token_code = sem_token_state.known_value_token_code
+    comment_token_code = sem_token_state.comment_token_code
+
+    stanza_position = stanza.position_in_file()
+    stanza_metadata = sem_token_state.file_metadata.classify_stanza(
+        stanza,
+        stanza_idx=stanza_idx,
+    )
+    for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement):
+        field_start = kvpair.key_position_in_stanza().relative_to(stanza_position)
+        comment = kvpair.comment_element
+        if comment:
+            comment_start_line = field_start.line_position - len(comment)
+            for comment_line_no, comment_token in enumerate(
+                comment.iter_parts(),
+                start=comment_start_line,
+            ):
+                assert comment_token.is_comment
+                assert isinstance(comment_token, Deb822Token)
+                sem_token_state.emit_token(
+                    Position(comment_line_no, 0),
+                    len(comment_token.text.rstrip()),
+                    comment_token_code,
+                )
+        field_size = doc.position_codec.client_num_units(kvpair.field_name)
+
+        sem_token_state.emit_token(
+            te_position_to_lsp(field_start),
+            field_size,
+            keyword_token_code,
+        )
+
+        known_field: Optional[Deb822KnownField] = stanza_metadata.get(kvpair.field_name)
+        if known_field is not None:
+            if known_field.spellcheck_value:
+                continue
+            known_values: Container[str] = known_field.known_values or frozenset()
+            interpretation = known_field.field_value_class.interpreter()
+        else:
+            known_values = frozenset()
+            interpretation = None
+
+        value_element_pos = kvpair.value_position_in_stanza().relative_to(
+            stanza_position
+        )
+        if interpretation is None:
+            # TODO: Emit tokens for value comments of unknown fields.
+            continue
+        else:
+            parts = kvpair.interpret_as(interpretation).iter_parts()
+            for te in parts:
+                if te.is_whitespace:
+                    continue
+                if te.is_separator:
+                    continue
+                value_range_in_parent_te = te.range_in_parent()
+                value_range_te = value_range_in_parent_te.relative_to(value_element_pos)
+                value = te.convert_to_text()
+                if te.is_comment:
+                    token_type = comment_token_code
+                    value = value.rstrip()
+                elif value in known_values:
+                    token_type = known_value_token_code
+                else:
+                    continue
+                value_len = doc.position_codec.client_num_units(value)
+
+                sem_token_state.emit_token(
+                    te_position_to_lsp(value_range_te.start_pos),
+                    value_len,
+                    token_type,
+                )
+
+
 def deb822_semantic_tokens_full(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
     request: SemanticTokensParams,
     file_metadata: Deb822FileMetadata[Any],
 ) -> Optional[SemanticTokens]:
     doc = ls.workspace.get_text_document(request.text_document.uri)
+    position_codec = doc.position_codec
     lines = doc.lines
     deb822_file = parse_deb822_file(
         lines,
         accept_files_with_duplicated_fields=True,
         accept_files_with_error_tokens=True,
     )
-    tokens = []
-    previous_line = 0
-    keyword_token_code = SEMANTIC_TOKEN_TYPES_IDS["keyword"]
-    known_value_token_code = SEMANTIC_TOKEN_TYPES_IDS["enumMember"]
-    no_modifiers = 0
-
-    # TODO: Add comment support; slightly complicated by how we parse the file.
-
-    for stanza_idx, stanza in enumerate(deb822_file):
-        stanza_position = stanza.position_in_file()
-        stanza_metadata = file_metadata.classify_stanza(stanza, stanza_idx=stanza_idx)
-        for kvpair in stanza.iter_parts_of_type(Deb822KeyValuePairElement):
-            kvpair_pos = kvpair.position_in_parent().relative_to(stanza_position)
-            # These two happen to be the same; the indirection is to make it explicit that the two
-            # positions for different tokens are the same.
-            field_position_without_comments = kvpair_pos
-            field_size = doc.position_codec.client_num_units(kvpair.field_name)
-            current_line = field_position_without_comments.line_position
-            line_delta = current_line - previous_line
-            previous_line = current_line
-            tokens.append(line_delta)  # Line delta
-            tokens.append(0)  # Token column delta
-            tokens.append(field_size)  # Token length
-            tokens.append(keyword_token_code)
-            tokens.append(no_modifiers)
-
-            known_field: Optional[Deb822KnownField] = stanza_metadata.get(
-                kvpair.field_name
-            )
-            if (
-                known_field is None
-                or not known_field.known_values
-                or known_field.spellcheck_value
-            ):
-                continue
-
-            if known_field.field_value_class not in (
-                FieldValueClass.SINGLE_VALUE,
-                FieldValueClass.SPACE_SEPARATED_LIST,
-            ):
-                continue
-            value_element_pos = kvpair.value_element.position_in_parent().relative_to(
-                kvpair_pos
-            )
-
-            last_token_start_column = 0
+    tokens: List[int] = []
+    comment_token_code = SEMANTIC_TOKEN_TYPES_IDS["comment"]
+    sem_token_state = SemanticTokenState(
+        ls,
+        file_metadata,
+        doc,
+        lines,
+        tokens,
+        SEMANTIC_TOKEN_TYPES_IDS["keyword"],
+        SEMANTIC_TOKEN_TYPES_IDS["enumMember"],
+        comment_token_code,
+    )
 
-            for value_ref in kvpair.interpret_as(
-                LIST_SPACE_SEPARATED_INTERPRETATION
-            ).iter_value_references():
-                if value_ref.value not in known_field.known_values:
-                    continue
-                value_loc = value_ref.locatable
-                value_range_te = value_loc.range_in_parent().relative_to(
-                    value_element_pos
-                )
-                start_line = value_range_te.start_pos.line_position
-                line_delta = start_line - current_line
-                current_line = start_line
-                if line_delta:
-                    last_token_start_column = 0
-
-                value_start_column = value_range_te.start_pos.cursor_position
-                column_delta = value_start_column - last_token_start_column
-                last_token_start_column = value_start_column
-
-                tokens.append(line_delta)  # Line delta
-                tokens.append(column_delta)  # Token column delta
-                tokens.append(field_size)  # Token length
-                tokens.append(known_value_token_code)
-                tokens.append(no_modifiers)
+    stanza_idx = 0
+    for part in deb822_file.iter_parts():
+        if part.is_comment:
+            pos = part.position_in_file()
+            sem_token_state.emit_token(
+                te_position_to_lsp(pos),
+                # Avoid trailing newline
+                position_codec.client_num_units(part.convert_to_text().rstrip()),
+                comment_token_code,
+            )
+        elif isinstance(part, Deb822ParagraphElement):
+            _deb822_paragraph_semantic_tokens_full(
+                sem_token_state,
+                part,
+                stanza_idx,
+            )
+            stanza_idx += 1
     if not tokens:
         return None
     return SemanticTokens(tokens)
@@ -396,10 +471,14 @@ def _should_complete_field_with_value(cand: Deb822KnownField) -> bool:
 
 def _complete_field_name(
+    ls: "DebputyLanguageServer",
     fields: StanzaMetadata[Any],
     seen_fields: Container[str],
 ) -> Optional[Union[CompletionList, Sequence[CompletionItem]]]:
     items = []
+    markdown_kind = ls.completion_item_document_markup(
+        MarkupKind.Markdown, MarkupKind.PlainText
+    )
     for cand_key, cand in fields.items():
         if cand_key.lower() in seen_fields:
             continue
@@ -409,14 +488,28 @@ def _complete_field_name(
             value = next(iter(v for v in cand.known_values if v != cand.default_value))
             complete_as += value
         tags = []
+        is_deprecated = False
         if cand.replaced_by or cand.deprecated_with_no_replacement:
+            is_deprecated = True
             tags.append(CompletionItemTag.Deprecated)
 
+        doc = cand.hover_text
+        if doc:
+            doc = MarkupContent(
+                value=doc,
+                kind=markdown_kind,
+            )
+        else:
+            doc = None
+
         items.append(
             CompletionItem(
                 name,
                 insert_text=complete_as,
+                deprecated=is_deprecated,
                 tags=tags,
+                detail=cand.synopsis_doc,
+                documentation=doc,
             )
         )
     return items
diff --git a/src/debputy/lsp/lsp_self_check.py b/src/debputy/lsp/lsp_self_check.py
index 61a5733..3c7d2e4 100644
--- a/src/debputy/lsp/lsp_self_check.py
+++ b/src/debputy/lsp/lsp_self_check.py
@@ -83,7 +83,7 @@ def spell_checking() -> bool:
     )
 
 
-def assert_can_start_lsp():
+def assert_can_start_lsp() -> None:
     for self_check in LSP_CHECKS:
         if self_check.is_mandatory and not self_check.test():
             _error(
diff --git a/src/debputy/lsp/quickfixes.py b/src/debputy/lsp/quickfixes.py
index d911961..2d564f4 100644
--- a/src/debputy/lsp/quickfixes.py
+++ b/src/debputy/lsp/quickfixes.py
@@ -17,7 +17,6 @@ from lsprotocol.types import (
     Command,
     CodeActionParams,
     Diagnostic,
-    CodeActionDisabledType,
     TextEdit,
     WorkspaceEdit,
     TextDocumentEdit,
@@ -30,7 +29,10 @@ from lsprotocol.types import (
 from debputy.util import _warn
 
 try:
-    from debian._deb822_repro.locatable import Position as TEPosition, Range as TERange
+    from debputy.lsp.vendoring._deb822_repro.locatable import (
+        Position as TEPosition,
+        Range as TERange,
+    )
 
     from pygls.server import LanguageServer
     from pygls.workspace import TextDocument
@@ -38,7 +40,11 @@ except ImportError:
     pass
 
 
-CodeActionName = Literal["correct-text", "remove-line"]
+CodeActionName = Literal[
+    "correct-text",
+    "remove-line",
+    "insert-text-on-line-after-diagnostic",
+]
 
 
 class CorrectTextCodeAction(TypedDict):
@@ -46,6 +52,11 @@ class CorrectTextCodeAction(TypedDict):
     correct_value: str
 
 
+class InsertTextOnLineAfterDiagnosticCodeAction(TypedDict):
+    code_action: Literal["insert-text-on-line-after-diagnostic"]
+    text_to_insert: str
+
+
 class RemoveLineCodeAction(TypedDict):
     code_action: Literal["remove-line"]
 
@@ -57,6 +68,15 @@ def propose_correct_text_quick_fix(correct_value: str) -> CorrectTextCodeAction:
     }
 
 
+def propose_insert_text_on_line_after_diagnostic_quick_fix(
+    text_to_insert: str,
+) -> InsertTextOnLineAfterDiagnosticCodeAction:
+    return {
+        "code_action": "insert-text-on-line-after-diagnostic",
+        "text_to_insert": text_to_insert,
+    }
+
+
 def propose_remove_line_quick_fix() -> RemoveLineCodeAction:
     return {
         "code_action": "remove-line",
@@ -93,24 +113,64 @@ def _correct_value_code_action(
     diagnostic: Diagnostic,
 ) -> Iterable[Union[CodeAction, Command]]:
     corrected_value = code_action_data["correct_value"]
-    edits = [
-        TextEdit(
-            diagnostic.range,
-            corrected_value,
-        ),
-    ]
+    edit = TextEdit(
+        diagnostic.range,
+        corrected_value,
+    )
     yield CodeAction(
         title=f'Replace with "{corrected_value}"',
         kind=CodeActionKind.QuickFix,
         diagnostics=[diagnostic],
         edit=WorkspaceEdit(
-            changes={code_action_params.text_document.uri: edits},
+            changes={code_action_params.text_document.uri: [edit]},
+            document_changes=[
+                TextDocumentEdit(
+                    text_document=OptionalVersionedTextDocumentIdentifier(
+                        uri=code_action_params.text_document.uri,
+                    ),
+                    edits=[edit],
+                )
+            ],
+        ),
+    )
+
+
+@_code_handler_for("insert-text-on-line-after-diagnostic")
+def _insert_text_code_action(
+    code_action_data: InsertTextOnLineAfterDiagnosticCodeAction,
+    code_action_params: CodeActionParams,
+    diagnostic: Diagnostic,
+) -> Iterable[Union[CodeAction, Command]]:
+    corrected_value = code_action_data["text_to_insert"]
+    line_no = diagnostic.range.end.line
+    if diagnostic.range.end.character > 0:
+        line_no += 1
+    insert_range = Range(
+        Position(
+            line_no,
+            0,
+        ),
+        Position(
+            line_no,
+            0,
+        ),
+    )
+    edit = TextEdit(
+        insert_range,
+        corrected_value,
+    )
+    yield CodeAction(
+        title=f'Insert "{corrected_value}"',
+        kind=CodeActionKind.QuickFix,
+        diagnostics=[diagnostic],
+        edit=WorkspaceEdit(
+            changes={code_action_params.text_document.uri: [edit]},
             document_changes=[
                 TextDocumentEdit(
                     text_document=OptionalVersionedTextDocumentIdentifier(
                         uri=code_action_params.text_document.uri,
                     ),
-                    edits=edits,
+                    edits=[edit],
                 )
             ],
         ),
@@ -126,7 +186,7 @@ def range_compatible_with_remove_line_fix(range_: Range) -> bool:
 
 
 @_code_handler_for("remove-line")
-def _correct_value_code_action(
+def _remove_line_code_action(
     _code_action_data: RemoveLineCodeAction,
     code_action_params: CodeActionParams,
     diagnostic: Diagnostic,
@@ -138,33 +198,31 @@ def _remove_line_code_action(
         )
         return
 
-    edits = [
-        TextEdit(
-            Range(
-                start=Position(
-                    line=start.line,
-                    character=0,
-                ),
-                end=Position(
-                    line=start.line + 1,
-                    character=0,
-                ),
+    edit = TextEdit(
+        Range(
+            start=Position(
+                line=start.line,
+                character=0,
+            ),
+            end=Position(
+                line=start.line + 1,
+                character=0,
             ),
-            "",
         ),
-    ]
+        "",
+    )
     yield CodeAction(
         title="Remove the line",
         kind=CodeActionKind.QuickFix,
         diagnostics=[diagnostic],
         edit=WorkspaceEdit(
-            changes={code_action_params.text_document.uri: edits},
+            changes={code_action_params.text_document.uri: [edit]},
             document_changes=[
                 TextDocumentEdit(
                     text_document=OptionalVersionedTextDocumentIdentifier(
                         uri=code_action_params.text_document.uri,
                     ),
-                    edits=edits,
+                    edits=[edit],
                 )
             ],
         ),
@@ -174,7 +232,7 @@ def _remove_line_code_action(
 def provide_standard_quickfixes_from_diagnostics(
     code_action_params: CodeActionParams,
 ) -> Optional[List[Union[Command, CodeAction]]]:
-    actions = []
+    actions: List[Union[Command, CodeAction]] = []
     for diagnostic in code_action_params.context.diagnostics:
         data = diagnostic.data
         if not isinstance(data, list):
diff --git a/src/debputy/lsp/text_util.py b/src/debputy/lsp/text_util.py
index d66cb28..ef4cd0a 100644
--- a/src/debputy/lsp/text_util.py
+++ b/src/debputy/lsp/text_util.py
@@ -1,4 +1,4 @@
-from typing import List, Optional, Sequence, Union, Iterable
+from typing import List, Optional, Sequence, Union, Iterable, TYPE_CHECKING
 
 from lsprotocol.types import (
     TextEdit,
     Position,
@@ -10,15 +10,22 @@ from lsprotocol.types import (
 from debputy.linting.lint_util import LinterPositionCodec
 
 try:
-    from debian._deb822_repro.locatable import Position as TEPosition, Range as TERange
+    from debputy.lsp.vendoring._deb822_repro.locatable import (
+        Position as TEPosition,
+        Range as TERange,
+    )
 except ImportError:
     pass
 
 try:
-    from pygls.workspace import LanguageServer, TextDocument, PositionCodec
+    from pygls.server import LanguageServer
+    from pygls.workspace import TextDocument, PositionCodec
+except ImportError:
+    pass
+
+if TYPE_CHECKING:
     LintCapablePositionCodec = Union[LinterPositionCodec, PositionCodec]
-except ImportError:
+else:
     LintCapablePositionCodec = LinterPositionCodec
diff --git a/src/debputy/lsp/vendoring/_deb822_repro/parsing.py b/src/debputy/lsp/vendoring/_deb822_repro/parsing.py
index e2c638a..c5753e2 100644
--- a/src/debputy/lsp/vendoring/_deb822_repro/parsing.py
+++ b/src/debputy/lsp/vendoring/_deb822_repro/parsing.py
@@ -280,6 +283,9 @@ class Deb822ParsedTokenList(
         # type: () -> Iterator[VE]
         yield from (v for v in self._token_list if isinstance(v, self._vtype))
 
+    def iter_parts(self) -> Iterable[TokenOrElement]:
+        yield from self._token_list
+
     def _mark_changed(self):
         # type: () -> None
         self._changed = True
@@ -1081,6 +1084,14 @@ class Deb822Element(Locatable):
         # type: () -> bool
         return False
 
+    @property
+    def is_whitespace(self) -> bool:
+        return False
+
+    @property
+    def is_separator(self) -> bool:
+        return False
+
     @property
     def parent_element(self):
         # type: () -> Optional[Deb822Element]
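
For context on the insert-text quick fix added in quickfixes.py above: inserting a zero-width range at column 0 of the following line is the usual LSP idiom for adding a whole new line below a diagnostic. A small worked sketch of the position computation (assuming lsprotocol's Position/Range/TextEdit):

    from lsprotocol.types import Position, Range, TextEdit

    def insertion_edit(diagnostic_range: Range, text: str) -> TextEdit:
        line_no = diagnostic_range.end.line
        if diagnostic_range.end.character > 0:
            line_no += 1  # the diagnostic ends mid-line, so insert below that line
        point = Range(Position(line_no, 0), Position(line_no, 0))  # zero-width = pure insert
        return TextEdit(point, text)

    # A diagnostic ending at line 4, column 12 yields an insertion at line 5:
    assert insertion_edit(Range(Position(4, 0), Position(4, 12)), "X\n").range.start.line == 5
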
@@ -1492,6 +1503,20 @@ class Deb822KeyValuePairElement(Deb822Element):
         yield self._separator_token
         yield self._value_element
 
+    def key_position_in_stanza(self) -> Position:
+        position = super().position_in_parent(skip_leading_comments=False)
+        if self._comment_element:
+            field_pos = self._field_token.position_in_parent()
+            position = field_pos.relative_to(position)
+        return position
+
+    def value_position_in_stanza(self) -> Position:
+        position = super().position_in_parent(skip_leading_comments=False)
+        if self._comment_element:
+            value_pos = self._value_element.position_in_parent()
+            position = value_pos.relative_to(position)
+        return position
+
     def position_in_parent(
         self,
         *,
diff --git a/src/debputy/lsp/vendoring/_deb822_repro/tokens.py b/src/debputy/lsp/vendoring/_deb822_repro/tokens.py
index 6697a2c..88d2058 100644
--- a/src/debputy/lsp/vendoring/_deb822_repro/tokens.py
+++ b/src/debputy/lsp/vendoring/_deb822_repro/tokens.py
@@ -160,6 +160,10 @@ class Deb822Token(Locatable):
         # type: () -> bool
         return False
 
+    @property
+    def is_separator(self) -> bool:
+        return False
+
     @property
     def text(self):
         # type: () -> str
@@ -253,6 +257,10 @@ class Deb822SpaceSeparatorToken(Deb822SemanticallySignificantWhiteSpace):
 
     __slots__ = ()
 
+    @property
+    def is_separator(self) -> bool:
+        return True
+
 
 class Deb822ErrorToken(Deb822Token):
     """Token that represents a syntactical error"""
@@ -296,8 +304,12 @@ class Deb822SeparatorToken(Deb822Token):
 
     __slots__ = ()
 
+    @property
+    def is_separator(self) -> bool:
+        return True
+
 
-class Deb822FieldSeparatorToken(Deb822Token):
+class Deb822FieldSeparatorToken(Deb822SeparatorToken):
 
     __slots__ = ()
diff --git a/src/debputy/path_matcher.py b/src/debputy/path_matcher.py
index 47e5c91..2917b14 100644
--- a/src/debputy/path_matcher.py
+++ b/src/debputy/path_matcher.py
@@ -229,7 +229,9 @@ class MatchAnything(MatchRule):
     def _full_pattern(self) -> str:
         return "**/*"
 
-    def finditer(self, fs_root: VP, *, ignore_paths=None) -> Iterable[VP]:
+    def finditer(
+        self, fs_root: VP, *, ignore_paths: Optional[Callable[[VP], bool]] = None
+    ) -> Iterable[VP]:
         if ignore_paths is not None:
             yield from (p for p in fs_root.all_paths() if not ignore_paths(p))
         yield from fs_root.all_paths()
@@ -253,7 +255,9 @@ class ExactFileSystemPath(MatchRule):
     def _full_pattern(self) -> str:
         return self._path
 
-    def finditer(self, fs_root: VP, *, ignore_paths=None) -> Iterable[VP]:
+    def finditer(
+        self, fs_root: VP, *, ignore_paths: Optional[Callable[[VP], bool]] = None
+    ) -> Iterable[VP]:
         p = _lookup_path(fs_root, self._path)
         if p is not None and (ignore_paths is None or not ignore_paths(p)):
             yield p
@@ -376,7 +380,12 @@ class BasenameGlobMatch(MatchRule):
             return f"{self._directory}/{maybe_recursive}{self._basename_glob}"
         return self._basename_glob
 
-    def finditer(self, fs_root: VP, *, ignore_paths=None) -> Iterable[VP]:
+    def finditer(
+        self,
+        fs_root: VP,
+        *,
+        ignore_paths: Optional[Callable[[VP], bool]] = None,
+    ) -> Iterable[VP]:
         search_root = fs_root
         if self._directory is not None:
             p = _lookup_path(fs_root, self._directory)
@@ -466,7 +475,12 @@ class GenericGlobImplementation(MatchRule):
     def _full_pattern(self) -> str:
         return self._glob_pattern
 
-    def finditer(self, fs_root: VP, *, ignore_paths=None) -> Iterable[VP]:
+    def finditer(
+        self,
+        fs_root: VP,
+        *,
+        ignore_paths: Optional[Callable[[VP], bool]] = None,
+    ) -> Iterable[VP]:
         search_history = [fs_root]
         for part in self._match_parts:
             next_layer = itertools.chain.from_iterable(
diff --git a/src/debputy/plugin/api/impl_types.py b/src/debputy/plugin/api/impl_types.py
index 5aca980..9075ac6 100644
--- a/src/debputy/plugin/api/impl_types.py
+++ b/src/debputy/plugin/api/impl_types.py
@@ -420,7 +420,7 @@ class DispatchingParserBase(Generic[TP]):
 
     def _add_parser(
         self,
-        keyword: Union[str, List[str]],
+        keyword: Union[str, Iterable[str]],
         ppp: "PluginProvidedParser[PF, TP]",
     ) -> None:
         ks = [keyword] if isinstance(keyword, str) else keyword
diff --git a/src/debputy/plugin/api/spec.py b/src/debputy/plugin/api/spec.py
index dba4523..07954e6 100644
--- a/src/debputy/plugin/api/spec.py
+++ b/src/debputy/plugin/api/spec.py
@@ -1046,7 +1046,7 @@ class VirtualPath:
         self,
         *,
         byte_io: Literal[False] = False,
-        buffering: Optional[int] = ...,
+        buffering: int = -1,
     ) -> TextIO: ...
 
     @overload
@@ -1054,7 +1054,7 @@ class VirtualPath:
         self,
         *,
         byte_io: Literal[True],
-        buffering: Optional[int] = ...,
+        buffering: int = -1,
     ) -> BinaryIO: ...
 
     @overload
@@ -1062,7 +1062,7 @@ class VirtualPath:
         self,
         *,
         byte_io: bool,
-        buffering: Optional[int] = ...,
+        buffering: int = -1,
     ) -> Union[TextIO, BinaryIO]: ...
 
     def open(
@@ -1085,7 +1085,7 @@ class VirtualPath:
         :param byte_io: If True, open the file in binary mode (like `rb` for `open`)
         :param buffering: Same as open(..., buffering=...) where supported. Notably during testing,
           the content may be purely in memory and use a BytesIO/StringIO
-          (which does not accept that parameter, but then is buffered in a different way)
+          (which does not accept that parameter, but then it is buffered in a different way)
         :return: The file handle.
         """
diff --git a/src/debputy/plugin/debputy/metadata_detectors.py b/src/debputy/plugin/debputy/metadata_detectors.py
index 4338087..e325500 100644
--- a/src/debputy/plugin/debputy/metadata_detectors.py
+++ b/src/debputy/plugin/debputy/metadata_detectors.py
@@ -520,8 +520,8 @@ def auto_depends_arch_any_solink(
     if not roots:
         return
 
-    for libdir, target in targets:
-        final_path = os.path.join(libdir, target)
+    for libdir_path, target in targets:
+        final_path = os.path.join(libdir_path, target)
         matches = []
         for opkg, ofs_root in roots:
             m = ofs_root.lookup(final_path)
diff --git a/src/debputy/plugin/debputy/private_api.py b/src/debputy/plugin/debputy/private_api.py
index 8428a5f..37c9318 100644
--- a/src/debputy/plugin/debputy/private_api.py
+++ b/src/debputy/plugin/debputy/private_api.py
@@ -2517,21 +2517,20 @@ def _install_docs_rule_handler(
             path, package_type="deb", package_attribute="into"
         )
     ]
-    into = frozenset(into)
     if install_as is not None:
         assert len(sources) == 1
         assert dest_dir is None
         return InstallRule.install_doc_as(
             sources[0],
             install_as.match_rule.path,
-            into,
+            frozenset(into),
             path.path,
             condition,
         )
     return InstallRule.install_doc(
         sources,
         dest_dir,
-        into,
+        frozenset(into),
         path.path,
         condition,
     )
@@ -2622,10 +2621,9 @@ def _install_man_rule_handler(
         )
     ]
     condition = parsed_data.get("when")
-    into = frozenset(into)
     return InstallRule.install_man(
         sources,
-        into,
+        frozenset(into),
         section,
         language,
         attribute_path.path,
diff --git a/src/debputy/plugin/debputy/strip_non_determinism.py b/src/debputy/plugin/debputy/strip_non_determinism.py
index 2f8fd39..a94d348 100644
--- a/src/debputy/plugin/debputy/strip_non_determinism.py
+++ b/src/debputy/plugin/debputy/strip_non_determinism.py
@@ -70,10 +70,10 @@ class ExtensionPlusFileOutputRule(SndDetectionRule):
     def file_output_verdict(
         self,
         path: VirtualPath,
-        file_analysis: str,
+        file_analysis: Optional[str],
     ) -> bool:
         file_pattern = self.file_pattern
-        assert file_pattern is not None
+        assert file_pattern is not None and file_analysis is not None
         m = file_pattern.search(file_analysis)
         return m is not None
diff --git a/src/debputy/util.py b/src/debputy/util.py
index 4da2772..d8cfd67 100644
--- a/src/debputy/util.py
+++ b/src/debputy/util.py
@@ -70,8 +70,8 @@ _DOUBLE_ESCAPEES = re.compile(r'([\n`$"\\])')
 _REGULAR_ESCAPEES = re.compile(r'([\s!"$()*+#;<>?@\[\]\\`|~])')
 _PROFILE_GROUP_SPLIT = re.compile(r">\s+<")
 _DEFAULT_LOGGER: Optional[logging.Logger] = None
-_STDOUT_HANDLER: Optional[logging.StreamHandler] = None
-_STDERR_HANDLER: Optional[logging.StreamHandler] = None
+_STDOUT_HANDLER: Optional[logging.StreamHandler[Any]] = None
+_STDERR_HANDLER: Optional[logging.StreamHandler[Any]] = None
 
 
 def assume_not_none(x: Optional[T]) -> T:
@@ -764,14 +764,14 @@ def setup_logging(
         )
         logger = logging.getLogger()
         if existing_stdout_handler is not None:
-            logger.removeHandler(existing_stderr_handler)
+            logger.removeHandler(existing_stdout_handler)
         _STDERR_HANDLER = stderr_handler
         logger.addHandler(stderr_handler)
     else:
         stderr_handler = logging.StreamHandler(sys.stderr)
         stderr_handler.setFormatter(logging.Formatter(colorless_format, style="{"))
         logger = logging.getLogger()
-        if existing_stdout_handler is not None:
+        if existing_stderr_handler is not None:
             logger.removeHandler(existing_stderr_handler)
         _STDERR_HANDLER = stderr_handler
         logger.addHandler(stderr_handler)
diff --git a/src/debputy/yaml/compat.py b/src/debputy/yaml/compat.py
index f26af02..f36fc5a 100644
--- a/src/debputy/yaml/compat.py
+++ b/src/debputy/yaml/compat.py
@@ -10,10 +10,10 @@ __all__ = [
 ]
 
 try:
-    from ruyaml import YAMLError, YAML, Node
+    from ruyaml import YAML, Node
     from ruyaml.comments import LineCol, CommentedBase, CommentedMap, CommentedSeq
-    from ruyaml.error import MarkedYAMLError
+    from ruyaml.error import YAMLError, MarkedYAMLError
 except (ImportError, ModuleNotFoundError):
-    from ruamel.yaml import YAMLError, YAML, Node
-    from ruamel.yaml.comments import LineCol, CommentedBase, CommentedMap, CommentedSeq
-    from ruamel.yaml.error import MarkedYAMLError
+    from ruamel.yaml import YAML, Node  # type: ignore
+    from ruamel.yaml.comments import LineCol, CommentedBase, CommentedMap, CommentedSeq  # type: ignore
+    from ruamel.yaml.error import YAMLError, MarkedYAMLError  # type: ignore
diff --git a/tests/lint_tests/test_lint_changelog.py b/tests/lint_tests/test_lint_changelog.py
index 25dac0e..258c2fe 100644
--- a/tests/lint_tests/test_lint_changelog.py
+++ b/tests/lint_tests/test_lint_changelog.py
@@ -1,17 +1,11 @@
 import textwrap
-from typing import List, Optional
 
 import pytest
 
 from debputy.lsp.lsp_debian_changelog import _lint_debian_changelog
-from debputy.lsp.lsp_debian_control import _lint_debian_control
 from debputy.packages import DctrlParser
 from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
-from lint_tests.lint_tutil import (
-    group_diagnostics_by_severity,
-    requires_levenshtein,
-    LintWrapper,
-)
+from lint_tests.lint_tutil import LintWrapper
 
 try:
     from lsprotocol.types import Diagnostic, DiagnosticSeverity
@@ -60,7 +54,7 @@ def test_dctrl_lint(line_linter: LintWrapper) -> None:
 
         Package: something-else
         """
-    )
+    ).splitlines(keepends=True)
 
     diagnostics = line_linter(lines)
     print(diagnostics)
@@ -73,7 +67,7 @@ def test_dctrl_lint(line_linter: LintWrapper) -> None:
 
         Package: something-else
         """
-    )
+    ).splitlines(keepends=True)
 
     diagnostics = line_linter(lines)
     print(diagnostics)
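The lint tests above now hand the linter a list of lines rather than a single string; `str.splitlines(keepends=True)` preserves the trailing newline on each element, so the document can be reassembled losslessly. For illustration (standalone, not debputy code):

    import textwrap

    lines = textwrap.dedent(
        """\
        Source: foo
        Package: something-else
        """
    ).splitlines(keepends=True)

    # Each element keeps its newline, so "".join() round-trips the text.
    assert lines == ["Source: foo\n", "Package: something-else\n"]
    assert "".join(lines) == "Source: foo\nPackage: something-else\n"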
diff --git a/tests/lint_tests/test_lint_dctrl.py b/tests/lint_tests/test_lint_dctrl.py
index e9a5756..ce34d7c 100644
--- a/tests/lint_tests/test_lint_dctrl.py
+++ b/tests/lint_tests/test_lint_dctrl.py
@@ -153,6 +153,7 @@ def test_dctrl_lint_mx_value_with_typo(line_linter: LintWrapper) -> None:
 
     diagnostics = line_linter(lines)
     print(diagnostics)
+    assert diagnostics is not None
     assert len(diagnostics) == 2
     by_severity = group_diagnostics_by_severity(diagnostics)
     assert DiagnosticSeverity.Error in by_severity
diff --git a/tests/lint_tests/test_lint_debputy.py b/tests/lint_tests/test_lint_debputy.py
index 9c30392..8e405f8 100644
--- a/tests/lint_tests/test_lint_debputy.py
+++ b/tests/lint_tests/test_lint_debputy.py
@@ -1,187 +1,4 @@
-from typing import List, Optional, Callable
-
-import pytest
-
-from debputy.packages import DctrlParser
-from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
-from lint_tests.lint_tutil import (
-    requires_levenshtein,
-    LintWrapper,
-)
-
-try:
-    from lsprotocol.types import Diagnostic, DiagnosticSeverity
-except ImportError:
-    pass
-
-
-@pytest.fixture
-def line_linter(
-    debputy_plugin_feature_set: PluginProvidedFeatureSet,
-    lint_dctrl_parser: DctrlParser,
-) -> LintWrapper:
-    return LintWrapper(
-        "/nowhere/debian/debputy.manifest",
-        _lint_debian_debputy_manifest,
-        debputy_plugin_feature_set,
-        lint_dctrl_parser,
-    )
-
-
-def test_debputy_lint_unknown_keys(line_linter: LintWrapper) -> None:
-    lines = textwrap.dedent(
-        """\
-        manifest-version: 0.1
-        installations:
-        - install-something:
-            sources:
-            - abc
-            - def
-        - install-docs:
-            source: foo
-            puff: true # Unknown keyword (assuming install-docs)
-            when:
-              negated: cross-compiling
-        - install-docs:
-            source: bar
-            when: ross-compiling # Typo of "cross-compiling"; FIXME not caught
-        packages:
-          foo:
-            blah: qwe # Unknown keyword
-        """
-    ).splitlines(keepends=True)
-
-    diagnostics = line_linter(lines)
-    by_severity = group_diagnostics_by_severity(diagnostics)
-    # This example triggers errors only
-    assert DiagnosticSeverity.Error in by_severity
-
-    assert DiagnosticSeverity.Warning not in by_severity
-    assert DiagnosticSeverity.Hint not in by_severity
-    assert DiagnosticSeverity.Information not in by_severity
-
-    errors = by_severity[DiagnosticSeverity.Error]
-    print(errors)
-    assert len(errors) == 4
-
-    first_error, second_error, third_error, fourth_error = errors
-
-    msg = 'Unknown or unsupported key "install-something".'
-    assert first_error.message == msg
-    assert f"{first_error.range}" == "2:2-2:19"
-
-    msg = 'Unknown or unsupported key "puff".'
-    assert second_error.message == msg
-    assert f"{second_error.range}" == "8:4-8:8"
-
-    msg = 'Unknown or unsupported key "negated".'
-    assert third_error.message == msg
-    assert f"{third_error.range}" == "10:6-10:13"
-
-    msg = 'Unknown or unsupported key "blah".'
-    assert fourth_error.message == msg
-    assert f"{fourth_error.range}" == "16:4-16:8"
-
-
-@requires_levenshtein
-def test_debputy_lint_unknown_keys_spelling(line_linter: LintWrapper) -> None:
-    lines = textwrap.dedent(
-        """\
-        manifest-version: 0.1
-        installations:
-        - install-dcoss: # typo
-            sources:
-            - abc
-            - def
-            puff: true # Unknown keyword (assuming install-docs)
-            when:
-              nut: cross-compiling # Typo of "not"
-        - install-docs:
-            source: bar
-            when: ross-compiling # Typo of "cross-compiling"; FIXME not caught
-        """
-    ).splitlines(keepends=True)
-
-    diagnostics = line_linter(lines)
-    by_severity = group_diagnostics_by_severity(diagnostics)
-    # This example triggers errors only
-    assert DiagnosticSeverity.Error in by_severity
-
-    assert DiagnosticSeverity.Warning not in by_severity
-    assert DiagnosticSeverity.Hint not in by_severity
-    assert DiagnosticSeverity.Information not in by_severity
-
-    errors = by_severity[DiagnosticSeverity.Error]
-    print(errors)
-    assert len(errors) == 3
-
-    first_error, second_error, third_error = errors
-
-    msg = 'Unknown or unsupported key "install-dcoss". It looks like a typo of "install-docs".'
-    assert first_error.message == msg
-    assert f"{first_error.range}" == "2:2-2:15"
-
-    msg = 'Unknown or unsupported key "puff".'
-    assert second_error.message == msg
-    assert f"{second_error.range}" == "6:4-6:8"
-
-    msg = 'Unknown or unsupported key "nut". It looks like a typo of "not".'
-    assert third_error.message == msg
-    assert f"{third_error.range}" == "8:6-8:9"
-
-
-def test_debputy_lint_conflicting_keys(line_linter: LintWrapper) -> None:
-    lines = textwrap.dedent(
-        """\
-        manifest-version: 0.1
-        installations:
-        - install-docs:
-            sources:
-            - foo
-            - bar
-            as: baz # Conflicts with "sources" (#85)
-        - install:
-            source: foo
-            sources: # Conflicts with "source" (#85)
-            - bar
-            - baz
-        """
-    ).splitlines(keepends=True)
-
-    diagnostics = line_linter(lines)
-    by_severity = group_diagnostics_by_severity(diagnostics)
-    # This example triggers errors only
-    assert DiagnosticSeverity.Error in by_severity
-
-    assert DiagnosticSeverity.Warning not in by_severity
-    assert DiagnosticSeverity.Hint not in by_severity
-    assert DiagnosticSeverity.Information not in by_severity
-
-    errors = by_severity[DiagnosticSeverity.Error]
-    print(errors)
-    assert len(errors) == 4
-
-    first_error, second_error, third_error, fourth_error = errors
-
-    msg = 'The "sources" cannot be used with "as".'
-    assert first_error.message == msg
-    assert f"{first_error.range}" == "3:4-3:11"
-
-    msg = 'The "as" cannot be used with "sources".'
-    assert second_error.message == msg
-    assert f"{second_error.range}" == "6:4-6:6"
-
-    msg = 'The "source" cannot be used with "sources".'
-    assert third_error.message == msg
-    assert f"{third_error.range}" == "8:4-8:10"
-
-    msg = 'The "sources" cannot be used with "source".'
-    assert fourth_error.message == msg
-    assert f"{fourth_error.range}" == "9:4-9:11"
-
-
 import textwrap
-from typing import List, Optional, Callable
 
 import pytest
 
@@ -189,9 +6,9 @@ from debputy.lsp.lsp_debian_debputy_manifest import _lint_debian_debputy_manifes
 from debputy.packages import DctrlParser
 from debputy.plugin.api.feature_set import PluginProvidedFeatureSet
 from lint_tests.lint_tutil import (
-    group_diagnostics_by_severity,
     requires_levenshtein,
     LintWrapper,
+    group_diagnostics_by_severity,
 )
 
 try:
@@ -199,8 +16,6 @@ try:
 except ImportError:
     pass
 
-TestLintWrapper = Callable[[List[str]], Optional[List["Diagnostic"]]]
-
 
 @pytest.fixture
 def line_linter(
diff --git a/tests/lsp_tests/lsp_tutil.py b/tests/lsp_tests/lsp_tutil.py
index 0843f79..bc0fa91 100644
--- a/tests/lsp_tests/lsp_tutil.py
+++ b/tests/lsp_tests/lsp_tutil.py
@@ -1,16 +1,52 @@
-from typing import Tuple, Union
+import dataclasses
+from typing import Tuple, Union, FrozenSet, Optional, List
+
+from debputy.lsp.lsp_features import SEMANTIC_TOKENS_LEGEND
+from debputy.util import grouper
 
 try:
-    from pygls.server import LanguageServer
     from lsprotocol.types import (
         TextDocumentItem,
         Position,
+        Range,
+        SemanticTokens,
     )
 
     from debputy.lsp.debputy_ls import DebputyLanguageServer
 except ImportError:
     pass
 
 
+@dataclasses.dataclass(slots=True, frozen=True)
+class ResolvedSemanticToken:
+    range: "Range"
+    token_name: str
+    modifiers: FrozenSet[str] = frozenset()
+
+
+def resolved_semantic_token(
+    line_no: int,
+    col_start: int,
+    token_len: int,
+    token_type: str,
+    *,
+    token_modifiers: FrozenSet[str] = frozenset(),
+) -> ResolvedSemanticToken:
+    return ResolvedSemanticToken(
+        Range(
+            Position(
+                line_no,
+                col_start,
+            ),
+            Position(
+                line_no,
+                col_start + token_len,
+            ),
+        ),
+        token_type,
+        token_modifiers,
+    )
+
+
 def _locate_cursor(text: str) -> Tuple[str, "Position"]:
     lines = text.splitlines(keepends=True)
     for line_no in range(len(lines)):
@@ -27,12 +63,27 @@ def _locate_cursor(text: str) -> Tuple[str, "Position"]:
 
 
 def put_doc_with_cursor(
-    ls: Union["LanguageServer", "DebputyLanguageServer"],
+    ls: "DebputyLanguageServer",
     uri: str,
     language_id: str,
     content: str,
 ) -> "Position":
     cleaned_content, cursor_pos = _locate_cursor(content)
+    put_doc_no_cursor(
+        ls,
+        uri,
+        language_id,
+        cleaned_content,
+    )
+    return cursor_pos
+
+
+def put_doc_no_cursor(
+    ls: "DebputyLanguageServer",
+    uri: str,
+    language_id: str,
+    content: str,
+) -> None:
     doc_version = 1
     existing = ls.workspace.text_documents.get(uri)
     if existing is not None:
@@ -42,7 +93,38 @@ def put_doc_with_cursor(
             uri,
             language_id,
             doc_version,
-            cleaned_content,
+            content,
         )
     )
-    return cursor_pos
+
+
+def resolve_semantic_tokens(
+    token_result: Optional["SemanticTokens"],
+) -> Optional[List[ResolvedSemanticToken]]:
+    if token_result is None:
+        return None
+    assert (len(token_result.data) % 5) == 0
+    current_line = 0
+    current_col = 0
+    resolved_tokens = []
+    token_types = SEMANTIC_TOKENS_LEGEND.token_types
+    for token_data in grouper(token_result.data, 5, incomplete="strict"):
+        line_delta, col_start_delta, token_len, token_code, modifier_codes = token_data
+        if line_delta:
+            current_col = 0
+        current_line += line_delta
+        current_col += col_start_delta
+        assert (
+            not modifier_codes
+        ), "TODO: Modifiers not supported (no modifiers defined)"
+
+        resolved_tokens.append(
+            resolved_semantic_token(
+                current_line,
+                current_col,
+                token_len,
+                token_types[token_code],
+            ),
+        )
+
+    return resolved_tokens
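`resolve_semantic_tokens()` above decodes the LSP semantic-token wire format, in which every token is five integers and positions are delta-encoded against the previous token. A self-contained sketch of the same decoding loop (the legend list here is invented for the example; the real one comes from SEMANTIC_TOKENS_LEGEND):

    from typing import List, Tuple

    TOKEN_TYPES = ["comment", "keyword", "enumMember"]  # example legend

    def decode(data: List[int]) -> List[Tuple[int, int, int, str]]:
        assert len(data) % 5 == 0
        line = col = 0
        out = []
        for i in range(0, len(data), 5):
            line_delta, col_delta, length, token_code, _modifiers = data[i : i + 5]
            if line_delta:
                col = 0  # column deltas restart on a new line
            line += line_delta
            col += col_delta
            out.append((line, col, length, TOKEN_TYPES[token_code]))
        return out

    # Two tokens: a "keyword" at 0:0 (len 6), then a "comment" two lines down at 2:4.
    assert decode([0, 0, 6, 1, 0, 2, 4, 9, 0, 0]) == [
        (0, 0, 6, "keyword"),
        (2, 4, 9, "comment"),
    ]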
diff --git a/tests/lsp_tests/test_debpkg_metadata.py b/tests/lsp_tests/test_debpkg_metadata.py
new file mode 100644
index 0000000..f784b0a
--- /dev/null
+++ b/tests/lsp_tests/test_debpkg_metadata.py
@@ -0,0 +1,25 @@
+from typing import Optional
+
+import pytest
+
+from debputy.lsp.lsp_debian_control_reference_data import package_name_to_section
+
+
+@pytest.mark.parametrize(
+    "name,guessed_section",
+    [
+        ("foo-udeb", "debian-installer"),
+        ("python-foo", "python"),
+        ("python-foo-doc", "doc"),
+        ("libfoo-dev", "libdevel"),
+        ("php-foo", "php"),
+        ("libpam-foo", "admin"),
+        ("fonts-foo", "fonts"),
+        ("xxx-l10n", "localization"),
+        ("xxx-l10n-bar", "localization"),
+        ("libfoo4", "libs"),
+        ("unknown", None),
+    ]
+)
+def test_package_name_to_section(name: str, guessed_section: Optional[str]) -> None:
+    assert package_name_to_section(name) == guessed_section
diff --git a/tests/lsp_tests/test_lsp_dctrl.py b/tests/lsp_tests/test_lsp_dctrl.py
index 122b929..2bc90ba 100644
--- a/tests/lsp_tests/test_lsp_dctrl.py
+++ b/tests/lsp_tests/test_lsp_dctrl.py
@@ -1,25 +1,34 @@
 import textwrap
 
+from debputy.lsp.debputy_ls import DebputyLanguageServer
+
 try:
     from lsprotocol.types import (
         CompletionParams,
         TextDocumentIdentifier,
         HoverParams,
         MarkupContent,
+        SemanticTokensParams,
     )
 
     from debputy.lsp.lsp_debian_control import (
         _debian_control_completions,
         _debian_control_hover,
+        _debian_control_semantic_tokens_full,
     )
     from pygls.server import LanguageServer
 except ImportError:
     pass
 
-from lsp_tests.lsp_tutil import put_doc_with_cursor
+from lsp_tests.lsp_tutil import (
+    put_doc_with_cursor,
+    put_doc_no_cursor,
+    resolve_semantic_tokens,
+    resolved_semantic_token,
+)
 
 
-def test_dctrl_complete_field(ls: "LanguageServer") -> None:
+def test_dctrl_complete_field(ls: "DebputyLanguageServer") -> None:
     dctrl_uri = "file:///nowhere/debian/control"
 
     cursor_pos = put_doc_with_cursor(
@@ -48,7 +57,7 @@ def test_dctrl_complete_field(ls: "LanguageServer") -> None:
     assert "Source" not in keywords
 
 
-def test_dctrl_hover_doc_field(ls: "LanguageServer") -> None:
+def test_dctrl_hover_doc_field(ls: "DebputyLanguageServer") -> None:
     dctrl_uri = "file:///nowhere/debian/control"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -72,7 +81,7 @@ def test_dctrl_hover_doc_field(ls: "LanguageServer") -> None:
     assert "Determines which architecture" in hover_doc.contents.value
 
 
-def test_dctrl_hover_doc_synopsis(ls: "LanguageServer") -> None:
+def test_dctrl_hover_doc_synopsis(ls: "DebputyLanguageServer") -> None:
     dctrl_uri = "file:///nowhere/debian/control"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -98,7 +107,7 @@ def test_dctrl_hover_doc_synopsis(ls: "LanguageServer") -> None:
     assert "super charged tool with batteries included" in hover_doc.contents.value
 
 
-def test_dctrl_hover_doc_substvars(ls: "LanguageServer") -> None:
+def test_dctrl_hover_doc_substvars(ls: "DebputyLanguageServer") -> None:
     dctrl_uri = "file:///nowhere/debian/control"
     matching_cases = [
         "bar (= ${binary:Version})",
@@ -158,3 +167,54 @@ def test_dctrl_hover_doc_substvars(ls: "LanguageServer") -> None:
         if hover_doc is not None and isinstance(hover_doc.contents, MarkupContent):
             provided_doc = hover_doc.contents.value
         assert not provided_doc.startswith("# Substvar `${binary:Version}`")
+
+
+def test_dctrl_semantic_tokens(ls: "DebputyLanguageServer") -> None:
+    dctrl_uri = "file:///nowhere/debian/control"
+    put_doc_no_cursor(
+        ls,
+        dctrl_uri,
+        "debian/control",
+        textwrap.dedent(
+            """\
+            # Some leading comment
+
+            Source: foo
+
+            # Comment between stanzas
+
+            Package: foo
+            # Comment before Architecture
+            Architecture: any
+            Depends:
+            # Comment about bar
+             bar (>= 1.0),
+             baz [linux-any]
+            Description: super charged tool with batteries included
+            Unknown-Field: Some value
+            # Comment in that field
+             that we do not know about.
+"""
+        ),
+    )
+
+    semantic_tokens = _debian_control_semantic_tokens_full(
+        ls,
+        SemanticTokensParams(TextDocumentIdentifier(dctrl_uri)),
+    )
+    resolved_semantic_tokens = resolve_semantic_tokens(semantic_tokens)
+    assert resolved_semantic_tokens is not None
+    assert resolved_semantic_tokens == [
+        resolved_semantic_token(0, 0, len("# Some leading comment"), "comment"),
+        resolved_semantic_token(2, 0, len("Source"), "keyword"),
+        resolved_semantic_token(4, 0, len("# Comment between stanzas"), "comment"),
+        resolved_semantic_token(6, 0, len("Package"), "keyword"),
+        resolved_semantic_token(7, 0, len("# Comment before Architecture"), "comment"),
+        resolved_semantic_token(8, 0, len("Architecture"), "keyword"),
+        resolved_semantic_token(8, len("Architecture: "), len("any"), "enumMember"),
+        resolved_semantic_token(9, 0, len("Depends"), "keyword"),
+        resolved_semantic_token(10, 0, len("# Comment about bar"), "comment"),
+        resolved_semantic_token(13, 0, len("Description"), "keyword"),
+        resolved_semantic_token(14, 0, len("Unknown-Field"), "keyword"),
+        # TODO: resolved_semantic_token(15, 0, len("# Comment in that field"), "comment"),
+    ]
diff --git a/tests/lsp_tests/test_lsp_debputy_manifest_hover.py b/tests/lsp_tests/test_lsp_debputy_manifest_hover.py
index c66db80..54c6b6a 100644
--- a/tests/lsp_tests/test_lsp_debputy_manifest_hover.py
+++ b/tests/lsp_tests/test_lsp_debputy_manifest_hover.py
@@ -2,6 +2,7 @@ import textwrap
 
 import pytest
 
+from debputy.lsp.debputy_ls import DebputyLanguageServer
 from lsp_tests.lsp_tutil import put_doc_with_cursor
 
 try:
@@ -25,7 +26,7 @@ except ImportError:
     HAS_PYGLS = False
 
 
-def test_basic_debputy_hover_tlk(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_tlk(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -52,7 +53,7 @@ def test_basic_debputy_hover_tlk(ls: "LanguageServer") -> None:
     assert hover_doc.contents.value.startswith("Installations")
 
 
-def test_basic_debputy_hover_install_docs_key(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_install_docs_key(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -79,7 +80,7 @@ def test_basic_debputy_hover_install_docs_key(ls: "LanguageServer") -> None:
     assert hover_doc.contents.value.startswith("Install documentation (`install-docs`)")
 
 
-def test_basic_debputy_hover_install_docs_sources(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_install_docs_sources(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -106,7 +107,7 @@ def test_basic_debputy_hover_install_docs_sources(ls: "LanguageServer") -> None:
     assert hover_doc.contents.value.startswith("# Attribute `sources`")
 
 
-def test_basic_debputy_hover_install_docs_when(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_install_docs_when(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -134,7 +135,7 @@ def test_basic_debputy_hover_install_docs_when(ls: "LanguageServer") -> None:
     assert hover_doc.contents.value.startswith("# Attribute `when`")
 
 
-def test_basic_debputy_hover_install_docs_str_cond(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_install_docs_str_cond(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -165,7 +166,7 @@ def test_basic_debputy_hover_install_docs_str_cond(ls: "LanguageServer") -> None
 
 
 def test_basic_debputy_hover_install_docs_mapping_cond_key(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
 ) -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
@@ -197,7 +198,7 @@ def test_basic_debputy_hover_install_docs_mapping_cond_key(
 
 @pytest.mark.xfail
 def test_basic_debputy_hover_install_docs_mapping_cond_str_value(
-    ls: "LanguageServer",
+    ls: "DebputyLanguageServer",
 ) -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
@@ -230,7 +231,7 @@ def test_basic_debputy_hover_install_docs_mapping_cond_str_value(
     )
 
 
-def test_basic_debputy_hover_binary_version(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_binary_version(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -256,7 +257,7 @@ def test_basic_debputy_hover_binary_version(ls: "LanguageServer") -> None:
     )
 
 
-def test_basic_debputy_hover_services(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_services(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
@@ -283,7 +284,7 @@ def test_basic_debputy_hover_services(ls: "LanguageServer") -> None:
     )
 
 
-def test_basic_debputy_hover_services_service(ls: "LanguageServer") -> None:
+def test_basic_debputy_hover_services_service(ls: "DebputyLanguageServer") -> None:
     debputy_manifest_uri = "file:///nowhere/debian/debputy.manifest"
     cursor_pos = put_doc_with_cursor(
         ls,
diff --git a/tests/plugin_tests/__init__.py b/tests/plugin_tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/plugin_tests/conftest.py b/tests/plugin_tests/conftest.py
index f2a8aea..6b28755 100644
--- a/tests/plugin_tests/conftest.py
+++ b/tests/plugin_tests/conftest.py
@@ -1,10 +1,11 @@
 import os
+from typing import Iterator
 
 import pytest
 
 
 @pytest.fixture(autouse=True)
-def workaround_debputys_own_test_suite() -> None:
+def workaround_debputys_own_test_suite() -> Iterator[None]:
     # This fixture is only required as long as the tests are run inside `debputy`'s
     # own test suite. If you copy out a plugin + tests, you should *not* need this
     # fixture.
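The fixture annotations in this and the following test files change from `-> None` to `-> Iterator[None]` because these fixtures `yield` to run teardown code: a generator function's return annotation describes the generator itself, not the yielded value. Schematically (the environment variable name here is made up):

    import os
    from typing import Iterator

    import pytest

    @pytest.fixture()
    def env_var_stub() -> Iterator[None]:
        # Setup: export the stub path before the test runs.
        os.environ["_EXAMPLE_TEST_PATH"] = "/nonexistent/stub"
        yield  # the test body executes here
        # Teardown: always remove the variable again.
        del os.environ["_EXAMPLE_TEST_PATH"]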
diff --git a/tests/plugin_tests/numpy3_test.py b/tests/plugin_tests/numpy3_test.py
index 9b252fb..62fbd77 100644
--- a/tests/plugin_tests/numpy3_test.py
+++ b/tests/plugin_tests/numpy3_test.py
@@ -1,4 +1,5 @@
 import os
+from typing import Iterator
 
 import pytest
 
@@ -12,7 +13,7 @@ DATA_FILE = os.path.join(os.path.dirname(__file__), "numpy3_test.data")
 
 
 @pytest.fixture(scope="session")
-def numpy3_stub_data_file() -> None:
+def numpy3_stub_data_file() -> Iterator[None]:
     os.environ["_NUMPY_TEST_PATH"] = DATA_FILE
     yield
     try:
diff --git a/tests/plugin_tests/perl-openssl_test.py b/tests/plugin_tests/perl-openssl_test.py
index 37f2ba1..27d680d 100644
--- a/tests/plugin_tests/perl-openssl_test.py
+++ b/tests/plugin_tests/perl-openssl_test.py
@@ -1,5 +1,6 @@
 import stat
 import os
+from typing import Iterator
 
 import pytest
 
@@ -13,7 +14,7 @@ STUB_CMD = os.path.join(os.path.dirname(__file__), "perl-ssl_test.sh")
 
 
 @pytest.fixture(scope="session")
-def perl_ssl_stub_cmd() -> None:
+def perl_ssl_stub_cmd() -> Iterator[None]:
     os.environ["_PERL_SSL_DEFAULTS_TEST_PATH"] = STUB_CMD
     mode = stat.S_IMODE(os.stat(STUB_CMD).st_mode)
     if (mode & 0o500) != 0o500:
diff --git a/tests/test_alternatives.py b/tests/test_alternatives.py
index 2a42d69..c395dab 100644
--- a/tests/test_alternatives.py
+++ b/tests/test_alternatives.py
@@ -79,6 +79,8 @@ def test_alternatives(
     prerm = maintscript_snippets["prerm"].generate_snippet(reverse=True)
     postinst = maintscript_snippets["postinst"].generate_snippet(reverse=True)
 
+    assert prerm is not None
+    assert postinst is not None
     assert "--remove x-terminal-emulator /usr/bin/xterm" in prerm
     assert (
         "--install /usr/bin/x-terminal-emulator x-terminal-emulator /usr/bin/xterm 20"
diff --git a/tests/test_deb_packaging_support.py b/tests/test_deb_packaging_support.py
index d47526d..e4e13da 100644
--- a/tests/test_deb_packaging_support.py
+++ b/tests/test_deb_packaging_support.py
@@ -203,9 +203,9 @@ def test_upstream_changelog_salsa_issue_49(
 ) -> None:
     # https://salsa.debian.org/debian/debputy/-/issues/49
     dctrl = package_single_foo_arch_all_cxt_amd64["foo"]
-    doc_dir = f"./usr/share/doc/{dctrl.name}"
+    doc_dir_path = f"./usr/share/doc/{dctrl.name}"
     data_fs_root = build_virtual_fs(
-        [virtual_path_def(f"{doc_dir}", link_target="foo-data")], read_write_fs=True
+        [virtual_path_def(doc_dir_path, link_target="foo-data")], read_write_fs=True
     )
     source_fs_root = build_virtual_fs(
         [virtual_path_def("changelog", materialized_content="Wrong file!")]
diff --git a/tests/test_debputy_plugin.py b/tests/test_debputy_plugin.py
index a5d7758..dc60597 100644
--- a/tests/test_debputy_plugin.py
+++ b/tests/test_debputy_plugin.py
@@ -1,6 +1,6 @@
 import os
 import textwrap
-from typing import Sequence
+from typing import Sequence, Any, List, Optional
 
 import pytest
 
@@ -643,6 +643,8 @@ def test_system_service_detection() -> None:
     systemd_service_system_dir = f"{systemd_service_root_dir}/system"
     systemd_service_user_dir = f"{systemd_service_root_dir}/user"
 
+    services: List[DetectedService[Optional[object]]]
+
     services, _ = plugin.run_service_detection_and_integrations(
         "systemd", build_virtual_file_system([])
     )
@@ -704,7 +706,9 @@ def test_system_service_detection() -> None:
     assert foo_service.enable_by_default
     assert foo_service.start_by_default
     assert foo_service.default_upgrade_rule == "restart"
-    assert foo_service.service_context.had_install_section
+    foo_service_context = foo_service.service_context
+    assert isinstance(foo_service_context, SystemdServiceContext)
+    assert foo_service_context.had_install_section
 
     bar_timer = _extract_service(services, "bar.timer")
     assert set(bar_timer.names) == {"bar.timer"}
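`DetectedService.service_context` is typed opaquely (the tests annotate it as `Optional[object]`), so the assertions now narrow it with `isinstance` before touching `had_install_section`. The same idiom in isolation, using a stand-in dataclass rather than debputy's real `SystemdServiceContext`:

    import dataclasses
    from typing import Optional

    @dataclasses.dataclass
    class ExampleServiceContext:  # stand-in for SystemdServiceContext
        had_install_section: bool

    def had_install_section(service_context: Optional[object]) -> bool:
        # isinstance() narrows Optional[object] to the concrete type,
        # keeping both the type checker and the runtime honest.
        assert isinstance(service_context, ExampleServiceContext)
        return service_context.had_install_section

    assert had_install_section(ExampleServiceContext(had_install_section=True))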
@@ -713,7 +717,9 @@ def test_system_service_detection() -> None:
     assert not bar_timer.enable_by_default
     assert bar_timer.start_by_default
     assert bar_timer.default_upgrade_rule == "restart"
-    assert not bar_timer.service_context.had_install_section
+    bar_service_context = bar_timer.service_context
+    assert isinstance(bar_service_context, SystemdServiceContext)
+    assert not bar_service_context.had_install_section
 
     snippets = metadata.maintscripts()
     assert len(snippets) == 4
@@ -742,6 +748,8 @@ def test_sysv_service_detection() -> None:
     )
     init_dir = "etc/init.d"
 
+    services: List[DetectedService[Optional[object]]]
+
     services, _ = plugin.run_service_detection_and_integrations(
         "sysvinit", build_virtual_file_system([])
     )
@@ -1135,9 +1143,9 @@ def test_pam_auth_update() -> None:
     assert postinst.registration_method == "on_configure"
     assert "pam-auth-update --package" in postinst.plugin_provided_script
 
-    prerms = prerms[0]
-    assert prerms.registration_method == "on_before_removal"
-    assert "pam-auth-update --package --remove foo-pam" in prerms.plugin_provided_script
+    prerm = prerms[0]
+    assert prerm.registration_method == "on_before_removal"
+    assert "pam-auth-update --package --remove foo-pam" in prerm.plugin_provided_script
 
 
 def test_auto_depends_solink() -> None:
diff --git a/tests/test_declarative_parser.py b/tests/test_declarative_parser.py
index 94341ea..26291dd 100644
--- a/tests/test_declarative_parser.py
+++ b/tests/test_declarative_parser.py
@@ -1,4 +1,11 @@
-from typing import List, TypedDict, NotRequired, Annotated, Union, Mapping
+from typing import (
+    List,
+    TypedDict,
+    NotRequired,
+    Annotated,
+    Union,
+    Mapping,
+)
 
 import pytest
 
@@ -25,7 +32,7 @@ class TFinalEntity(DebputyParsedContent):
 
 class TSourceEntity(TypedDict):
     sources: NotRequired[List[str]]
-    source: Annotated[NotRequired[str], DebputyParseHint.target_attribute("sources")]
+    source: NotRequired[Annotated[str, DebputyParseHint.target_attribute("sources")]]
     as_: NotRequired[
         Annotated[
             str,
diff --git a/tests/test_fs_metadata.py b/tests/test_fs_metadata.py
index 14a397f..f32afb0 100644
--- a/tests/test_fs_metadata.py
+++ b/tests/test_fs_metadata.py
@@ -1,11 +1,13 @@
 import dataclasses
 import textwrap
-from typing import Tuple, List, Optional, Union
+from typing import Tuple, List, Optional, Union, Sequence
 
 import pytest
 
 from debputy.filesystem_scan import PathDef, build_virtual_fs
-from debputy.highlevel_manifest_parser import YAMLManifestParser
+from debputy.highlevel_manifest_parser import (
+    YAMLManifestParser,
+)
 from debputy.intermediate_manifest import PathType, IntermediateManifest, TarMember
 from debputy.plugin.api import virtual_path_def
 from debputy.plugin.api.test_api import build_virtual_file_system
@@ -57,7 +59,7 @@ def _has_fs_path(tm: TarMember) -> bool:
 
 def verify_paths(
     intermediate_manifest: IntermediateManifest,
-    expected_results: List[Tuple[Union[str, PathDef], Expected]],
+    expected_results: Sequence[Tuple[Union[str, PathDef], Expected]],
 ) -> None:
     result = {tm.member_path: tm for tm in intermediate_manifest}
     expected_table = {
@@ -89,7 +91,9 @@ def verify_paths(
         assert tm.member_path in expected_table
 
 
-def test_mtime_clamp_and_builtin_dir_mode(manifest_parser_pkg_foo):
+def test_mtime_clamp_and_builtin_dir_mode(
+    manifest_parser_pkg_foo: YAMLManifestParser,
+) -> None:
     manifest = manifest_parser_pkg_foo.build_manifest()
     claim_mtime_to = 255
     path_defs: List[Tuple[PathDef, Expected]] = [
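The `TSourceEntity` change above swaps the nesting from `Annotated[NotRequired[str], ...]` to `NotRequired[Annotated[str, ...]]`. The patch does not state the rationale; plausibly keeping `NotRequired` outermost is the more widely supported spelling for tools that unwrap a TypedDict field from the outside in. A minimal sketch with the hint metadata replaced by a plain string (the `"target=sources"` marker is purely illustrative):

    from typing import Annotated, NotRequired, TypedDict  # Python 3.11+

    class SourceEntitySketch(TypedDict):
        sources: NotRequired[list[str]]
        # NotRequired wraps the whole annotation; any extra metadata
        # lives inside Annotated.
        source: NotRequired[Annotated[str, "target=sources"]]

    # Both keys are optional, so a dict with only one of them type-checks.
    entity: SourceEntitySketch = {"source": "debian/foo.conf"}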
diff --git a/tests/test_interpreter.py b/tests/test_interpreter.py
index 6cbfd44..154ee4a 100644
--- a/tests/test_interpreter.py
+++ b/tests/test_interpreter.py
@@ -82,7 +82,7 @@ def test_interpreter_detection(
 ) -> None:
     interpreter = extract_shebang_interpreter(raw_shebang)
     # The `and ...` part is just to get the raw line in the error message
-    assert interpreter is not None or raw_shebang == b""
+    assert interpreter is not None
     assert interpreter.original_command == original_command
     assert interpreter.command_full_basename == command_full_basename
@@ -147,11 +147,11 @@ def test_interpreter_rewrite(empty_manifest: HighLevelManifest) -> None:
     foo = fs_root.lookup("usr/bin/foo")
     foo_sh = fs_root.lookup("usr/bin/foo.sh")
 
-    assert foo.is_file
+    assert foo is not None and foo.is_file
     with foo.open() as fd:
         assert fd.read() == "random data"
 
-    assert foo_sh.is_file
+    assert foo_sh is not None and foo_sh.is_file
     with foo_sh.open() as fd:
         expected = textwrap.dedent(
             """\
diff --git a/tests/test_migrations.py b/tests/test_migrations.py
index cbf3f79..9d43549 100644
--- a/tests/test_migrations.py
+++ b/tests/test_migrations.py
@@ -119,8 +119,10 @@ def _assert_unsupported_feature(
 
 
 def _write_manifest(manifest: HighLevelManifest) -> str:
+    mutable_manifest = manifest.mutable_manifest
+    assert mutable_manifest is not None
     with io.StringIO() as fd:
-        manifest.mutable_manifest.write_to(fd)
+        mutable_manifest.write_to(fd)
         return fd.getvalue()
 
 
@@ -134,7 +136,7 @@ def _verify_migrator_generates_parsable_manifest(
     expected_warnings: Optional[List[str]] = None,
     expected_renamed_paths: Optional[List[Tuple[str, str]]] = None,
     expected_removals: Optional[List[str]] = None,
-    required_plugins: Optional[Sequence[str]] = tuple(),
+    required_plugins: Sequence[str] = tuple(),
     dh_config_mode: Optional[int] = None,
 ) -> None:
     # No file, no changes
diff --git a/tests/test_packager_provided_files.py b/tests/test_packager_provided_files.py
index 149564d..b0e075f 100644
--- a/tests/test_packager_provided_files.py
+++ b/tests/test_packager_provided_files.py
@@ -1,5 +1,5 @@
 import random
-from typing import cast
+from typing import cast, TYPE_CHECKING
 
 import pytest
 
@@ -10,9 +10,17 @@ from debputy.plugin.api.test_api import (
     InitializedPluginUnderTest,
     build_virtual_file_system,
 )
-from debputy.plugin.api.test_api.test_impl import initialize_plugin_under_test_preloaded
+from debputy.plugin.api.test_api.test_impl import (
+    initialize_plugin_under_test_preloaded,
+)
 from tutil import faked_binary_package, binary_package_table
 
+if TYPE_CHECKING:
+    from debputy.plugin.api.test_api.test_impl import InitializedPluginUnderTestImpl
+
+    # Irrelevant, but makes the import not "unused" for things that do not parse `cast("...", ...)` expressions
+    assert InitializedPluginUnderTestImpl is not None
+
 
 def ppf_test_plugin(api: DebputyPluginInitializer) -> None:
     api.packager_provided_file(
diff --git a/tests/test_plugin_tester.py b/tests/test_plugin_tester.py
index 8078a02..b41f8a3 100644
--- a/tests/test_plugin_tester.py
+++ b/tests/test_plugin_tester.py
@@ -1,6 +1,6 @@
 import json
 import os.path
-from typing import List, Tuple, Type, cast
+from typing import List, Tuple, Type, cast, TYPE_CHECKING
 
 import pytest
 
@@ -25,6 +25,13 @@ from debputy.plugin.api.test_api.test_impl import (
     initialize_plugin_under_test_from_inline_json,
 )
 
+if TYPE_CHECKING:
+    from debputy.plugin.api import PluginInitializationEntryPoint
+
+    # Irrelevant, but makes the import not "unused" for things that do not parse `cast("...", ...)` expressions
+    assert PluginInitializationEntryPoint is not None
+
+
 CUSTOM_PLUGIN_JSON_FILE = os.path.join(
     os.path.dirname(__file__), "data", "custom-plugin.json.in"
)
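The `TYPE_CHECKING` blocks above give `cast("SomeType", ...)`-style string references something to resolve against, without importing the implementation module at runtime. The general shape of the pattern (the imported module here is a lightweight stand-in, not debputy's API):

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Only evaluated by type checkers; avoids runtime import cost/cycles.
        from decimal import Decimal  # stand-in for a heavyweight module

    def as_decimal(value: object) -> "Decimal":
        # The quoted name is resolved by the type checker via the
        # TYPE_CHECKING import above; at runtime, cast() just returns value.
        return cast("Decimal", value)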
diff --git a/typing-stubs/Levenshtein/__init__.pyi b/typing-stubs/Levenshtein/__init__.pyi
new file mode 100644
index 0000000..a028706
--- /dev/null
+++ b/typing-stubs/Levenshtein/__init__.pyi
@@ -0,0 +1 @@
+def distance(a: str, b: str) -> int: ...
diff --git a/typing-stubs/README.md b/typing-stubs/README.md
new file mode 100644
index 0000000..5e307ae
--- /dev/null
+++ b/typing-stubs/README.md
@@ -0,0 +1,4 @@
+# Typing stubs
+
+This directory contains typing stubs for modules/libraries imported by `debputy` that
+do not provide proper typing.
diff --git a/typing-stubs/colored/__init__.pyi b/typing-stubs/colored/__init__.pyi
new file mode 100644
index 0000000..340b656
--- /dev/null
+++ b/typing-stubs/colored/__init__.pyi
@@ -0,0 +1,8 @@
+from .attributes import Style as Style
+from .background import Back as Back
+from .colored import Colored as Colored, attr as attr, back as back, back_rgb as back_rgb, bg as bg, fg as fg, fore as fore, fore_rgb as fore_rgb, set_tty_aware as set_tty_aware, style as style, stylize as stylize, stylize_interactive as stylize_interactive
+from .controls import Controls as Controls
+from .cprint import cprint as cprint
+from .foreground import Fore as Fore
+
+__version__: str
diff --git a/typing-stubs/colored/attributes.pyi b/typing-stubs/colored/attributes.pyi
new file mode 100644
index 0000000..d7fc0ab
--- /dev/null
+++ b/typing-stubs/colored/attributes.pyi
@@ -0,0 +1,14 @@
+from .exceptions import InvalidStyle as InvalidStyle
+from .library import Library as Library
+from .utilities import Utilities as Utilities
+
+class MetaStyle(type):
+    def __getattr__(cls, color: str): ...
+
+class Style(metaclass=MetaStyle):
+    @classmethod
+    def underline_color(cls, color: str | int) -> str: ...
+    @classmethod
+    def UNDERLINE_COLOR(cls, color: str | int) -> str: ...
+
+class style(Style): ...
diff --git a/typing-stubs/colored/background.pyi b/typing-stubs/colored/background.pyi
new file mode 100644
index 0000000..f98ef99
--- /dev/null
+++ b/typing-stubs/colored/background.pyi
@@ -0,0 +1,14 @@
+from .exceptions import InvalidColor as InvalidColor
+from .library import Library as Library
+from .utilities import Utilities as Utilities
+
+class MetaBack(type):
+    def __getattr__(cls, color: str): ...
+
+class Back(metaclass=MetaBack):
+    @classmethod
+    def rgb(cls, r: int | str, g: int | str, b: int | str) -> str: ...
+    @classmethod
+    def RGB(cls, r: int | str, g: int | str, b: int | str) -> str: ...
+
+class back(Back): ...
diff --git a/typing-stubs/colored/colored.pyi b/typing-stubs/colored/colored.pyi
new file mode 100644
index 0000000..8a463ac
--- /dev/null
+++ b/typing-stubs/colored/colored.pyi
@@ -0,0 +1,30 @@
+from .hexadecimal import Hex as Hex
+from .library import Library as Library
+from .utilities import Utilities as Utilities
+from _typeshed import Incomplete
+from typing import Any
+
+TTY_AWARE: bool
+IS_TTY: Incomplete
+
+class Colored:
+    def __init__(self, name: Any) -> None: ...
+    def attribute(self, line_color: str = '') -> str: ...
+    def foreground(self) -> str: ...
+    def background(self) -> str: ...
+    @staticmethod
+    def enable_windows_terminal_mode() -> Any: ...
+    @staticmethod
+    def enabled() -> bool: ...
+
+def style(name: int | str, color: str | int = '') -> str: ...
+def fore(name: int | str) -> str: ...
+def back(name: int | str) -> str: ...
+def fore_rgb(r: int | str, g: int | str, b: int | str) -> str: ...
+def back_rgb(r: int | str, g: int | str, b: int | str) -> str: ...
+def attr(name: int | str) -> str: ...
+def fg(name: int | str) -> str: ...
+def bg(name: int | str) -> str: ...
+def stylize(text: str, formatting: int | str, reset: bool = True) -> str: ...
+def stylize_interactive(text: str, formatting: str, reset: bool = True) -> str: ...
+def set_tty_aware(awareness: bool = True) -> None: ...
diff --git a/typing-stubs/colored/controls.pyi b/typing-stubs/colored/controls.pyi
new file mode 100644
index 0000000..11bf845
--- /dev/null
+++ b/typing-stubs/colored/controls.pyi
@@ -0,0 +1,8 @@
+from .exceptions import InvalidControl as InvalidControl
+from .library import Library as Library
+from .utilities import Utilities as Utilities
+from _typeshed import Incomplete
+
+class Controls:
+    def __init__(self) -> None: ...
+    def nav(self, name: str, row: int, column: Incomplete | None = None) -> str: ...
diff --git a/typing-stubs/colored/cprint.pyi b/typing-stubs/colored/cprint.pyi
new file mode 100644
index 0000000..81b942d
--- /dev/null
+++ b/typing-stubs/colored/cprint.pyi
@@ -0,0 +1,3 @@
+from .colored import Colored as Colored
+
+def cprint(text: str, fore_256: int | str = '', back_256: int | str = '', fore_rgb: tuple = (255, 255, 255), back_rgb: tuple = (0, 0, 0), formatting: int | str = '', line_color: int | str = '', reset: bool = True, **kwargs) -> None: ...
diff --git a/typing-stubs/colored/exceptions.pyi b/typing-stubs/colored/exceptions.pyi
new file mode 100644
index 0000000..cf3e9b8
--- /dev/null
+++ b/typing-stubs/colored/exceptions.pyi
@@ -0,0 +1,17 @@
+from _typeshed import Incomplete
+
+class InvalidColor(Exception):
+    message: Incomplete
+    def __init__(self, message: str) -> None: ...
+
+class InvalidHexColor(Exception):
+    message: Incomplete
+    def __init__(self, message: str) -> None: ...
+
+class InvalidStyle(Exception):
+    message: Incomplete
+    def __init__(self, message: str) -> None: ...
+
+class InvalidControl(Exception):
+    message: Incomplete
+    def __init__(self, message: str) -> None: ...
diff --git a/typing-stubs/colored/foreground.pyi b/typing-stubs/colored/foreground.pyi
new file mode 100644
index 0000000..1bca2ed
--- /dev/null
+++ b/typing-stubs/colored/foreground.pyi
@@ -0,0 +1,14 @@
+from .exceptions import InvalidColor as InvalidColor
+from .library import Library as Library
+from .utilities import Utilities as Utilities
+
+class MetaFore(type):
+    def __getattr__(cls, color: str): ...
+
+class Fore(metaclass=MetaFore):
+    @classmethod
+    def rgb(cls, r: int | str, g: int | str, b: int | str) -> str: ...
+    @classmethod
+    def RGB(cls, r: int | str, g: int | str, b: int | str) -> str: ...
+
+class fore(Fore): ...
diff --git a/typing-stubs/colored/hexadecimal.pyi b/typing-stubs/colored/hexadecimal.pyi
new file mode 100644
index 0000000..17f376e
--- /dev/null
+++ b/typing-stubs/colored/hexadecimal.pyi
@@ -0,0 +1,7 @@
+from .library import Library as Library
+
+class Hex:
+    def find(self, color: str | int) -> str: ...
+    @staticmethod
+    def cube(x: int) -> int: ...
+    def fit(self, hex_val: str, ref: int) -> int: ...
diff --git a/typing-stubs/colored/library.pyi b/typing-stubs/colored/library.pyi
new file mode 100644
index 0000000..c8ff33a
--- /dev/null
+++ b/typing-stubs/colored/library.pyi
@@ -0,0 +1,17 @@
+from dataclasses import dataclass
+
+@dataclass
+class Library:
+    ESC: str = ...
+    END: str = ...
+    FOREGROUND_256: str = ...
+    BACKGROUND_256: str = ...
+    FOREGROUND_RGB: str = ...
+    BACKGROUND_RGB: str = ...
+    UNDERLINE_COLOR: str = ...
+    CONTROLS = ...
+    STYLES = ...
+    COLORTERM = ...
+    COLORS = ...
+    HEX_COLORS = ...
+    def __init__(self, ESC, END, FOREGROUND_256, BACKGROUND_256, FOREGROUND_RGB, BACKGROUND_RGB, UNDERLINE_COLOR) -> None: ...
diff --git a/typing-stubs/colored/utilities.pyi b/typing-stubs/colored/utilities.pyi
new file mode 100644
index 0000000..97a1157
--- /dev/null
+++ b/typing-stubs/colored/utilities.pyi
@@ -0,0 +1,13 @@
+from .exceptions import InvalidColor as InvalidColor, InvalidControl as InvalidControl, InvalidHexColor as InvalidHexColor, InvalidStyle as InvalidStyle
+from .library import Library as Library
+
+class Utilities:
+    RGB_MAXIMUM_COLOR: int
+    colorterm: str
+    def __init__(self) -> None: ...
+    def is_color_exist(self, name: str) -> bool: ...
+    def is_style_exist(self, name: str) -> bool: ...
+    def is_control_exist(self, name: str) -> bool: ...
+    def convert_percentages(self, percent: str | int) -> int | str: ...
+    def set_colorterm(self, colorterm: str = ''): ...
+    def is_percentage(self, numbers: tuple) -> list: ...
--
cgit v1.2.3