From 9e3c08db40b8916968b9f30096c7be3f00ce9647 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Sun, 21 Apr 2024 13:44:51 +0200
Subject: Adding upstream version 1:115.7.0.

Signed-off-by: Daniel Baumann
---
 python/mozbuild/.ruff.toml | 9 +
 python/mozbuild/metrics.yaml | 140 +
 python/mozbuild/mozbuild/__init__.py | 0
 python/mozbuild/mozbuild/action/__init__.py | 0
 python/mozbuild/mozbuild/action/buildlist.py | 49 +
 python/mozbuild/mozbuild/action/check_binary.py | 343 +++
 .../mozbuild/action/download_wpt_manifest.py | 21 +
 python/mozbuild/mozbuild/action/dump_env.py | 30 +
 python/mozbuild/mozbuild/action/dumpsymbols.py | 109 +
 python/mozbuild/mozbuild/action/exe_7z_archive.py | 89 +
 python/mozbuild/mozbuild/action/fat_aar.py | 185 ++
 python/mozbuild/mozbuild/action/file_generate.py | 155 +
 .../mozbuild/action/file_generate_wrapper.py | 38 +
 .../mozbuild/action/generate_symbols_file.py | 95 +
 .../mozbuild/action/html_fragment_preprocesor.py | 101 +
 python/mozbuild/mozbuild/action/install.py | 22 +
 python/mozbuild/mozbuild/action/jar_maker.py | 16 +
 python/mozbuild/mozbuild/action/l10n_merge.py | 42 +
 .../mozbuild/action/langpack_localeNames.json | 426 +++
 .../mozbuild/mozbuild/action/langpack_manifest.py | 587 ++++
 python/mozbuild/mozbuild/action/make_dmg.py | 67 +
 python/mozbuild/mozbuild/action/make_unzip.py | 25 +
 python/mozbuild/mozbuild/action/node.py | 137 +
 .../mozbuild/action/package_generated_sources.py | 42 +
 python/mozbuild/mozbuild/action/preprocessor.py | 24 +
 .../mozbuild/action/process_define_files.py | 115 +
 .../mozbuild/action/process_install_manifest.py | 125 +
 python/mozbuild/mozbuild/action/symbols_archive.py | 89 +
 python/mozbuild/mozbuild/action/test_archive.py | 875 ++++++
 python/mozbuild/mozbuild/action/tooltool.py | 1714 +++++++++++
 python/mozbuild/mozbuild/action/unify_symbols.py | 49 +
 python/mozbuild/mozbuild/action/unify_tests.py | 65 +
 python/mozbuild/mozbuild/action/unpack_dmg.py | 52 +
 python/mozbuild/mozbuild/action/util.py | 24 +
 python/mozbuild/mozbuild/action/webidl.py | 19 +
 python/mozbuild/mozbuild/action/wrap_rustc.py | 79 +
 python/mozbuild/mozbuild/action/xpccheck.py | 109 +
 python/mozbuild/mozbuild/action/xpidl-process.py | 153 +
 python/mozbuild/mozbuild/action/zip.py | 52 +
 python/mozbuild/mozbuild/analyze/__init__.py | 0
 python/mozbuild/mozbuild/analyze/hg.py | 176 ++
 python/mozbuild/mozbuild/android_version_code.py | 197 ++
 python/mozbuild/mozbuild/artifact_builds.py | 27 +
 python/mozbuild/mozbuild/artifact_cache.py | 251 ++
 python/mozbuild/mozbuild/artifact_commands.py | 615 ++++
 python/mozbuild/mozbuild/artifacts.py | 1661 +++++++++++
 python/mozbuild/mozbuild/backend/__init__.py | 27 +
 python/mozbuild/mozbuild/backend/base.py | 389 +++
 .../mozbuild/mozbuild/backend/cargo_build_defs.py | 87 +
 python/mozbuild/mozbuild/backend/clangd.py | 126 +
 python/mozbuild/mozbuild/backend/common.py | 603 ++++
 .../mozbuild/mozbuild/backend/configenvironment.py | 357 +++
 python/mozbuild/mozbuild/backend/cpp_eclipse.py | 876 ++++++
 python/mozbuild/mozbuild/backend/fastermake.py | 300 ++
 python/mozbuild/mozbuild/backend/mach_commands.py | 420 +++
 python/mozbuild/mozbuild/backend/make.py | 139 +
 python/mozbuild/mozbuild/backend/recursivemake.py | 1904 ++++++++++++
 .../mozbuild/mozbuild/backend/static_analysis.py | 52 +
 python/mozbuild/mozbuild/backend/test_manifest.py | 110 +
 python/mozbuild/mozbuild/backend/visualstudio.py | 712 +++++
 python/mozbuild/mozbuild/base.py | 1110 +++++++
 python/mozbuild/mozbuild/bootstrap.py | 61 +
 python/mozbuild/mozbuild/build_commands.py | 366 +++
 python/mozbuild/mozbuild/chunkify.py | 56 +
 python/mozbuild/mozbuild/code_analysis/__init__.py | 0
 .../mozbuild/code_analysis/mach_commands.py | 1976 ++++++++++++
 python/mozbuild/mozbuild/code_analysis/moz.build | 8 +
 python/mozbuild/mozbuild/code_analysis/utils.py | 138 +
 python/mozbuild/mozbuild/codecoverage/__init__.py | 0
 .../mozbuild/mozbuild/codecoverage/chrome_map.py | 175 ++
 .../mozbuild/codecoverage/lcov_rewriter.py | 777 +++++
 .../mozbuild/codecoverage/manifest_handler.py | 52 +
 python/mozbuild/mozbuild/codecoverage/packager.py | 71 +
 python/mozbuild/mozbuild/compilation/__init__.py | 0
 .../mozbuild/mozbuild/compilation/codecomplete.py | 55 +
 python/mozbuild/mozbuild/compilation/database.py | 244 ++
 python/mozbuild/mozbuild/compilation/util.py | 64 +
 python/mozbuild/mozbuild/compilation/warnings.py | 392 +++
 python/mozbuild/mozbuild/config_status.py | 184 ++
 python/mozbuild/mozbuild/configure/__init__.py | 1311 ++++++++
 .../mozbuild/configure/check_debug_ranges.py | 68 +
 python/mozbuild/mozbuild/configure/constants.py | 131 +
 python/mozbuild/mozbuild/configure/help.py | 90 +
 python/mozbuild/mozbuild/configure/lint.py | 348 +++
 python/mozbuild/mozbuild/configure/options.py | 614 ++++
 python/mozbuild/mozbuild/configure/util.py | 235 ++
 python/mozbuild/mozbuild/controller/__init__.py | 0
 python/mozbuild/mozbuild/controller/building.py | 1872 ++++++++++++
 python/mozbuild/mozbuild/controller/clobber.py | 249 ++
 python/mozbuild/mozbuild/doctor.py | 605 ++++
 python/mozbuild/mozbuild/dotproperties.py | 86 +
 python/mozbuild/mozbuild/faster_daemon.py | 328 ++
 python/mozbuild/mozbuild/frontend/__init__.py | 0
 python/mozbuild/mozbuild/frontend/context.py | 3144 ++++++++++++++++++++
 python/mozbuild/mozbuild/frontend/data.py | 1369 +++++++++
 python/mozbuild/mozbuild/frontend/emitter.py | 1892 ++++++++++++
 python/mozbuild/mozbuild/frontend/gyp_reader.py | 497 ++++
 python/mozbuild/mozbuild/frontend/mach_commands.py | 338 +++
 python/mozbuild/mozbuild/frontend/reader.py | 1432 +++++++++
 python/mozbuild/mozbuild/frontend/sandbox.py | 313 ++
 python/mozbuild/mozbuild/gen_test_backend.py | 53 +
 python/mozbuild/mozbuild/generated_sources.py | 75 +
 python/mozbuild/mozbuild/gn_processor.py | 788 +++++
 python/mozbuild/mozbuild/html_build_viewer.py | 118 +
 python/mozbuild/mozbuild/jar.py | 648 ++++
 python/mozbuild/mozbuild/mach_commands.py | 2941 ++++++++++++++++++
 python/mozbuild/mozbuild/makeutil.py | 209 ++
 python/mozbuild/mozbuild/mozconfig.py | 403 +++
 python/mozbuild/mozbuild/mozconfig_loader | 48 +
 python/mozbuild/mozbuild/mozinfo.py | 163 +
 python/mozbuild/mozbuild/nodeutil.py | 126 +
 python/mozbuild/mozbuild/preprocessor.py | 938 ++++++
 python/mozbuild/mozbuild/pythonutil.py | 23 +
 python/mozbuild/mozbuild/repackaging/__init__.py | 0
 .../mozbuild/repackaging/application_ini.py | 66 +
 python/mozbuild/mozbuild/repackaging/deb.py | 694 +++++
 python/mozbuild/mozbuild/repackaging/dmg.py | 56 +
 python/mozbuild/mozbuild/repackaging/installer.py | 55 +
 python/mozbuild/mozbuild/repackaging/mar.py | 93 +
 python/mozbuild/mozbuild/repackaging/msi.py | 122 +
 python/mozbuild/mozbuild/repackaging/msix.py | 1193 ++++++++
 python/mozbuild/mozbuild/repackaging/pkg.py | 46 +
 .../mozbuild/mozbuild/repackaging/test/python.ini | 4 +
 .../mozbuild/repackaging/test/test_msix.py | 53 +
 .../html-build-viewer/build_resources.html | 694 +++++
 python/mozbuild/mozbuild/schedules.py | 77 +
 python/mozbuild/mozbuild/settings.py | 30 +
 python/mozbuild/mozbuild/shellutil.py | 210 ++
 python/mozbuild/mozbuild/sphinx.py | 293 ++
 python/mozbuild/mozbuild/telemetry.py | 264 ++
 python/mozbuild/mozbuild/test/__init__.py | 0
 .../html_fragment_preprocesor/example_basic.xml | 10 +
 .../example_multiple_templates.xml | 30 +
 .../data/html_fragment_preprocesor/example_xul.xml | 14 +
 .../test/action/data/invalid/region.properties | 12 +
 .../test/action/data/node/node-test-script.js | 11 +
 .../mozbuild/test/action/test_buildlist.py | 96 +
 .../test/action/test_html_fragment_preprocessor.py | 196 ++
 .../mozbuild/test/action/test_langpack_manifest.py | 269 ++
 python/mozbuild/mozbuild/test/action/test_node.py | 80 +
 .../test/action/test_process_install_manifest.py | 65 +
 python/mozbuild/mozbuild/test/backend/__init__.py | 0
 python/mozbuild/mozbuild/test/backend/common.py | 253 ++
 .../mozbuild/test/backend/data/build/app/moz.build | 54 +
 .../mozbuild/test/backend/data/build/bar.ini | 1 +
 .../mozbuild/test/backend/data/build/bar.js | 2 +
 .../mozbuild/test/backend/data/build/bar.jsm | 1 +
 .../mozbuild/test/backend/data/build/baz.ini | 2 +
 .../mozbuild/test/backend/data/build/baz.jsm | 2 +
 .../test/backend/data/build/components.manifest | 2 +
 .../mozbuild/test/backend/data/build/foo.css | 2 +
 .../mozbuild/test/backend/data/build/foo.ini | 1 +
 .../mozbuild/test/backend/data/build/foo.js | 1 +
 .../mozbuild/test/backend/data/build/foo.jsm | 1 +
 .../mozbuild/test/backend/data/build/jar.mn | 11 +
 .../mozbuild/test/backend/data/build/moz.build | 68 +
 .../mozbuild/test/backend/data/build/prefs.js | 1 +
 .../mozbuild/test/backend/data/build/qux.ini | 5 +
 .../mozbuild/test/backend/data/build/qux.jsm | 5 +
 .../mozbuild/test/backend/data/build/resource | 1 +
 .../mozbuild/test/backend/data/build/resource2 | 1 +
 .../mozbuild/test/backend/data/build/subdir/bar.js | 1 +
 .../mozbuild/test/backend/data/database/bar.c | 0
 .../mozbuild/test/backend/data/database/baz.cpp | 0
 .../backend/data/database/build/non-unified-compat | 0
 .../mozbuild/test/backend/data/database/foo.c | 0
 .../mozbuild/test/backend/data/database/moz.build | 14 +
 .../mozbuild/test/backend/data/database/qux.cpp | 0
 .../mozbuild/test/backend/data/defines/moz.build | 9 +
 .../test/backend/data/dist-files/install.rdf | 0
 .../mozbuild/test/backend/data/dist-files/main.js | 0
 .../test/backend/data/dist-files/moz.build | 8 +
 .../test/backend/data/exports-generated/dom1.h | 0
 .../test/backend/data/exports-generated/foo.h | 0
 .../test/backend/data/exports-generated/gfx.h | 0
 .../test/backend/data/exports-generated/moz.build | 12 +
 .../test/backend/data/exports-generated/mozilla1.h | 0
 .../mozbuild/test/backend/data/exports/dom1.h | 0
 .../mozbuild/test/backend/data/exports/dom2.h | 0
 .../mozbuild/test/backend/data/exports/foo.h | 0
 .../mozbuild/test/backend/data/exports/gfx.h | 0
 .../mozbuild/test/backend/data/exports/moz.build | 8 +
 .../mozbuild/test/backend/data/exports/mozilla1.h | 0
 .../mozbuild/test/backend/data/exports/mozilla2.h | 0
 .../mozbuild/test/backend/data/exports/pprio.h | 0
 .../data/final-target-files-wildcard/bar.xyz | 0
 .../data/final-target-files-wildcard/foo.xyz | 0
 .../data/final-target-files-wildcard/moz.build | 5 +
 .../test/backend/data/final_target/both/moz.build | 6 +
 .../data/final_target/dist-subdir/moz.build | 5 +
 .../data/final_target/final-target/moz.build | 5 +
 .../test/backend/data/final_target/moz.build | 5 +
 .../backend/data/final_target/xpi-name/moz.build | 5 +
 .../backend/data/generated-files-force/foo-data | 0
 .../data/generated-files-force/generate-bar.py | 0
 .../data/generated-files-force/generate-foo.py | 0
 .../backend/data/generated-files-force/moz.build | 14 +
 .../test/backend/data/generated-files/foo-data | 0
 .../backend/data/generated-files/generate-bar.py | 0
 .../backend/data/generated-files/generate-foo.py | 0
 .../test/backend/data/generated-files/moz.build | 12 +
 .../test/backend/data/generated_includes/moz.build | 5 +
 .../test/backend/data/host-defines/moz.build | 9 +
 .../data/host-rust-library-features/Cargo.toml | 13 +
 .../data/host-rust-library-features/moz.build | 22 +
 .../test/backend/data/host-rust-library/Cargo.toml | 15 +
 .../test/backend/data/host-rust-library/moz.build | 22 +
 .../data/install_substitute_config_files/moz.build | 6 +
 .../install_substitute_config_files/sub/foo.h.in | 1 +
 .../install_substitute_config_files/sub/moz.build | 7 +
 .../test/backend/data/ipdl_sources/bar/moz.build | 16 +
 .../test/backend/data/ipdl_sources/foo/moz.build | 16 +
 .../test/backend/data/ipdl_sources/ipdl/moz.build | 9 +
 .../test/backend/data/ipdl_sources/moz.build | 19 +
 .../test/backend/data/jar-manifests/moz.build | 7 +
 .../mozbuild/test/backend/data/linkage/moz.build | 11 +
 .../test/backend/data/linkage/prog/moz.build | 11 +
 .../test/backend/data/linkage/prog/qux/moz.build | 6 +
 .../test/backend/data/linkage/prog/qux/qux1.c | 0
 .../test/backend/data/linkage/real/foo/foo1.c | 0
 .../test/backend/data/linkage/real/foo/foo2.c | 0
 .../test/backend/data/linkage/real/foo/moz.build | 6 +
 .../test/backend/data/linkage/real/moz.build | 14 +
 .../test/backend/data/linkage/shared/baz/baz1.c | 0
 .../test/backend/data/linkage/shared/baz/moz.build | 6 +
 .../test/backend/data/linkage/shared/moz.build | 14 +
 .../test/backend/data/linkage/static/bar/bar1.cc | 0
 .../test/backend/data/linkage/static/bar/bar2.cc | 0
 .../linkage/static/bar/bar_helper/bar_helper1.cpp | 0
 .../data/linkage/static/bar/bar_helper/moz.build | 8 +
 .../test/backend/data/linkage/static/bar/moz.build | 13 +
 .../test/backend/data/linkage/static/moz.build | 12 +
 .../test/backend/data/linkage/templates.mozbuild | 23 +
 .../bar/baz/dummy_file_for_nonempty_directory | 0
 .../foo/dummy_file_for_nonempty_directory | 0
 .../test/backend/data/local_includes/moz.build | 5 +
 .../backend/data/localized-files/en-US/bar.ini | 0
 .../test/backend/data/localized-files/en-US/foo.js | 0
 .../test/backend/data/localized-files/moz.build | 9 +
 .../en-US/localized-input | 0
 .../data/localized-generated-files-AB_CD/foo-data | 0
 .../generate-foo.py | 0
 .../inner/locales/en-US/localized-input | 0
 .../locales/en-US/localized-input | 0
 .../data/localized-generated-files-AB_CD/moz.build | 32 +
 .../non-localized-input | 0
 .../en-US/localized-input | 0
 .../data/localized-generated-files-force/foo-data | 0
 .../generate-foo.py | 0
 .../data/localized-generated-files-force/moz.build | 22 +
 .../non-localized-input | 0
 .../en-US/localized-input | 0
 .../data/localized-generated-files/foo-data | 0
 .../data/localized-generated-files/generate-foo.py | 0
 .../data/localized-generated-files/moz.build | 15 +
 .../localized-generated-files/non-localized-input | 0
 .../backend/data/localized-pp-files/en-US/bar.ini | 0
 .../backend/data/localized-pp-files/en-US/foo.js | 0
 .../test/backend/data/localized-pp-files/moz.build | 8 +
 .../data/prog-lib-c-only/c-library/c-library.c | 2 +
 .../data/prog-lib-c-only/c-library/moz.build | 7 +
 .../prog-lib-c-only/c-program/c_test_program.c | 2 +
 .../data/prog-lib-c-only/c-program/moz.build | 7 +
 .../c-simple-programs/c_simple_program.c | 2 +
 .../prog-lib-c-only/c-simple-programs/moz.build | 5 +
 .../data/prog-lib-c-only/cxx-library/c-source.c | 2 +
 .../prog-lib-c-only/cxx-library/cxx-library.cpp | 2 +
 .../data/prog-lib-c-only/cxx-library/moz.build | 10 +
 .../cxx-program/cxx_test_program.cpp | 2 +
 .../data/prog-lib-c-only/cxx-program/moz.build | 7 +
 .../cxx-simple-programs/cxx_simple_program.cpp | 2 +
 .../prog-lib-c-only/cxx-simple-programs/moz.build | 5 +
 .../test/backend/data/prog-lib-c-only/moz.build | 35 +
 .../data/prog-lib-c-only/simple-programs/moz.build | 3 +
 .../backend/data/program-paths/dist-bin/moz.build | 4 +
 .../data/program-paths/dist-subdir/moz.build | 5 +
 .../data/program-paths/final-target/moz.build | 5 +
 .../test/backend/data/program-paths/moz.build | 15 +
 .../data/program-paths/not-installed/moz.build | 5 +
 .../test/backend/data/resources/bar.res.in | 0
 .../test/backend/data/resources/cursor.cur | 0
 .../test/backend/data/resources/desktop1.ttf | 0
 .../test/backend/data/resources/desktop2.ttf | 0
 .../test/backend/data/resources/extra.manifest | 0
 .../mozbuild/test/backend/data/resources/font1.ttf | 0
 .../mozbuild/test/backend/data/resources/font2.ttf | 0
 .../mozbuild/test/backend/data/resources/foo.res | 0
 .../test/backend/data/resources/mobile.ttf | 0
 .../mozbuild/test/backend/data/resources/moz.build | 9 +
 .../test/backend/data/resources/test.manifest | 0
 .../backend/data/rust-library-features/Cargo.toml | 15 +
 .../backend/data/rust-library-features/moz.build | 20 +
 .../test/backend/data/rust-library/Cargo.toml | 15 +
 .../test/backend/data/rust-library/moz.build | 19 +
 .../backend/data/rust-programs/code/Cargo.toml | 10 +
 .../test/backend/data/rust-programs/code/moz.build | 6 +
 .../test/backend/data/rust-programs/moz.build | 5 +
 .../mozbuild/test/backend/data/sources/bar.cpp | 0
 .../mozbuild/test/backend/data/sources/bar.s | 0
 .../mozbuild/test/backend/data/sources/baz.c | 0
 .../mozbuild/test/backend/data/sources/foo.asm | 0
 .../mozbuild/test/backend/data/sources/foo.cpp | 0
 .../mozbuild/test/backend/data/sources/fuga.mm | 0
 .../mozbuild/test/backend/data/sources/hoge.mm | 0
 .../mozbuild/test/backend/data/sources/moz.build | 26 +
 .../mozbuild/test/backend/data/sources/qux.c | 0
 .../mozbuild/test/backend/data/sources/titi.S | 0
 .../mozbuild/test/backend/data/sources/toto.S | 0
 .../mozbuild/test/backend/data/stub0/Makefile.in | 4 +
 .../test/backend/data/stub0/dir1/Makefile.in | 7 +
 .../test/backend/data/stub0/dir1/moz.build | 3 +
 .../test/backend/data/stub0/dir2/moz.build | 3 +
 .../test/backend/data/stub0/dir3/Makefile.in | 7 +
 .../test/backend/data/stub0/dir3/moz.build | 3 +
 .../mozbuild/test/backend/data/stub0/moz.build | 7 +
 .../data/substitute_config_files/Makefile.in | 0
 .../backend/data/substitute_config_files/foo.in | 1 +
 .../backend/data/substitute_config_files/moz.build | 5 +
 .../child/another-file.sjs | 0
 .../test-manifest-shared-support/child/browser.ini | 6 +
 .../child/data/one.txt | 0
 .../child/data/two.txt | 0
 .../test-manifest-shared-support/child/test_sub.js | 0
 .../test-manifest-shared-support/mochitest.ini | 8 +
 .../data/test-manifest-shared-support/moz.build | 5 +
 .../test-manifest-shared-support/support-file.txt | 0
 .../data/test-manifest-shared-support/test_foo.js | 0
 .../mochitest-common.ini | 1 +
 .../test-manifests-backend-sources/mochitest.ini | 2 +
 .../data/test-manifests-backend-sources/moz.build | 6 +
 .../test-manifests-backend-sources/test_bar.js | 0
 .../test-manifests-backend-sources/test_foo.js | 0
 .../mochitest1.ini | 4 +
 .../mochitest2.ini | 4 +
 .../moz.build | 7 +
 .../test_bar.js | 0
 .../test_foo.js | 0
 .../instrumentation.ini | 1 +
 .../test-manifests-package-tests/mochitest.ini | 1 +
 .../data/test-manifests-package-tests/mochitest.js | 0
 .../data/test-manifests-package-tests/moz.build | 10 +
 .../test-manifests-package-tests/not_packaged.java | 0
 .../data/test-manifests-written/dir1/test_bar.js | 0
 .../data/test-manifests-written/dir1/xpcshell.ini | 3 +
 .../data/test-manifests-written/mochitest.ini | 3 +
 .../data/test-manifests-written/mochitest.js | 0
 .../backend/data/test-manifests-written/moz.build | 9 +
 .../data/test-manifests-written/xpcshell.ini | 4 +
 .../data/test-manifests-written/xpcshell.js | 0
 .../data/test-support-binaries-tracked/moz.build | 5 +
 .../test-support-binaries-tracked/src/moz.build | 12 +
 .../test-support-binaries-tracked/test/moz.build | 32 +
 .../test/test-one.cpp | 0
 .../test/test-two.cpp | 0
 .../mozbuild/test/backend/data/test_config/file.in | 3 +
 .../test/backend/data/test_config/moz.build | 3 +
 .../backend/data/variable_passthru/Makefile.in | 0
 .../test/backend/data/variable_passthru/baz.def | 0
 .../test/backend/data/variable_passthru/moz.build | 11 +
 .../test/backend/data/variable_passthru/test1.c | 0
 .../test/backend/data/variable_passthru/test1.cpp | 0
 .../test/backend/data/variable_passthru/test1.mm | 0
 .../test/backend/data/variable_passthru/test2.c | 0
 .../test/backend/data/variable_passthru/test2.cpp | 0
 .../test/backend/data/variable_passthru/test2.mm | 0
 .../test/backend/data/visual-studio/dir1/bar.cpp | 0
 .../test/backend/data/visual-studio/dir1/foo.cpp | 0
 .../test/backend/data/visual-studio/dir1/moz.build | 9 +
 .../test/backend/data/visual-studio/moz.build | 7 +
 .../mozbuild/test/backend/data/xpidl/bar.idl | 0
 .../data/xpidl/config/makefiles/xpidl/Makefile.in | 0
 .../mozbuild/test/backend/data/xpidl/foo.idl | 0
 .../mozbuild/test/backend/data/xpidl/moz.build | 6 +
 .../mozbuild/mozbuild/test/backend/test_build.py | 265 ++
 .../test/backend/test_configenvironment.py | 73 +
 .../mozbuild/test/backend/test_database.py | 91 +
 .../mozbuild/test/backend/test_fastermake.py | 42 +
 .../test/backend/test_partialconfigenvironment.py | 173 ++
 .../mozbuild/test/backend/test_recursivemake.py | 1307 ++++++++
 .../mozbuild/test/backend/test_test_manifest.py | 94 +
 .../mozbuild/test/backend/test_visualstudio.py | 63 +
 .../test/code_analysis/test_mach_commands.py | 90 +
 .../mozbuild/test/codecoverage/sample_lcov.info | 1895 ++++++++++++
 .../test/codecoverage/test_lcov_rewrite.py | 444 +++
 python/mozbuild/mozbuild/test/common.py | 69 +
 .../mozbuild/mozbuild/test/compilation/__init__.py | 0
 .../mozbuild/test/compilation/test_warnings.py | 240 ++
 python/mozbuild/mozbuild/test/configure/common.py | 307 ++
 .../test/configure/data/decorators.configure | 53 +
 .../mozbuild/test/configure/data/empty_mozconfig | 0
 .../mozbuild/test/configure/data/extra.configure | 15 +
 .../test/configure/data/imply_option/imm.configure | 37 +
 .../configure/data/imply_option/infer.configure | 28 +
 .../configure/data/imply_option/infer_ko.configure | 36 +
 .../configure/data/imply_option/negative.configure | 40 +
 .../configure/data/imply_option/simple.configure | 28 +
 .../configure/data/imply_option/values.configure | 28 +
 .../test/configure/data/included.configure | 68 +
 .../mozbuild/test/configure/data/moz.configure | 205 ++
 .../test/configure/data/set_config.configure | 51 +
 .../test/configure/data/set_define.configure | 51 +
 .../test/configure/data/subprocess.configure | 24 +
 python/mozbuild/mozbuild/test/configure/lint.py | 62 +
 .../configure/macos_fake_sdk/SDKSettings.plist | 8 +
 .../mozbuild/test/configure/test_bootstrap.py | 43 +
 .../test/configure/test_checks_configure.py | 1169 ++++++++
 .../mozbuild/test/configure/test_compile_checks.py | 599 ++++
 .../mozbuild/test/configure/test_configure.py | 1986 +++++++++++++
 .../mozbuild/mozbuild/test/configure/test_lint.py | 487 +++
 .../mozbuild/test/configure/test_moz_configure.py | 185 ++
 .../mozbuild/test/configure/test_options.py | 905 ++++++
 .../test/configure/test_toolchain_configure.py | 2056 +++++++++++++
 .../test/configure/test_toolchain_helpers.py | 433 +++
 .../test/configure/test_toolkit_moz_configure.py | 102 +
 .../mozbuild/mozbuild/test/configure/test_util.py | 539 ++++
 .../mozbuild/mozbuild/test/controller/__init__.py | 0
 .../mozbuild/test/controller/test_ccachestats.py | 866 ++++++
 .../mozbuild/test/controller/test_clobber.py | 214 ++
 python/mozbuild/mozbuild/test/data/Makefile | 0
 python/mozbuild/mozbuild/test/data/bad.properties | 12 +
 .../mozbuild/mozbuild/test/data/test-dir/Makefile | 0
 .../mozbuild/test/data/test-dir/with/Makefile | 0
 .../test/data/test-dir/with/without/with/Makefile | 0
 .../test/data/test-dir/without/with/Makefile | 0
 .../mozbuild/mozbuild/test/data/valid.properties | 11 +
 python/mozbuild/mozbuild/test/frontend/__init__.py | 0
 .../data/allow-compiler-warnings/moz.build | 20 +
 .../frontend/data/allow-compiler-warnings/test1.c | 0
 .../mozbuild/test/frontend/data/asflags/moz.build | 15 +
 .../mozbuild/test/frontend/data/asflags/test1.c | 0
 .../mozbuild/test/frontend/data/asflags/test2.S | 0
 .../test/frontend/data/branding-files/bar.ico | 0
 .../test/frontend/data/branding-files/baz.png | 0
 .../test/frontend/data/branding-files/foo.xpm | 0
 .../test/frontend/data/branding-files/moz.build | 12 +
 .../test/frontend/data/branding-files/quux.icns | 0
 .../test/frontend/data/compile-defines/moz.build | 16 +
 .../test/frontend/data/compile-defines/test1.c | 0
 .../data/compile-flags-field-validation/moz.build | 15 +
 .../data/compile-flags-field-validation/test1.c | 0
 .../data/compile-flags-templates/moz.build | 27 +
 .../frontend/data/compile-flags-templates/test1.c | 0
 .../data/compile-flags-type-validation/moz.build | 15 +
 .../data/compile-flags-type-validation/test1.c | 0
 .../test/frontend/data/compile-flags/moz.build | 22 +
 .../test/frontend/data/compile-flags/test1.c | 0
 .../test/frontend/data/compile-includes/moz.build | 15 +
 .../frontend/data/compile-includes/subdir/header.h | 0
 .../test/frontend/data/compile-includes/test1.c | 0
 .../data/config-file-substitution/moz.build | 6 +
 .../crate-dependency-path-resolution/Cargo.toml | 18 +
 .../crate-dependency-path-resolution/moz.build | 19 +
 .../shallow/Cargo.toml | 6 +
 .../the/depths/Cargo.toml | 9 +
 .../mozbuild/test/frontend/data/defines/moz.build | 9 +
 .../data/disable-compiler-warnings/moz.build | 20 +
 .../data/disable-compiler-warnings/test1.c | 0
 .../frontend/data/disable-stl-wrapping/moz.build | 21 +
 .../frontend/data/disable-stl-wrapping/test1.c | 0
 .../frontend/data/dist-files-missing/install.rdf | 0
 .../frontend/data/dist-files-missing/moz.build | 8 +
 .../test/frontend/data/dist-files/install.rdf | 0
 .../mozbuild/test/frontend/data/dist-files/main.js | 0
 .../test/frontend/data/dist-files/moz.build | 8 +
 .../test/frontend/data/exports-generated/foo.h | 0
 .../test/frontend/data/exports-generated/moz.build | 8 +
 .../frontend/data/exports-generated/mozilla1.h | 0
 .../frontend/data/exports-missing-generated/foo.h | 0
 .../data/exports-missing-generated/moz.build | 5 +
 .../test/frontend/data/exports-missing/foo.h | 0
 .../test/frontend/data/exports-missing/moz.build | 6 +
 .../test/frontend/data/exports-missing/mozilla1.h | 0
 .../mozbuild/test/frontend/data/exports/bar.h | 0
 .../mozbuild/test/frontend/data/exports/baz.h | 0
 .../mozbuild/test/frontend/data/exports/dom1.h | 0
 .../mozbuild/test/frontend/data/exports/dom2.h | 0
 .../mozbuild/test/frontend/data/exports/dom3.h | 0
 .../mozbuild/test/frontend/data/exports/foo.h | 0
 .../mozbuild/test/frontend/data/exports/gfx.h | 0
 .../mozbuild/test/frontend/data/exports/mem.h | 0
 .../mozbuild/test/frontend/data/exports/mem2.h | 0
 .../mozbuild/test/frontend/data/exports/moz.build | 13 +
 .../mozbuild/test/frontend/data/exports/mozilla1.h | 0
 .../mozbuild/test/frontend/data/exports/mozilla2.h | 0
 .../mozbuild/test/frontend/data/exports/pprio.h | 0
 .../mozbuild/test/frontend/data/exports/pprthred.h | 0
 .../bug_component/bad-assignment/moz.build | 2 +
 .../bug_component/different-matchers/moz.build | 4 +
 .../data/files-info/bug_component/final/moz.build | 3 +
 .../bug_component/final/subcomponent/moz.build | 2 +
 .../data/files-info/bug_component/moz.build | 2 +
 .../data/files-info/bug_component/simple/moz.build | 2 +
 .../data/files-info/bug_component/static/moz.build | 5 +
 .../test/frontend/data/files-info/moz.build | 0
 .../final-target-pp-files-non-srcdir/moz.build | 7 +
 .../data/generated-files-absolute-script/moz.build | 9 +
 .../data/generated-files-absolute-script/script.py | 0
 .../frontend/data/generated-files-force/moz.build | 11 +
 .../data/generated-files-method-names/moz.build | 13 +
 .../data/generated-files-method-names/script.py | 0
 .../data/generated-files-no-inputs/moz.build | 9 +
 .../data/generated-files-no-inputs/script.py | 0
 .../generated-files-no-python-script/moz.build | 8 +
 .../generated-files-no-python-script/script.rb | 0
 .../data/generated-files-no-script/moz.build | 8 +
 .../test/frontend/data/generated-files/moz.build | 9 +
 .../test/frontend/data/generated-sources/a.cpp | 0
 .../test/frontend/data/generated-sources/b.cc | 0
 .../test/frontend/data/generated-sources/c.cxx | 0
 .../test/frontend/data/generated-sources/d.c | 0
 .../test/frontend/data/generated-sources/e.m | 0
 .../test/frontend/data/generated-sources/f.mm | 0
 .../test/frontend/data/generated-sources/g.S | 0
 .../test/frontend/data/generated-sources/h.s | 0
 .../test/frontend/data/generated-sources/i.asm | 0
 .../test/frontend/data/generated-sources/moz.build | 39 +
 .../frontend/data/generated_includes/moz.build | 5 +
 .../frontend/data/host-compile-flags/moz.build | 22 +
 .../test/frontend/data/host-compile-flags/test1.c | 0
 .../data/host-program-paths/final-target/moz.build | 5 +
 .../data/host-program-paths/installed/moz.build | 4 +
 .../frontend/data/host-program-paths/moz.build | 14 +
 .../host-program-paths/not-installed/moz.build | 5 +
 .../frontend/data/host-rust-libraries/Cargo.toml | 15 +
 .../frontend/data/host-rust-libraries/moz.build | 22 +
 .../data/host-rust-program-no-cargo-toml/moz.build | 1 +
 .../host-rust-program-nonexistent-name/Cargo.toml | 7 +
 .../host-rust-program-nonexistent-name/moz.build | 1 +
 .../frontend/data/host-rust-programs/Cargo.toml | 7 +
 .../frontend/data/host-rust-programs/moz.build | 1 +
 .../mozbuild/test/frontend/data/host-sources/a.cpp | 0
 .../mozbuild/test/frontend/data/host-sources/b.cc | 0
 .../mozbuild/test/frontend/data/host-sources/c.cxx | 0
 .../mozbuild/test/frontend/data/host-sources/d.c | 0
 .../mozbuild/test/frontend/data/host-sources/e.mm | 0
 .../mozbuild/test/frontend/data/host-sources/f.mm | 0
 .../test/frontend/data/host-sources/moz.build | 27 +
 .../frontend/data/include-basic/included.build | 4 +
 .../test/frontend/data/include-basic/moz.build | 7 +
 .../data/include-file-stack/included-1.build | 4 +
 .../data/include-file-stack/included-2.build | 4 +
 .../frontend/data/include-file-stack/moz.build | 5 +
 .../test/frontend/data/include-missing/moz.build | 5 +
 .../data/include-outside-topsrcdir/relative.build | 4 +
 .../include-relative-from-child/child/child.build | 4 +
 .../include-relative-from-child/child/child2.build | 4 +
 .../child/grandchild/grandchild.build | 4 +
 .../data/include-relative-from-child/parent.build | 4 +
 .../data/include-topsrcdir-relative/moz.build | 5 +
 .../data/include-topsrcdir-relative/sibling.build | 4 +
 .../data/inheriting-variables/bar/moz.build | 5 +
 .../data/inheriting-variables/foo/baz/moz.build | 7 +
 .../data/inheriting-variables/foo/moz.build | 7 +
 .../frontend/data/inheriting-variables/moz.build | 10 +
 .../test/frontend/data/ipdl_sources/bar/moz.build | 14 +
 .../test/frontend/data/ipdl_sources/foo/moz.build | 14 +
 .../test/frontend/data/ipdl_sources/moz.build | 10 +
 .../data/jar-manifests-multiple-files/moz.build | 7 +
 .../test/frontend/data/jar-manifests/moz.build | 7 +
 .../frontend/data/library-defines/liba/moz.build | 5 +
 .../frontend/data/library-defines/libb/moz.build | 7 +
 .../frontend/data/library-defines/libc/moz.build | 5 +
 .../frontend/data/library-defines/libd/moz.build | 5 +
 .../test/frontend/data/library-defines/moz.build | 11 +
 .../test/frontend/data/link-flags/moz.build | 16 +
 .../mozbuild/test/frontend/data/link-flags/test1.c | 0
 .../frontend/data/local_includes-filename/foo.h | 0
 .../data/local_includes-filename/moz.build | 5 +
 .../data/local_includes-invalid/objdir/moz.build | 5 +
 .../data/local_includes-invalid/srcdir/moz.build | 5 +
 .../bar/baz/dummy_file_for_nonempty_directory | 0
 .../foo/dummy_file_for_nonempty_directory | 0
 .../test/frontend/data/local_includes/moz.build | 5 +
 .../data/localized-files-from-generated/moz.build | 6 +
 .../data/localized-files-no-en-us/en-US/bar.ini | 0
 .../frontend/data/localized-files-no-en-us/foo.js | 0
 .../inner/locales/en-US/bar.ini | 0
 .../data/localized-files-no-en-us/moz.build | 9 +
 .../moz.build | 6 +
 .../frontend/data/localized-files/en-US/bar.ini | 0
 .../frontend/data/localized-files/en-US/foo.js | 0
 .../test/frontend/data/localized-files/moz.build | 9 +
 .../moz.build | 6 +
 .../data/localized-generated-files-force/moz.build | 6 +
 .../data/localized-generated-files/moz.build | 5 +
 .../frontend/data/localized-pp-files/en-US/bar.ini | 0
 .../frontend/data/localized-pp-files/en-US/foo.js | 0
 .../frontend/data/localized-pp-files/moz.build | 8 +
 .../frontend/data/missing-local-includes/moz.build | 5 +
 .../test/frontend/data/missing-xpidl/moz.build | 6 +
 .../data/multiple-rust-libraries/moz.build | 29 +
 .../data/multiple-rust-libraries/rust1/Cargo.toml | 15 +
 .../data/multiple-rust-libraries/rust1/moz.build | 4 +
 .../data/multiple-rust-libraries/rust2/Cargo.toml | 15 +
 .../data/multiple-rust-libraries/rust2/moz.build | 4 +
 .../test/frontend/data/object-conflicts/1/Test.c | 0
 .../test/frontend/data/object-conflicts/1/Test.cpp | 0
 .../frontend/data/object-conflicts/1/moz.build | 4 +
 .../test/frontend/data/object-conflicts/2/Test.cpp | 0
 .../frontend/data/object-conflicts/2/moz.build | 4 +
 .../data/object-conflicts/2/subdir/Test.cpp | 0
 .../test/frontend/data/object-conflicts/3/Test.c | 0
 .../test/frontend/data/object-conflicts/3/Test.cpp | 0
 .../frontend/data/object-conflicts/3/moz.build | 7 +
 .../test/frontend/data/object-conflicts/4/Test.c | 0
 .../test/frontend/data/object-conflicts/4/Test.cpp | 0
 .../frontend/data/object-conflicts/4/moz.build | 4 +
 .../frontend/data/program-paths/dist-bin/moz.build | 4 +
 .../data/program-paths/dist-subdir/moz.build | 5 +
 .../data/program-paths/final-target/moz.build | 5 +
 .../test/frontend/data/program-paths/moz.build | 15 +
 .../data/program-paths/not-installed/moz.build | 5 +
 .../mozbuild/test/frontend/data/program/moz.build | 18 +
 .../test/frontend/data/program/test_program1.cpp | 0
 .../test/frontend/data/program/test_program2.cpp | 0
 .../frontend/data/reader-error-bad-dir/moz.build | 5 +
 .../frontend/data/reader-error-basic/moz.build | 5 +
 .../data/reader-error-empty-list/moz.build | 5 +
 .../data/reader-error-error-func/moz.build | 5 +
 .../data/reader-error-included-from/child.build | 4 +
 .../data/reader-error-included-from/moz.build | 5 +
 .../data/reader-error-missing-include/moz.build | 5 +
 .../data/reader-error-outside-topsrcdir/moz.build | 5 +
 .../reader-error-read-unknown-global/moz.build | 5 +
 .../data/reader-error-repeated-dir/moz.build | 7 +
 .../data/reader-error-script-error/moz.build | 5 +
 .../frontend/data/reader-error-syntax/moz.build | 5 +
 .../data/reader-error-write-bad-value/moz.build | 5 +
 .../reader-error-write-unknown-global/moz.build | 7 +
 .../reader-relevant-mozbuild/d1/every-level/a/file | 0
 .../d1/every-level/a/moz.build | 0
 .../reader-relevant-mozbuild/d1/every-level/b/file | 0
 .../d1/every-level/b/moz.build | 0
 .../d1/every-level/moz.build | 0
 .../data/reader-relevant-mozbuild/d1/file1 | 0
 .../data/reader-relevant-mozbuild/d1/file2 | 0
 .../data/reader-relevant-mozbuild/d1/moz.build | 0
 .../d1/no-intermediate-moz-build/child/file | 0
 .../d1/no-intermediate-moz-build/child/moz.build | 0
 .../d1/parent-is-far/dir1/dir2/dir3/file | 0
 .../d1/parent-is-far/moz.build | 0
 .../data/reader-relevant-mozbuild/d2/dir1/file | 0
 .../reader-relevant-mozbuild/d2/dir1/moz.build | 0
 .../data/reader-relevant-mozbuild/d2/dir2/file | 0
 .../reader-relevant-mozbuild/d2/dir2/moz.build | 0
 .../data/reader-relevant-mozbuild/d2/moz.build | 0
 .../frontend/data/reader-relevant-mozbuild/file | 0
 .../data/reader-relevant-mozbuild/moz.build | 0
 .../frontend/data/resolved-flags-error/moz.build | 17 +
 .../frontend/data/resolved-flags-error/test1.c | 0
 .../data/rust-library-dash-folding/Cargo.toml | 15 +
 .../data/rust-library-dash-folding/moz.build | 19 +
 .../rust-library-duplicate-features/Cargo.toml | 15 +
 .../data/rust-library-duplicate-features/moz.build | 20 +
 .../frontend/data/rust-library-features/Cargo.toml | 15 +
 .../frontend/data/rust-library-features/moz.build | 20 +
 .../rust-library-invalid-crate-type/Cargo.toml | 15 +
 .../data/rust-library-invalid-crate-type/moz.build | 19 +
 .../data/rust-library-name-mismatch/Cargo.toml | 12 +
 .../data/rust-library-name-mismatch/moz.build | 19 +
 .../data/rust-library-no-cargo-toml/moz.build | 19 +
 .../data/rust-library-no-lib-section/Cargo.toml | 12 +
 .../data/rust-library-no-lib-section/moz.build | 19 +
 .../data/rust-program-no-cargo-toml/moz.build | 1 +
 .../data/rust-program-nonexistent-name/Cargo.toml | 7 +
 .../data/rust-program-nonexistent-name/moz.build | 1 +
 .../test/frontend/data/rust-programs/Cargo.toml | 7 +
 .../test/frontend/data/rust-programs/moz.build | 1 +
 .../test/frontend/data/schedules/moz.build | 19 +
 .../test/frontend/data/schedules/subd/moz.build | 5 +
 .../mozbuild/test/frontend/data/sources-just-c/d.c | 0
 .../mozbuild/test/frontend/data/sources-just-c/e.m | 0
 .../mozbuild/test/frontend/data/sources-just-c/g.S | 0
 .../mozbuild/test/frontend/data/sources-just-c/h.s | 0
 .../test/frontend/data/sources-just-c/i.asm | 0
 .../test/frontend/data/sources-just-c/moz.build | 29 +
 .../mozbuild/test/frontend/data/sources/a.cpp | 0
 .../mozbuild/test/frontend/data/sources/b.cc | 0
 .../mozbuild/test/frontend/data/sources/c.cxx | 0
 .../mozbuild/test/frontend/data/sources/d.c | 0
 .../mozbuild/test/frontend/data/sources/e.m | 0
 .../mozbuild/test/frontend/data/sources/f.mm | 0
 .../mozbuild/test/frontend/data/sources/g.S | 0
 .../mozbuild/test/frontend/data/sources/h.s | 0
 .../mozbuild/test/frontend/data/sources/i.asm | 0
 .../mozbuild/test/frontend/data/sources/moz.build | 39 +
 .../frontend/data/templates/templates.mozbuild | 21 +
 .../data/test-harness-files-root/moz.build | 4 +
 .../frontend/data/test-harness-files/mochitest.ini | 1 +
 .../frontend/data/test-harness-files/mochitest.py | 1 +
 .../frontend/data/test-harness-files/moz.build | 7 +
 .../frontend/data/test-harness-files/runtests.py | 1 +
 .../test/frontend/data/test-harness-files/utils.py | 1 +
 .../data/test-install-shared-lib/moz.build | 16 +
 .../data/test-linkables-cxx-link/moz.build | 14 +
 .../data/test-linkables-cxx-link/one/foo.cpp | 0
 .../data/test-linkables-cxx-link/one/moz.build | 11 +
 .../data/test-linkables-cxx-link/three/moz.build | 5 +
 .../data/test-linkables-cxx-link/two/foo.c | 0
 .../data/test-linkables-cxx-link/two/moz.build | 11 +
 .../absolute-support.ini | 4 +
 .../data/test-manifest-absolute-support/foo.txt | 1 +
 .../data/test-manifest-absolute-support/moz.build | 4 +
 .../test-manifest-absolute-support/test_file.js | 0
 .../test/frontend/data/test-manifest-dupes/bar.js | 0
 .../test/frontend/data/test-manifest-dupes/foo.js | 0
 .../data/test-manifest-dupes/mochitest.ini | 7 +
 .../frontend/data/test-manifest-dupes/moz.build | 4 +
 .../frontend/data/test-manifest-dupes/test_baz.js | 0
 .../included-reftest.list | 1 +
 .../data/test-manifest-emitted-includes/moz.build | 1 +
 .../test-manifest-emitted-includes/reftest.list | 2 +
 .../frontend/data/test-manifest-empty/empty.ini | 2 +
 .../frontend/data/test-manifest-empty/moz.build | 4 +
 .../test_inactive.html | 0
 .../data/test-manifest-install-includes/common.ini | 1 +
 .../test-manifest-install-includes/mochitest.ini | 3 +
 .../data/test-manifest-install-includes/moz.build | 4 +
 .../test-manifest-install-includes/test_foo.html | 1 +
 .../data/test-manifest-just-support/foo.txt | 1 +
 .../test-manifest-just-support/just-support.ini | 2 +
 .../data/test-manifest-just-support/moz.build | 4 +
 .../a11y-support/dir1/bar | 0
 .../test-manifest-keys-extracted/a11y-support/foo | 0
 .../data/test-manifest-keys-extracted/a11y.ini | 4 +
 .../data/test-manifest-keys-extracted/browser.ini | 4 +
 .../data/test-manifest-keys-extracted/chrome.ini | 3 +
 .../test-manifest-keys-extracted/crashtest.list | 1 +
 .../data/test-manifest-keys-extracted/metro.ini | 3 +
 .../test-manifest-keys-extracted/mochitest.ini | 5 +
 .../data/test-manifest-keys-extracted/moz.build | 12 +
 .../data/test-manifest-keys-extracted/python.ini | 1 +
 .../data/test-manifest-keys-extracted/reftest.list | 1 +
 .../data/test-manifest-keys-extracted/test_a11y.js | 0
 .../test-manifest-keys-extracted/test_browser.js | 0
 .../test-manifest-keys-extracted/test_chrome.js | 0
 .../data/test-manifest-keys-extracted/test_foo.py | 0
 .../test-manifest-keys-extracted/test_metro.js | 0
 .../test-manifest-keys-extracted/test_mochitest.js | 0
 .../test-manifest-keys-extracted/test_xpcshell.js | 0
 .../data/test-manifest-keys-extracted/xpcshell.ini | 5 +
 .../data/test-manifest-missing-manifest/moz.build | 4 +
 .../moz.build | 4 +
 .../xpcshell.ini | 4 +
 .../test-manifest-missing-test-file/mochitest.ini | 1 +
 .../data/test-manifest-missing-test-file/moz.build | 4 +
 .../child/mochitest.ini | 4 +
 .../child/test_foo.js | 0
 .../moz.build | 4 +
 .../support-file.txt | 0
 .../child/another-file.sjs | 0
 .../test-manifest-shared-missing/child/browser.ini | 6 +
 .../child/data/one.txt | 0
 .../child/data/two.txt | 0
 .../test-manifest-shared-missing/child/test_sub.js | 0
 .../test-manifest-shared-missing/mochitest.ini | 9 +
 .../data/test-manifest-shared-missing/moz.build | 5 +
 .../test-manifest-shared-missing/support-file.txt | 0
 .../data/test-manifest-shared-missing/test_foo.js | 0
 .../child/another-file.sjs | 0
 .../test-manifest-shared-support/child/browser.ini | 6 +
 .../child/data/one.txt | 0
 .../child/data/two.txt | 0
 .../test-manifest-shared-support/child/test_sub.js | 0
 .../test-manifest-shared-support/mochitest.ini | 8 +
 .../data/test-manifest-shared-support/moz.build | 5 +
 .../test-manifest-shared-support/support-file.txt | 0
 .../data/test-manifest-shared-support/test_foo.js | 0
 .../test-manifest-unmatched-generated/moz.build | 4 +
 .../test-manifest-unmatched-generated/test.ini | 4 +
 .../test-manifest-unmatched-generated/test_foo | 0
 .../moz.build | 12 +
 .../frontend/data/test-symbols-file-objdir/foo.py | 0
 .../data/test-symbols-file-objdir/moz.build | 15 +
 .../frontend/data/test-symbols-file/foo.symbols | 1 +
 .../test/frontend/data/test-symbols-file/moz.build | 12 +
 .../frontend/data/traversal-all-vars/moz.build | 6 +
 .../data/traversal-all-vars/parallel/moz.build | 0
 .../data/traversal-all-vars/regular/moz.build | 0
 .../data/traversal-all-vars/test/moz.build | 0
 .../data/traversal-outside-topsrcdir/moz.build | 5 +
 .../data/traversal-relative-dirs/bar/moz.build | 0
 .../data/traversal-relative-dirs/foo/moz.build | 5 +
 .../data/traversal-relative-dirs/moz.build | 5 +
 .../data/traversal-repeated-dirs/bar/moz.build | 5 +
 .../data/traversal-repeated-dirs/foo/moz.build | 5 +
 .../data/traversal-repeated-dirs/moz.build | 5 +
 .../frontend/data/traversal-simple/bar/moz.build | 0
 .../data/traversal-simple/foo/biz/moz.build | 0
 .../frontend/data/traversal-simple/foo/moz.build | 2 +
 .../test/frontend/data/traversal-simple/moz.build | 5 +
 .../data/unified-sources-non-unified/bar.cxx | 0
 .../frontend/data/unified-sources-non-unified/c1.c | 0
 .../frontend/data/unified-sources-non-unified/c2.c | 0
 .../data/unified-sources-non-unified/foo.cpp | 0
 .../data/unified-sources-non-unified/moz.build | 30 +
 .../data/unified-sources-non-unified/objc1.mm | 0
 .../data/unified-sources-non-unified/objc2.mm | 0
 .../data/unified-sources-non-unified/quux.cc | 0
 .../test/frontend/data/unified-sources/bar.cxx | 0
 .../test/frontend/data/unified-sources/c1.c | 0
 .../test/frontend/data/unified-sources/c2.c | 0
 .../test/frontend/data/unified-sources/foo.cpp | 0
 .../test/frontend/data/unified-sources/moz.build | 30 +
 .../test/frontend/data/unified-sources/objc1.mm | 0
 .../test/frontend/data/unified-sources/objc2.mm | 0
 .../test/frontend/data/unified-sources/quux.cc | 0
 .../mozbuild/test/frontend/data/use-nasm/moz.build | 15 +
 .../mozbuild/test/frontend/data/use-nasm/test1.S | 0
 .../test/frontend/data/variable-passthru/bans.S | 0
 .../test/frontend/data/variable-passthru/baz.def | 0
 .../test/frontend/data/variable-passthru/moz.build | 13 +
 .../test/frontend/data/variable-passthru/test1.c | 0
 .../test/frontend/data/variable-passthru/test1.cpp | 0
 .../test/frontend/data/variable-passthru/test1.mm | 0
 .../test/frontend/data/variable-passthru/test2.c | 0
 .../test/frontend/data/variable-passthru/test2.cpp | 0
 .../test/frontend/data/variable-passthru/test2.mm | 0
 .../test/frontend/data/visibility-flags/moz.build | 21 +
 .../test/frontend/data/visibility-flags/test1.c | 0
 .../frontend/data/wasm-compile-flags/moz.build | 14 +
 .../test/frontend/data/wasm-compile-flags/test1.c | 0
 .../mozbuild/test/frontend/data/wasm-sources/a.cpp | 0
 .../mozbuild/test/frontend/data/wasm-sources/b.cc | 0
 .../mozbuild/test/frontend/data/wasm-sources/c.cxx | 0
 .../mozbuild/test/frontend/data/wasm-sources/d.c | 0
 .../test/frontend/data/wasm-sources/moz.build | 15 +
 .../data/xpidl-module-no-sources/moz.build | 5 +
 .../mozbuild/test/frontend/test_context.py | 736 +++++
 .../mozbuild/test/frontend/test_emitter.py | 1877 ++++++++++++
 .../mozbuild/test/frontend/test_namespaces.py | 225 ++
 .../mozbuild/mozbuild/test/frontend/test_reader.py | 531 ++++
 .../mozbuild/test/frontend/test_sandbox.py | 536 ++++
 python/mozbuild/mozbuild/test/python.ini | 64 +
 .../mozbuild/mozbuild/test/repackaging/test_deb.py | 551 ++++
 .../mozbuild/test/test_android_version_code.py | 111 +
 .../mozbuild/mozbuild/test/test_artifact_cache.py | 145 +
 python/mozbuild/mozbuild/test/test_artifacts.py | 115 +
 python/mozbuild/mozbuild/test/test_base.py | 446 +++
 python/mozbuild/mozbuild/test/test_containers.py | 224 ++
 .../mozbuild/mozbuild/test/test_dotproperties.py | 183 ++
 python/mozbuild/mozbuild/test/test_expression.py | 88 +
 python/mozbuild/mozbuild/test/test_jarmaker.py | 493 +++
 python/mozbuild/mozbuild/test/test_licenses.py | 33 +
 python/mozbuild/mozbuild/test/test_line_endings.py | 45 +
 python/mozbuild/mozbuild/test/test_makeutil.py | 164 +
 python/mozbuild/mozbuild/test/test_manifest.py | 2081 +++++++++++++
 python/mozbuild/mozbuild/test/test_mozconfig.py | 275 ++
 python/mozbuild/mozbuild/test/test_mozinfo.py | 318 ++
 python/mozbuild/mozbuild/test/test_preprocessor.py | 832 ++++++
 python/mozbuild/mozbuild/test/test_pythonutil.py | 24 +
 .../mozbuild/test/test_rewrite_mozbuild.py | 515 ++++
 python/mozbuild/mozbuild/test/test_telemetry.py | 102 +
 .../mozbuild/test/test_telemetry_settings.py | 174 ++
 python/mozbuild/mozbuild/test/test_util.py | 889 ++++++
 .../mozbuild/test/test_util_fileavoidwrite.py | 110 +
 python/mozbuild/mozbuild/test/test_vendor.py | 48 +
 python/mozbuild/mozbuild/test/test_vendor_tools.py | 90 +
 .../mozbuild/mozbuild/test/vendor_requirements.in | 5 +
 .../mozbuild/mozbuild/test/vendor_requirements.txt | 416 +++
 python/mozbuild/mozbuild/testing.py | 266 ++
 python/mozbuild/mozbuild/toolchains.py | 32 +
 python/mozbuild/mozbuild/util.py | 1407 +++++++++
 python/mozbuild/mozbuild/vendor/__init__.py | 0
 python/mozbuild/mozbuild/vendor/host_angle.py | 37 +
 python/mozbuild/mozbuild/vendor/host_base.py | 77 +
 python/mozbuild/mozbuild/vendor/host_codeberg.py | 28 +
 python/mozbuild/mozbuild/vendor/host_github.py | 27 +
 python/mozbuild/mozbuild/vendor/host_gitlab.py | 26 +
 .../mozbuild/mozbuild/vendor/host_googlesource.py | 32 +
 python/mozbuild/mozbuild/vendor/mach_commands.py | 232 ++
 python/mozbuild/mozbuild/vendor/moz.build | 8 +
 python/mozbuild/mozbuild/vendor/moz_yaml.py | 770 +++++
 .../mozbuild/mozbuild/vendor/rewrite_mozbuild.py | 1286 ++++++++
 .../mozbuild/vendor/test_vendor_changes.sh | 65 +
 python/mozbuild/mozbuild/vendor/vendor_manifest.py | 789 +++++
 python/mozbuild/mozbuild/vendor/vendor_python.py | 228 ++
 python/mozbuild/mozbuild/vendor/vendor_rust.py | 961 ++++++
 python/mozbuild/mozpack/__init__.py | 0
 .../mozpack/apple_pkg/Distribution.template | 19 +
 .../mozpack/apple_pkg/PackageInfo.template | 19 +
 python/mozbuild/mozpack/archive.py | 153 +
 python/mozbuild/mozpack/chrome/__init__.py | 0
 python/mozbuild/mozpack/chrome/flags.py | 278 ++
 python/mozbuild/mozpack/chrome/manifest.py | 400 +++
 python/mozbuild/mozpack/copier.py | 605 ++++
 python/mozbuild/mozpack/dmg.py | 230 ++
 python/mozbuild/mozpack/errors.py | 151 +
 python/mozbuild/mozpack/executables.py | 140 +
 python/mozbuild/mozpack/files.py | 1271 ++++++++
 python/mozbuild/mozpack/macpkg.py | 217 ++
 python/mozbuild/mozpack/manifests.py | 483 +++
 python/mozbuild/mozpack/mozjar.py | 842 ++++++
 python/mozbuild/mozpack/packager/__init__.py | 445 +++
 python/mozbuild/mozpack/packager/formats.py | 354 +++
 python/mozbuild/mozpack/packager/l10n.py | 304 ++
 python/mozbuild/mozpack/packager/unpack.py | 200 ++
 python/mozbuild/mozpack/path.py | 246 ++
 python/mozbuild/mozpack/pkg.py | 299 ++
 python/mozbuild/mozpack/test/__init__.py | 0
 python/mozbuild/mozpack/test/data/test_data | 1 +
 python/mozbuild/mozpack/test/python.ini | 18 +
 .../mozpack/test/support/minify_js_verify.py | 15 +
 python/mozbuild/mozpack/test/test_archive.py | 197 ++
 python/mozbuild/mozpack/test/test_chrome_flags.py | 150 +
 .../mozbuild/mozpack/test/test_chrome_manifest.py | 176 ++
 python/mozbuild/mozpack/test/test_copier.py | 548 ++++
 python/mozbuild/mozpack/test/test_errors.py | 95 +
 python/mozbuild/mozpack/test/test_files.py | 1362 +++++++++
 python/mozbuild/mozpack/test/test_manifests.py | 465 +++
 python/mozbuild/mozpack/test/test_mozjar.py | 350 +++
 python/mozbuild/mozpack/test/test_packager.py | 630 ++++
 .../mozbuild/mozpack/test/test_packager_formats.py | 537 ++++
 python/mozbuild/mozpack/test/test_packager_l10n.py | 153 +
 .../mozbuild/mozpack/test/test_packager_unpack.py | 67 +
 python/mozbuild/mozpack/test/test_path.py | 152 +
 python/mozbuild/mozpack/test/test_pkg.py | 138 +
 python/mozbuild/mozpack/test/test_unify.py | 250 ++
 python/mozbuild/mozpack/unify.py | 265 ++
 python/mozbuild/setup.py | 29 +
 920 files changed, 96930 insertions(+)
 create mode 100644 python/mozbuild/.ruff.toml
 create mode 100644 python/mozbuild/metrics.yaml
 create mode 100644 python/mozbuild/mozbuild/__init__.py
 create mode 100644 python/mozbuild/mozbuild/action/__init__.py
 create mode 100644 python/mozbuild/mozbuild/action/buildlist.py
 create mode 100644 python/mozbuild/mozbuild/action/check_binary.py
 create mode 100644 python/mozbuild/mozbuild/action/download_wpt_manifest.py
 create mode 100644 python/mozbuild/mozbuild/action/dump_env.py
 create mode 100644 python/mozbuild/mozbuild/action/dumpsymbols.py
 create mode 100644 python/mozbuild/mozbuild/action/exe_7z_archive.py
 create mode 100644 python/mozbuild/mozbuild/action/fat_aar.py
 create mode 100644 python/mozbuild/mozbuild/action/file_generate.py
 create mode 100644 python/mozbuild/mozbuild/action/file_generate_wrapper.py
 create mode 100644 python/mozbuild/mozbuild/action/generate_symbols_file.py
 create mode 100644 python/mozbuild/mozbuild/action/html_fragment_preprocesor.py
 create mode 100644 python/mozbuild/mozbuild/action/install.py
 create mode 100644 python/mozbuild/mozbuild/action/jar_maker.py
 create mode 100644 python/mozbuild/mozbuild/action/l10n_merge.py
 create mode 100644 python/mozbuild/mozbuild/action/langpack_localeNames.json
 create mode 100644 python/mozbuild/mozbuild/action/langpack_manifest.py
 create mode 100644 python/mozbuild/mozbuild/action/make_dmg.py
 create mode 100644 python/mozbuild/mozbuild/action/make_unzip.py
 create mode 100644 python/mozbuild/mozbuild/action/node.py
 create mode 100644 python/mozbuild/mozbuild/action/package_generated_sources.py
 create mode 100644 python/mozbuild/mozbuild/action/preprocessor.py
 create mode 100644 python/mozbuild/mozbuild/action/process_define_files.py
 create mode 100644 python/mozbuild/mozbuild/action/process_install_manifest.py
 create mode 100644 python/mozbuild/mozbuild/action/symbols_archive.py
 create mode 100644 python/mozbuild/mozbuild/action/test_archive.py
 create mode 100755 python/mozbuild/mozbuild/action/tooltool.py
 create mode 100644 python/mozbuild/mozbuild/action/unify_symbols.py
 create mode 100644 python/mozbuild/mozbuild/action/unify_tests.py
 create mode 100644 python/mozbuild/mozbuild/action/unpack_dmg.py
 create mode 100644 python/mozbuild/mozbuild/action/util.py
 create mode 100644 python/mozbuild/mozbuild/action/webidl.py
 create mode 100644 python/mozbuild/mozbuild/action/wrap_rustc.py
 create mode 100644 python/mozbuild/mozbuild/action/xpccheck.py
 create mode 100755 python/mozbuild/mozbuild/action/xpidl-process.py
 create mode 100644 python/mozbuild/mozbuild/action/zip.py
 create mode 100644 python/mozbuild/mozbuild/analyze/__init__.py
 create mode 100644 python/mozbuild/mozbuild/analyze/hg.py
 create mode 100644 python/mozbuild/mozbuild/android_version_code.py
 create mode 100644 python/mozbuild/mozbuild/artifact_builds.py
 create mode 100644 python/mozbuild/mozbuild/artifact_cache.py
 create mode 100644 python/mozbuild/mozbuild/artifact_commands.py
 create mode 100644 python/mozbuild/mozbuild/artifacts.py
 create mode 100644 python/mozbuild/mozbuild/backend/__init__.py
 create mode 100644 python/mozbuild/mozbuild/backend/base.py
 create mode 100644 python/mozbuild/mozbuild/backend/cargo_build_defs.py
 create mode 100644 python/mozbuild/mozbuild/backend/clangd.py
 create mode 100644 python/mozbuild/mozbuild/backend/common.py
 create mode 100644 python/mozbuild/mozbuild/backend/configenvironment.py
 create mode 100644 python/mozbuild/mozbuild/backend/cpp_eclipse.py
 create mode 100644 python/mozbuild/mozbuild/backend/fastermake.py
 create mode 100644 python/mozbuild/mozbuild/backend/mach_commands.py
 create mode 100644 python/mozbuild/mozbuild/backend/make.py
 create mode 100644 python/mozbuild/mozbuild/backend/recursivemake.py
 create mode 100644 python/mozbuild/mozbuild/backend/static_analysis.py
 create mode 100644 python/mozbuild/mozbuild/backend/test_manifest.py
 create mode 100644 python/mozbuild/mozbuild/backend/visualstudio.py
 create mode 100644 python/mozbuild/mozbuild/base.py
 create mode 100644 python/mozbuild/mozbuild/bootstrap.py
 create mode 100644 python/mozbuild/mozbuild/build_commands.py
 create mode 100644 python/mozbuild/mozbuild/chunkify.py
 create mode 100644 python/mozbuild/mozbuild/code_analysis/__init__.py
 create mode 100644 python/mozbuild/mozbuild/code_analysis/mach_commands.py
 create mode 100644 python/mozbuild/mozbuild/code_analysis/moz.build
 create mode 100644 python/mozbuild/mozbuild/code_analysis/utils.py
 create mode 100644 python/mozbuild/mozbuild/codecoverage/__init__.py
 create mode 100644 python/mozbuild/mozbuild/codecoverage/chrome_map.py
 create mode 100644 python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py
 create mode 100644 python/mozbuild/mozbuild/codecoverage/manifest_handler.py
 create mode 100644 python/mozbuild/mozbuild/codecoverage/packager.py
 create mode 100644 python/mozbuild/mozbuild/compilation/__init__.py
 create mode 100644 python/mozbuild/mozbuild/compilation/codecomplete.py
 create mode 100644 python/mozbuild/mozbuild/compilation/database.py
 create mode 100644 python/mozbuild/mozbuild/compilation/util.py
 create mode 100644 python/mozbuild/mozbuild/compilation/warnings.py
 create mode 100644 python/mozbuild/mozbuild/config_status.py
 create mode 100644 python/mozbuild/mozbuild/configure/__init__.py
 create mode 100644 python/mozbuild/mozbuild/configure/check_debug_ranges.py
 create mode 100644 python/mozbuild/mozbuild/configure/constants.py
 create mode 100644 python/mozbuild/mozbuild/configure/help.py
 create mode 100644 python/mozbuild/mozbuild/configure/lint.py
 create mode 100644 python/mozbuild/mozbuild/configure/options.py
 create mode 100644 python/mozbuild/mozbuild/configure/util.py
 create mode 100644 python/mozbuild/mozbuild/controller/__init__.py
 create mode 100644 python/mozbuild/mozbuild/controller/building.py
 create mode 100644 python/mozbuild/mozbuild/controller/clobber.py
 create mode 100644 python/mozbuild/mozbuild/doctor.py
 create mode 100644 python/mozbuild/mozbuild/dotproperties.py
 create mode 100644 python/mozbuild/mozbuild/faster_daemon.py
 create mode 100644 python/mozbuild/mozbuild/frontend/__init__.py
 create mode 100644 python/mozbuild/mozbuild/frontend/context.py
 create mode 100644 python/mozbuild/mozbuild/frontend/data.py
 create mode 100644 python/mozbuild/mozbuild/frontend/emitter.py
 create mode 100644 python/mozbuild/mozbuild/frontend/gyp_reader.py
 create mode 100644 python/mozbuild/mozbuild/frontend/mach_commands.py
 create mode 100644 python/mozbuild/mozbuild/frontend/reader.py
 create mode 100644 python/mozbuild/mozbuild/frontend/sandbox.py
 create mode 100644 python/mozbuild/mozbuild/gen_test_backend.py
 create mode 100644 python/mozbuild/mozbuild/generated_sources.py
 create mode 100644 python/mozbuild/mozbuild/gn_processor.py
 create mode 100644 python/mozbuild/mozbuild/html_build_viewer.py
 create mode 100644 python/mozbuild/mozbuild/jar.py
 create mode 100644 python/mozbuild/mozbuild/mach_commands.py
 create mode 100644 python/mozbuild/mozbuild/makeutil.py
 create mode 100644 python/mozbuild/mozbuild/mozconfig.py
 create mode 100755 python/mozbuild/mozbuild/mozconfig_loader
 create mode 100644 python/mozbuild/mozbuild/mozinfo.py
 create mode 100644 python/mozbuild/mozbuild/nodeutil.py
 create mode 100644 python/mozbuild/mozbuild/preprocessor.py
 create mode 100644 python/mozbuild/mozbuild/pythonutil.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/__init__.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/application_ini.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/deb.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/dmg.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/installer.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/mar.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/msi.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/msix.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/pkg.py
 create mode 100644 python/mozbuild/mozbuild/repackaging/test/python.ini
 create mode 100644 python/mozbuild/mozbuild/repackaging/test/test_msix.py
 create mode 100644 python/mozbuild/mozbuild/resources/html-build-viewer/build_resources.html
 create mode 100644 python/mozbuild/mozbuild/schedules.py
 create mode 100644 python/mozbuild/mozbuild/settings.py
 create mode 100644 python/mozbuild/mozbuild/shellutil.py
 create mode 100644 python/mozbuild/mozbuild/sphinx.py
 create mode 100644 python/mozbuild/mozbuild/telemetry.py
 create mode 100644 python/mozbuild/mozbuild/test/__init__.py
 create mode 100644 python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml
 create mode 100644 python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml
 create mode 100644 python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml
 create mode 100644 python/mozbuild/mozbuild/test/action/data/invalid/region.properties
 create mode 100644 python/mozbuild/mozbuild/test/action/data/node/node-test-script.js
 create mode 100644 python/mozbuild/mozbuild/test/action/test_buildlist.py
 create mode 100644 python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py
 create mode 100644 python/mozbuild/mozbuild/test/action/test_langpack_manifest.py
 create mode 100644 python/mozbuild/mozbuild/test/action/test_node.py
 create mode 100644 python/mozbuild/mozbuild/test/action/test_process_install_manifest.py
 create mode 100644 python/mozbuild/mozbuild/test/backend/__init__.py
 create mode 100644 python/mozbuild/mozbuild/test/backend/common.py
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/app/moz.build
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/bar.ini
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/bar.js
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/bar.jsm
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/baz.ini
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/baz.jsm
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/components.manifest
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.css
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.ini
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.js
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/foo.jsm
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/jar.mn
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/moz.build
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/prefs.js
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/qux.ini
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/qux.jsm
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/resource
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/resource2
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/database/bar.c
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/database/baz.cpp
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/database/foo.c
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/database/moz.build
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/database/qux.cpp
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/defines/moz.build
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/dist-files/main.js
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h
 create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h
python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/dom1.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/dom2.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/foo.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/gfx.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/exports/pprio.h create mode 100644 python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz create mode 100644 python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz create mode 100644 python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild create mode 100644 python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py create mode 100644 
python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf create mode 
100644 python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/foo.res create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/resources/test.manifest create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/bar.s create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/baz.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.asm create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/qux.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/titi.S create mode 100644 python/mozbuild/mozbuild/test/backend/data/sources/toto.S create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/stub0/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/test_config/file.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/test_config/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build create mode 100644 
python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl create mode 100644 python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in create mode 100644 python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl create mode 100644 python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build create mode 100644 python/mozbuild/mozbuild/test/backend/test_build.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_configenvironment.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_database.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_fastermake.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_recursivemake.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_test_manifest.py create mode 100644 python/mozbuild/mozbuild/test/backend/test_visualstudio.py create mode 100644 python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py create mode 100644 python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info create mode 100644 python/mozbuild/mozbuild/test/codecoverage/test_lcov_rewrite.py create mode 100644 python/mozbuild/mozbuild/test/common.py create mode 100644 python/mozbuild/mozbuild/test/compilation/__init__.py create mode 100644 python/mozbuild/mozbuild/test/compilation/test_warnings.py create mode 100644 python/mozbuild/mozbuild/test/configure/common.py create mode 100644 python/mozbuild/mozbuild/test/configure/data/decorators.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/empty_mozconfig create mode 100644 python/mozbuild/mozbuild/test/configure/data/extra.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/included.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/moz.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/set_config.configure create mode 100644 python/mozbuild/mozbuild/test/configure/data/set_define.configure create mode 100644 
python/mozbuild/mozbuild/test/configure/data/subprocess.configure create mode 100644 python/mozbuild/mozbuild/test/configure/lint.py create mode 100644 python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist create mode 100644 python/mozbuild/mozbuild/test/configure/test_bootstrap.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_checks_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_compile_checks.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_lint.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_moz_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_options.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py create mode 100644 python/mozbuild/mozbuild/test/configure/test_util.py create mode 100644 python/mozbuild/mozbuild/test/controller/__init__.py create mode 100644 python/mozbuild/mozbuild/test/controller/test_ccachestats.py create mode 100644 python/mozbuild/mozbuild/test/controller/test_clobber.py create mode 100644 python/mozbuild/mozbuild/test/data/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/bad.properties create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/with/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile create mode 100644 python/mozbuild/mozbuild/test/data/valid.properties create mode 100644 python/mozbuild/mozbuild/test/frontend/__init__.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/bar.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/baz.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mem.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build create mode 
100644 python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory create mode 100644 python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1 create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2 create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file create mode 100644 python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build create 
mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml create mode 100644 python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/b.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/e.m create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/f.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/g.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/h.s create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/i.asm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols create mode 100644 python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build create mode 100644 
python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm create mode 100644 python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c create mode 100644 python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build create mode 100644 python/mozbuild/mozbuild/test/frontend/test_context.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_emitter.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_namespaces.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_reader.py create mode 100644 python/mozbuild/mozbuild/test/frontend/test_sandbox.py create mode 100644 python/mozbuild/mozbuild/test/python.ini create mode 100644 python/mozbuild/mozbuild/test/repackaging/test_deb.py create mode 100644 python/mozbuild/mozbuild/test/test_android_version_code.py create mode 100644 python/mozbuild/mozbuild/test/test_artifact_cache.py create mode 100644 python/mozbuild/mozbuild/test/test_artifacts.py create mode 100644 python/mozbuild/mozbuild/test/test_base.py create mode 100644 python/mozbuild/mozbuild/test/test_containers.py create mode 100644 python/mozbuild/mozbuild/test/test_dotproperties.py create mode 100644 python/mozbuild/mozbuild/test/test_expression.py create mode 100644 python/mozbuild/mozbuild/test/test_jarmaker.py create mode 100644 python/mozbuild/mozbuild/test/test_licenses.py create mode 100644 python/mozbuild/mozbuild/test/test_line_endings.py create mode 100644 python/mozbuild/mozbuild/test/test_makeutil.py create mode 100644 python/mozbuild/mozbuild/test/test_manifest.py create mode 100644 python/mozbuild/mozbuild/test/test_mozconfig.py create mode 100755 python/mozbuild/mozbuild/test/test_mozinfo.py create mode 
100644 python/mozbuild/mozbuild/test/test_preprocessor.py create mode 100644 python/mozbuild/mozbuild/test/test_pythonutil.py create mode 100644 python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py create mode 100644 python/mozbuild/mozbuild/test/test_telemetry.py create mode 100644 python/mozbuild/mozbuild/test/test_telemetry_settings.py create mode 100644 python/mozbuild/mozbuild/test/test_util.py create mode 100644 python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py create mode 100644 python/mozbuild/mozbuild/test/test_vendor.py create mode 100644 python/mozbuild/mozbuild/test/test_vendor_tools.py create mode 100644 python/mozbuild/mozbuild/test/vendor_requirements.in create mode 100644 python/mozbuild/mozbuild/test/vendor_requirements.txt create mode 100644 python/mozbuild/mozbuild/testing.py create mode 100644 python/mozbuild/mozbuild/toolchains.py create mode 100644 python/mozbuild/mozbuild/util.py create mode 100644 python/mozbuild/mozbuild/vendor/__init__.py create mode 100644 python/mozbuild/mozbuild/vendor/host_angle.py create mode 100644 python/mozbuild/mozbuild/vendor/host_base.py create mode 100644 python/mozbuild/mozbuild/vendor/host_codeberg.py create mode 100644 python/mozbuild/mozbuild/vendor/host_github.py create mode 100644 python/mozbuild/mozbuild/vendor/host_gitlab.py create mode 100644 python/mozbuild/mozbuild/vendor/host_googlesource.py create mode 100644 python/mozbuild/mozbuild/vendor/mach_commands.py create mode 100644 python/mozbuild/mozbuild/vendor/moz.build create mode 100644 python/mozbuild/mozbuild/vendor/moz_yaml.py create mode 100644 python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py create mode 100755 python/mozbuild/mozbuild/vendor/test_vendor_changes.sh create mode 100644 python/mozbuild/mozbuild/vendor/vendor_manifest.py create mode 100644 python/mozbuild/mozbuild/vendor/vendor_python.py create mode 100644 python/mozbuild/mozbuild/vendor/vendor_rust.py create mode 100644 python/mozbuild/mozpack/__init__.py create mode 100644 python/mozbuild/mozpack/apple_pkg/Distribution.template create mode 100644 python/mozbuild/mozpack/apple_pkg/PackageInfo.template create mode 100644 python/mozbuild/mozpack/archive.py create mode 100644 python/mozbuild/mozpack/chrome/__init__.py create mode 100644 python/mozbuild/mozpack/chrome/flags.py create mode 100644 python/mozbuild/mozpack/chrome/manifest.py create mode 100644 python/mozbuild/mozpack/copier.py create mode 100644 python/mozbuild/mozpack/dmg.py create mode 100644 python/mozbuild/mozpack/errors.py create mode 100644 python/mozbuild/mozpack/executables.py create mode 100644 python/mozbuild/mozpack/files.py create mode 100644 python/mozbuild/mozpack/macpkg.py create mode 100644 python/mozbuild/mozpack/manifests.py create mode 100644 python/mozbuild/mozpack/mozjar.py create mode 100644 python/mozbuild/mozpack/packager/__init__.py create mode 100644 python/mozbuild/mozpack/packager/formats.py create mode 100644 python/mozbuild/mozpack/packager/l10n.py create mode 100644 python/mozbuild/mozpack/packager/unpack.py create mode 100644 python/mozbuild/mozpack/path.py create mode 100644 python/mozbuild/mozpack/pkg.py create mode 100644 python/mozbuild/mozpack/test/__init__.py create mode 100644 python/mozbuild/mozpack/test/data/test_data create mode 100644 python/mozbuild/mozpack/test/python.ini create mode 100644 python/mozbuild/mozpack/test/support/minify_js_verify.py create mode 100644 python/mozbuild/mozpack/test/test_archive.py create mode 100644 python/mozbuild/mozpack/test/test_chrome_flags.py create mode 
100644 python/mozbuild/mozpack/test/test_chrome_manifest.py create mode 100644 python/mozbuild/mozpack/test/test_copier.py create mode 100644 python/mozbuild/mozpack/test/test_errors.py create mode 100644 python/mozbuild/mozpack/test/test_files.py create mode 100644 python/mozbuild/mozpack/test/test_manifests.py create mode 100644 python/mozbuild/mozpack/test/test_mozjar.py create mode 100644 python/mozbuild/mozpack/test/test_packager.py create mode 100644 python/mozbuild/mozpack/test/test_packager_formats.py create mode 100644 python/mozbuild/mozpack/test/test_packager_l10n.py create mode 100644 python/mozbuild/mozpack/test/test_packager_unpack.py create mode 100644 python/mozbuild/mozpack/test/test_path.py create mode 100644 python/mozbuild/mozpack/test/test_pkg.py create mode 100644 python/mozbuild/mozpack/test/test_unify.py create mode 100644 python/mozbuild/mozpack/unify.py create mode 100644 python/mozbuild/setup.py (limited to 'python/mozbuild') diff --git a/python/mozbuild/.ruff.toml b/python/mozbuild/.ruff.toml new file mode 100644 index 0000000000..ba54f854aa --- /dev/null +++ b/python/mozbuild/.ruff.toml @@ -0,0 +1,9 @@ +extend = "../../pyproject.toml" +src = [ + # Treat direct imports in the test modules as first party. + "mozpack/test", + "mozbuild/test", +] + +[isort] +known-first-party = ["mozbuild"] diff --git a/python/mozbuild/metrics.yaml b/python/mozbuild/metrics.yaml new file mode 100644 index 0000000000..068dd6a389 --- /dev/null +++ b/python/mozbuild/metrics.yaml @@ -0,0 +1,140 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# If this file is changed, update the generated docs: +# https://firefox-source-docs.mozilla.org/mach/telemetry.html#updating-generated-metrics-docs + +# Adding a new metric? We have docs for that! +# https://mozilla.github.io/glean/book/user/metrics/adding-new-metrics.html + +--- +$schema: moz://mozilla.org/schemas/glean/metrics/1-0-0 + +mozbuild: + compiler: + type: string + description: The compiler type in use (CC_TYPE), such as "clang" or "gcc". + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + artifact: + type: boolean + description: True if `--enable-artifact-builds`. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + debug: + type: boolean + description: True if `--enable-debug`. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + opt: + type: boolean + description: True if `--enable-optimize`. 
+ lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + ccache: + type: boolean + description: True if `--with-ccache`. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + sccache: + type: boolean + description: True if ccache in use is sccache. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + icecream: + type: boolean + description: True if icecream in use. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + clobber: + type: boolean + description: True if the build was a clobber/full build. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1526072 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1526072#c15 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage + project: + type: string + description: The project being built. + lifetime: application + bugs: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1654084 + data_reviews: + - https://bugzilla.mozilla.org/show_bug.cgi?id=1654084#c2 + notification_emails: + - build-telemetry@mozilla.com + - mhentges@mozilla.com + expires: never + send_in_pings: + - usage diff --git a/python/mozbuild/mozbuild/__init__.py b/python/mozbuild/mozbuild/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/action/__init__.py b/python/mozbuild/mozbuild/action/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/action/buildlist.py b/python/mozbuild/mozbuild/action/buildlist.py new file mode 100644 index 0000000000..ab32ad92cc --- /dev/null +++ b/python/mozbuild/mozbuild/action/buildlist.py @@ -0,0 +1,49 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +"""A generic script to add entries to a file +if the entry does not already exist. + +Usage: buildlist.py <filename> <entry> [<entry> ...]
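+ +Example (hypothetical file and entries, for illustration only): + buildlist.py dist/bin/chrome.manifest "manifest components/foo.manifest"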
+""" +import io +import os +import sys + +from mozbuild.action.util import log_build_task +from mozbuild.util import ensureParentDir, lock_file + + +def addEntriesToListFile(listFile, entries): + """Given a file ``listFile`` containing one entry per line, + add each entry in ``entries`` to the file, unless it is already + present.""" + ensureParentDir(listFile) + lock = lock_file(listFile + ".lck") + try: + if os.path.exists(listFile): + f = io.open(listFile) + existing = set(x.strip() for x in f.readlines()) + f.close() + else: + existing = set() + for e in entries: + if e not in existing: + existing.add(e) + with io.open(listFile, "w", newline="\n") as f: + f.write("\n".join(sorted(existing)) + "\n") + finally: + del lock # Explicitly release the lock_file to free it + + +def main(args): + if len(args) < 2: + print("Usage: buildlist.py [ ...]", file=sys.stderr) + return 1 + + return addEntriesToListFile(args[0], args[1:]) + + +if __name__ == "__main__": + sys.exit(log_build_task(main, sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/check_binary.py b/python/mozbuild/mozbuild/action/check_binary.py new file mode 100644 index 0000000000..baf39860de --- /dev/null +++ b/python/mozbuild/mozbuild/action/check_binary.py @@ -0,0 +1,343 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os +import re +import subprocess +import sys + +import buildconfig +from mozpack.executables import ELF, UNKNOWN, get_type +from packaging.version import Version + +from mozbuild.action.util import log_build_task +from mozbuild.util import memoize + +STDCXX_MAX_VERSION = Version("3.4.19") +CXXABI_MAX_VERSION = Version("1.3.7") +GLIBC_MAX_VERSION = Version("2.17") +LIBGCC_MAX_VERSION = Version("4.8") + +HOST = {"platform": buildconfig.substs["HOST_OS_ARCH"], "readelf": "readelf"} + +TARGET = { + "platform": buildconfig.substs["OS_TARGET"], + "readelf": buildconfig.substs.get("READELF", "readelf"), +} + +ADDR_RE = re.compile(r"[0-9a-f]{8,16}") + +if buildconfig.substs.get("HAVE_64BIT_BUILD"): + GUESSED_NSMODULE_SIZE = 8 +else: + GUESSED_NSMODULE_SIZE = 4 + + +get_type = memoize(get_type) + + +@memoize +def get_output(*cmd): + env = dict(os.environ) + env[b"LC_ALL"] = b"C" + return subprocess.check_output(cmd, env=env, universal_newlines=True).splitlines() + + +class Skip(RuntimeError): + pass + + +class Empty(RuntimeError): + pass + + +def at_least_one(iter): + saw_one = False + for item in iter: + saw_one = True + yield item + if not saw_one: + raise Empty() + + +# Iterates the symbol table on ELF binaries. +def iter_elf_symbols(target, binary, all=False): + ty = get_type(binary) + # Static libraries are ar archives. Assume they are ELF. + if ty == UNKNOWN and open(binary, "rb").read(8) == b"!\n": + ty = ELF + assert ty == ELF + for line in get_output( + target["readelf"], "--wide", "--syms" if all else "--dyn-syms", binary + ): + data = line.split() + if not (len(data) >= 8 and data[0].endswith(":") and data[0][:-1].isdigit()): + continue + n, addr, size, type, bind, vis, index, name = data[:8] + + if "@" in name: + name, ver = name.rsplit("@", 1) + while name.endswith("@"): + name = name[:-1] + else: + ver = None + yield { + "addr": int(addr, 16), + # readelf output may contain decimal values or hexadecimal + # values prefixed with 0x for the size. Let python autodetect. 
+ "size": int(size, 0), + "name": name, + "version": ver, + } + + +def iter_readelf_dynamic(target, binary): + for line in get_output(target["readelf"], "-d", binary): + data = line.split(None, 2) + if data and len(data) == 3 and data[0].startswith("0x"): + yield data[1].rstrip(")").lstrip("("), data[2] + + +def check_binary_compat(target, binary): + if get_type(binary) != ELF: + raise Skip() + checks = ( + ("libstdc++", "GLIBCXX_", STDCXX_MAX_VERSION), + ("libstdc++", "CXXABI_", CXXABI_MAX_VERSION), + ("libgcc", "GCC_", LIBGCC_MAX_VERSION), + ("libc", "GLIBC_", GLIBC_MAX_VERSION), + ) + + unwanted = {} + try: + for sym in at_least_one(iter_elf_symbols(target, binary)): + # Only check versions on undefined symbols + if sym["addr"] != 0: + continue + + # No version to check + if not sym["version"]: + continue + + for _, prefix, max_version in checks: + if sym["version"].startswith(prefix): + version = Version(sym["version"][len(prefix) :]) + if version > max_version: + unwanted.setdefault(prefix, []).append(sym) + except Empty: + raise RuntimeError("Could not parse llvm-objdump output?") + if unwanted: + error = [] + for lib, prefix, _ in checks: + if prefix in unwanted: + error.append( + "We do not want these {} symbol versions to be used:".format(lib) + ) + error.extend( + " {} ({})".format(s["name"], s["version"]) for s in unwanted[prefix] + ) + raise RuntimeError("\n".join(error)) + + +def check_textrel(target, binary): + if target is HOST or get_type(binary) != ELF: + raise Skip() + try: + for tag, value in at_least_one(iter_readelf_dynamic(target, binary)): + if tag == "TEXTREL" or (tag == "FLAGS" and "TEXTREL" in value): + raise RuntimeError( + "We do not want text relocations in libraries and programs" + ) + except Empty: + raise RuntimeError("Could not parse readelf output?") + + +def ishex(s): + try: + int(s, 16) + return True + except ValueError: + return False + + +def is_libxul(binary): + basename = os.path.basename(binary).lower() + return "xul" in basename + + +def check_pt_load(target, binary): + if target is HOST or get_type(binary) != ELF or not is_libxul(binary): + raise Skip() + count = 0 + for line in get_output(target["readelf"], "-l", binary): + data = line.split() + if data and data[0] == "LOAD": + count += 1 + if count <= 1: + raise RuntimeError("Expected more than one PT_LOAD segment") + + +def check_mozglue_order(target, binary): + if target is HOST or target["platform"] != "Android": + raise Skip() + # While this is very unlikely (libc being added by the compiler at the end + # of the linker command line), if libmozglue.so ends up after libc.so, all + # hell breaks loose, so better safe than sorry, and check it's actually the + # case. 
+ try: + mozglue = libc = None + for n, (tag, value) in enumerate( + at_least_one(iter_readelf_dynamic(target, binary)) + ): + if tag == "NEEDED": + if "[libmozglue.so]" in value: + mozglue = n + elif "[libc.so]" in value: + libc = n + if libc is None: + raise RuntimeError("libc.so is not linked?") + if mozglue is not None and libc < mozglue: + raise RuntimeError("libmozglue.so must be linked before libc.so") + except Empty: + raise RuntimeError("Could not parse readelf output?") + + +def check_networking(target, binary): + retcode = 0 + networking_functions = set( + [ + # socketpair is not concerning; it is restricted to AF_UNIX + "connect", + "accept", + "listen", + "getsockname", + "getsockopt", + "recv", + "send", + # We would be concerned by recvmsg and sendmsg; but we believe + # they are okay as documented in 1376621#c23 + "gethostbyname", + "gethostbyaddr", + "gethostent", + "sethostent", + "endhostent", + "gethostent_r", + "gethostbyname2", + "gethostbyaddr_r", + "gethostbyname_r", + "gethostbyname2_r", + "getservent", + "getservbyname", + "getservbyport", + "setservent", + "getprotoent", + "getprotobyname", + "getprotobynumber", + "setprotoent", + "endprotoent", + ] + ) + bad_occurences_names = set() + + try: + for sym in at_least_one(iter_elf_symbols(target, binary, all=True)): + if sym["addr"] == 0 and sym["name"] in networking_functions: + bad_occurences_names.add(sym["name"]) + except Empty: + raise RuntimeError("Could not parse llvm-objdump output?") + + basename = os.path.basename(binary) + if bad_occurences_names: + s = ( + "TEST-UNEXPECTED-FAIL | check_networking | {} | Identified {} " + + "networking function(s) being imported in the rust static library ({})" + ) + print( + s.format( + basename, + len(bad_occurences_names), + ",".join(sorted(bad_occurences_names)), + ), + file=sys.stderr, + ) + retcode = 1 + elif buildconfig.substs.get("MOZ_AUTOMATION"): + print("TEST-PASS | check_networking | {}".format(basename)) + return retcode + + +def checks(target, binary): + # The clang-plugin is built as target but is really a host binary. + # Cheat and pretend we were passed the right argument. 
+ if "clang-plugin" in binary: + target = HOST + checks = [] + if buildconfig.substs.get("MOZ_STDCXX_COMPAT") and target["platform"] == "Linux": + checks.append(check_binary_compat) + + # Disabled for local builds because of readelf performance: See bug 1472496 + if not buildconfig.substs.get("DEVELOPER_OPTIONS"): + checks.append(check_textrel) + checks.append(check_pt_load) + checks.append(check_mozglue_order) + + retcode = 0 + basename = os.path.basename(binary) + for c in checks: + try: + name = c.__name__ + c(target, binary) + if buildconfig.substs.get("MOZ_AUTOMATION"): + print("TEST-PASS | {} | {}".format(name, basename)) + except Skip: + pass + except RuntimeError as e: + print( + "TEST-UNEXPECTED-FAIL | {} | {} | {}".format(name, basename, str(e)), + file=sys.stderr, + ) + retcode = 1 + return retcode + + +def main(args): + parser = argparse.ArgumentParser(description="Check built binaries") + + parser.add_argument( + "--host", action="store_true", help="Perform checks for a host binary" + ) + parser.add_argument( + "--target", action="store_true", help="Perform checks for a target binary" + ) + parser.add_argument( + "--networking", + action="store_true", + help="Perform checks for networking functions", + ) + + parser.add_argument( + "binary", metavar="PATH", help="Location of the binary to check" + ) + + options = parser.parse_args(args) + + if options.host == options.target: + print("Exactly one of --host or --target must be given", file=sys.stderr) + return 1 + + if options.networking and options.host: + print("--networking is only valid with --target", file=sys.stderr) + return 1 + + if options.networking: + return check_networking(TARGET, options.binary) + elif options.host: + return checks(HOST, options.binary) + elif options.target: + return checks(TARGET, options.binary) + + +if __name__ == "__main__": + sys.exit(log_build_task(main, sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/download_wpt_manifest.py b/python/mozbuild/mozbuild/action/download_wpt_manifest.py new file mode 100644 index 0000000000..84f4a15d14 --- /dev/null +++ b/python/mozbuild/mozbuild/action/download_wpt_manifest.py @@ -0,0 +1,21 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This action is used to generate the wpt manifest + +import sys + +import buildconfig + + +def main(): + print("Downloading wpt manifest") + sys.path.insert(0, buildconfig.topsrcdir) + import manifestupdate + + return 0 if manifestupdate.run(buildconfig.topsrcdir, buildconfig.topobjdir) else 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/python/mozbuild/mozbuild/action/dump_env.py b/python/mozbuild/mozbuild/action/dump_env.py new file mode 100644 index 0000000000..ec178700eb --- /dev/null +++ b/python/mozbuild/mozbuild/action/dump_env.py @@ -0,0 +1,30 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# We invoke a Python program to dump our environment in order to get +# native paths printed on Windows so that these paths can be incorporated +# into Python configure's environment. 
+import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), "..")) + +from shellutil import quote + + +def environ(): + # We would use six.ensure_text but the global Python isn't guaranteed to have + # the correct version of six installed. + def ensure_text(s): + if sys.version_info > (3, 0) or isinstance(s, unicode): + # os.environ always returns string keys and values in Python 3. + return s + else: + return s.decode("utf-8") + + return [(ensure_text(k), ensure_text(v)) for (k, v) in os.environ.items()] + + +for key, value in environ(): + print("%s=%s" % (key, quote(value))) diff --git a/python/mozbuild/mozbuild/action/dumpsymbols.py b/python/mozbuild/mozbuild/action/dumpsymbols.py new file mode 100644 index 0000000000..0af2c1c4e5 --- /dev/null +++ b/python/mozbuild/mozbuild/action/dumpsymbols.py @@ -0,0 +1,109 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os +import shutil +import subprocess +import sys + +import buildconfig + + +def dump_symbols(target, tracking_file, count_ctors=False): + # Our tracking file, if present, will contain path(s) to the previously generated + # symbols. Remove them in this case so we don't simply accumulate old symbols + # during incremental builds. + if os.path.isfile(os.path.normpath(tracking_file)): + with open(tracking_file, "r") as fh: + files = fh.read().splitlines() + dirs = set(os.path.dirname(f) for f in files) + for d in dirs: + shutil.rmtree( + os.path.join(buildconfig.topobjdir, "dist", "crashreporter-symbols", d), + ignore_errors=True, + ) + + # Build default args for symbolstore.py based on platform. 
+ sym_store_args = [] + + dump_syms_bin = buildconfig.substs["DUMP_SYMS"] + os_arch = buildconfig.substs["OS_ARCH"] + if os_arch == "WINNT": + sym_store_args.extend(["-c", "--vcs-info"]) + if "PDBSTR" in buildconfig.substs: + sym_store_args.append("-i") + elif os_arch == "Darwin": + cpu = { + "x86": "i386", + "aarch64": "arm64", + }.get(buildconfig.substs["TARGET_CPU"], buildconfig.substs["TARGET_CPU"]) + sym_store_args.extend(["-c", "-a", cpu, "--vcs-info"]) + elif os_arch == "Linux": + sym_store_args.extend(["-c", "--vcs-info"]) + + sym_store_args.append( + "--install-manifest=%s,%s" + % ( + os.path.join( + buildconfig.topobjdir, "_build_manifests", "install", "dist_include" + ), + os.path.join(buildconfig.topobjdir, "dist", "include"), + ) + ) + objcopy = buildconfig.substs.get("OBJCOPY") + if objcopy: + os.environ["OBJCOPY"] = objcopy + + if buildconfig.substs.get("MOZ_THUNDERBIRD"): + sym_store_args.extend(["-s", os.path.join(buildconfig.topsrcdir, "comm")]) + + args = ( + [ + sys.executable, + os.path.join( + buildconfig.topsrcdir, + "toolkit", + "crashreporter", + "tools", + "symbolstore.py", + ), + ] + + sym_store_args + + [ + "-s", + buildconfig.topsrcdir, + dump_syms_bin, + os.path.join(buildconfig.topobjdir, "dist", "crashreporter-symbols"), + os.path.abspath(target), + ] + ) + if count_ctors: + args.append("--count-ctors") + print("Running: %s" % " ".join(args)) + out_files = subprocess.check_output(args, universal_newlines=True) + with open(tracking_file, "w", encoding="utf-8", newline="\n") as fh: + fh.write(out_files) + fh.flush() + + +def main(argv): + parser = argparse.ArgumentParser( + usage="Usage: dumpsymbols.py <library or program> <tracking file>" + ) + parser.add_argument( + "--count-ctors", + action="store_true", + default=False, + help="Count static initializers", + ) + parser.add_argument("library_or_program", help="Path to library or program") + parser.add_argument("tracking_file", help="Tracking file") + args = parser.parse_args() + + return dump_symbols(args.library_or_program, args.tracking_file, args.count_ctors) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/exe_7z_archive.py b/python/mozbuild/mozbuild/action/exe_7z_archive.py new file mode 100644 index 0000000000..b0d35be2bf --- /dev/null +++ b/python/mozbuild/mozbuild/action/exe_7z_archive.py @@ -0,0 +1,89 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
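+# +# The installer written by archive_exe() below is a plain concatenation of +# the (optionally UPX-compressed) SFX stub, the tag file, and the generated +# app.7z, made executable with mode 0o755.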
+ +import os +import shutil +import subprocess +import sys +import tempfile + +import buildconfig +import mozpack.path as mozpath + +from mozbuild.base import BuildEnvironmentNotFoundException + + +def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx): + tmpdir = tempfile.mkdtemp(prefix="tmp") + try: + if pkg_dir: + shutil.move(pkg_dir, "core") + + if use_upx: + final_sfx = mozpath.join(tmpdir, "7zSD.sfx") + upx = buildconfig.substs.get("UPX", "upx") + wine = buildconfig.substs.get("WINE") + if wine and upx.lower().endswith(".exe"): + cmd = [wine, upx] + else: + cmd = [upx] + subprocess.check_call( + cmd + + [ + "--best", + "-o", + final_sfx, + sfx_package, + ] + ) + else: + final_sfx = sfx_package + + try: + sevenz = buildconfig.config.substs["7Z"] + except BuildEnvironmentNotFoundException: + # configure hasn't been run, just use the default + sevenz = "7z" + subprocess.check_call( + [ + sevenz, + "a", + "-r", + "-t7z", + mozpath.join(tmpdir, "app.7z"), + "-mx", + "-m0=BCJ2", + "-m1=LZMA:d25", + "-m2=LZMA:d19", + "-m3=LZMA:d19", + "-mb0:1", + "-mb0s1:2", + "-mb0s2:3", + ] + ) + + with open(package, "wb") as o: + for i in [final_sfx, tagfile, mozpath.join(tmpdir, "app.7z")]: + shutil.copyfileobj(open(i, "rb"), o) + os.chmod(package, 0o0755) + finally: + if pkg_dir: + shutil.move("core", pkg_dir) + shutil.rmtree(tmpdir) + + +def main(args): + if len(args) != 5: + print( + "Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>", + file=sys.stderr, + ) + return 1 + else: + archive_exe(args[0], args[1], args[2], args[3], args[4]) + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/fat_aar.py b/python/mozbuild/mozbuild/action/fat_aar.py new file mode 100644 index 0000000000..d17d4696a0 --- /dev/null +++ b/python/mozbuild/mozbuild/action/fat_aar.py @@ -0,0 +1,185 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Fetch and unpack architecture-specific Maven zips, verify cross-architecture +compatibility, and ready inputs to an Android multi-architecture fat AAR build. +""" + +import argparse +import sys +from collections import OrderedDict, defaultdict +from hashlib import sha1 # We don't need a strong hash to compare inputs. +from io import BytesIO +from zipfile import ZipFile + +import mozpack.path as mozpath +import six +from mozpack.copier import FileCopier +from mozpack.files import JarFinder +from mozpack.mozjar import JarReader +from mozpack.packager.unpack import UnpackFinder + + +def fat_aar(distdir, aars_paths, no_process=False, no_compatibility_check=False): + if no_process: + print("Not processing architecture-specific artifact Maven AARs.") + return 0 + + # Map {filename: {fingerprint: [arch1, arch2, ...]}}. + diffs = defaultdict(lambda: defaultdict(list)) + missing_arch_prefs = set() + # Collect multi-architecture inputs to the fat AAR. + copier = FileCopier() + + for arch, aar_path in aars_paths.items(): + # Map old non-architecture-specific path to new architecture-specific path. + old_rewrite_map = { + "greprefs.js": "{}/greprefs.js".format(arch), + "defaults/pref/geckoview-prefs.js": "defaults/pref/{}/geckoview-prefs.js".format( + arch + ), + } + + # Architecture-specific preferences files.
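+ # For arch "x86_64", for example, this set is {"x86_64/greprefs.js", + # "defaults/pref/x86_64/geckoview-prefs.js"}.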
+ arch_prefs = set(old_rewrite_map.values()) + missing_arch_prefs |= set(arch_prefs) + + jar_finder = JarFinder(aar_path, JarReader(aar_path)) + for path, fileobj in UnpackFinder(jar_finder): + # Native libraries go straight through. + if mozpath.match(path, "jni/**"): + copier.add(path, fileobj) + + elif path in arch_prefs: + copier.add(path, fileobj) + + elif path in ("classes.jar", "annotations.zip"): + # annotations.zip differs due to timestamps, but the contents should not. + + # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt, + # and it's not worth working around, so we use Python's zip functionality + # instead. + z = ZipFile(BytesIO(fileobj.open().read())) + for r in z.namelist(): + fingerprint = sha1(z.open(r).read()).hexdigest() + diffs["{}!/{}".format(path, r)][fingerprint].append(arch) + + else: + fingerprint = sha1(six.ensure_binary(fileobj.open().read())).hexdigest() + # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here, + # since in practice they will never overlap. + diffs[path][fingerprint].append(arch) + + missing_arch_prefs.discard(path) + + # Some differences are allowed across the architecture-specific AARs. We could allow-list + # the actual content, but it's not necessary right now. + allow_pattern_list = { + "AndroidManifest.xml", # Min SDK version is different for 32- and 64-bit builds. + "classes.jar!/org/mozilla/gecko/util/HardwareUtils.class", # Min SDK as well. + "classes.jar!/org/mozilla/geckoview/BuildConfig.class", + # Each input captures its CPU architecture. + "chrome/toolkit/content/global/buildconfig.html", + # Bug 1556162: localized resources are not deterministic across + # per-architecture builds triggered from the same push. + "**/*.ftl", + "**/*.dtd", + "**/*.properties", + } + + not_allowed = OrderedDict() + + def format_diffs(ds): + # Like ' armeabi-v7a, arm64-v8a -> XXX\n x86, x86_64 -> YYY'. + return "\n".join( + sorted( + " {archs} -> {fingerprint}".format( + archs=", ".join(sorted(archs)), fingerprint=fingerprint + ) + for fingerprint, archs in ds.items() + ) + ) + + for p, ds in sorted(diffs.items()): + if len(ds) <= 1: + # Only one hash across all inputs: roll on. + continue + + if any(mozpath.match(p, pat) for pat in allow_pattern_list): + print( + 'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'.format( + path=p, ds_repr=format_diffs(ds) + ) + ) + continue + + not_allowed[p] = ds + + for p, ds in not_allowed.items(): + print( + 'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'.format( + path=p, ds_repr=format_diffs(ds) + ) + ) + + for missing in sorted(missing_arch_prefs): + print( + "Disallowed: Inputs missing expected architecture-specific input: {missing}".format( + missing=missing + ) + ) + + if not no_compatibility_check and (missing_arch_prefs or not_allowed): + return 1 + + output_dir = mozpath.join(distdir, "output") + copier.copy(output_dir) + + return 0 + + +_ALL_ARCHS = ("armeabi-v7a", "arm64-v8a", "x86_64", "x86") + + +def main(argv): + description = """Unpack architecture-specific Maven AARs, verify cross-architecture +compatibility, and ready inputs to an Android multi-architecture fat AAR build.""" + + parser = argparse.ArgumentParser(description=description) + parser.add_argument( + "--no-process", action="store_true", help="Do not process Maven AARs." 
+ ) + parser.add_argument( + "--no-compatibility-check", + action="store_true", + help="Do not fail if Maven AARs are not compatible.", + ) + parser.add_argument("--distdir", required=True) + + for arch in _ALL_ARCHS: + command_line_flag = arch.replace("_", "-") + parser.add_argument("--{}".format(command_line_flag), dest=arch) + + args = parser.parse_args(argv) + + args_dict = vars(args) + + aars_paths = { + arch: args_dict.get(arch) for arch in _ALL_ARCHS if args_dict.get(arch) + } + + if not aars_paths: + raise ValueError("You must provide at least one AAR file!") + + return fat_aar( + args.distdir, + aars_paths, + no_process=args.no_process, + no_compatibility_check=args.no_compatibility_check, + ) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/file_generate.py b/python/mozbuild/mozbuild/action/file_generate.py new file mode 100644 index 0000000000..98dec4e359 --- /dev/null +++ b/python/mozbuild/mozbuild/action/file_generate.py @@ -0,0 +1,155 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Given a Python script and arguments describing the output file, and +# the arguments that can be used to generate the output file, call the +# script's |main| method with appropriate arguments. + +import argparse +import importlib.util +import os +import sys +import traceback + +import buildconfig +import six + +from mozbuild.action.util import log_build_task +from mozbuild.makeutil import Makefile +from mozbuild.pythonutil import iter_modules_in_path +from mozbuild.util import FileAvoidWrite + + +def main(argv): + parser = argparse.ArgumentParser( + "Generate a file from a Python script", add_help=False + ) + parser.add_argument( + "--locale", metavar="locale", type=six.text_type, help="The locale in use." + ) + parser.add_argument( + "python_script", + metavar="python-script", + type=six.text_type, + help="The Python script to run", + ) + parser.add_argument( + "method_name", + metavar="method-name", + type=six.text_type, + help="The method of the script to invoke", + ) + parser.add_argument( + "output_file", + metavar="output-file", + type=six.text_type, + help="The file to generate", + ) + parser.add_argument( + "dep_file", + metavar="dep-file", + type=six.text_type, + help="File to write any additional make dependencies to", + ) + parser.add_argument( + "dep_target", + metavar="dep-target", + type=six.text_type, + help="Make target to use in the dependencies file", + ) + parser.add_argument( + "additional_arguments", + metavar="arg", + nargs=argparse.REMAINDER, + help="Additional arguments to the script's main() method", + ) + + args = parser.parse_args(argv) + + kwargs = {} + if args.locale: + kwargs["locale"] = args.locale + script = args.python_script + # Permit the script to import modules from the same directory in which it + # resides. The justification for doing this is that if we were invoking + # the script as: + # + # python script arg1... + # + # then importing modules from the script's directory would come for free. + # Since we're invoking the script in a roundabout way, we provide this + # bit of convenience. 
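+ # For example (hypothetical layout), a generator at tools/gen/make_tables.py + # can "import gen_common" to reach a sibling tools/gen/gen_common.py.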
+ sys.path.append(os.path.dirname(script)) + spec = importlib.util.spec_from_file_location("script", script) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + method = args.method_name + if not hasattr(module, method): + print( + 'Error: script "{0}" is missing a {1} method'.format(script, method), + file=sys.stderr, + ) + return 1 + + ret = 1 + try: + with FileAvoidWrite(args.output_file, readmode="rb") as output: + try: + ret = module.__dict__[method]( + output, *args.additional_arguments, **kwargs + ) + except Exception: + # Ensure that we don't overwrite the file if the script failed. + output.avoid_writing_to_file() + raise + + # The following values indicate a statement of success: + # - a set() (see below) + # - 0 + # - False + # - None + # + # Everything else is an error (so scripts can conveniently |return + # 1| or similar). If a set is returned, the elements of the set + # indicate additional dependencies that will be listed in the deps + # file. Python module imports are automatically included as + # dependencies. + if isinstance(ret, set): + deps = set(six.ensure_text(s) for s in ret) + # The script succeeded, so reset |ret| to indicate that. + ret = None + else: + deps = set() + + # Only write out the dependencies if the script was successful + if not ret: + # Add dependencies on any python modules that were imported by + # the script. + deps |= set( + six.ensure_text(s) + for s in iter_modules_in_path( + buildconfig.topsrcdir, buildconfig.topobjdir + ) + ) + # Add dependencies on any buildconfig items that were accessed + # by the script. + deps |= set(six.ensure_text(s) for s in buildconfig.get_dependencies()) + + mk = Makefile() + mk.create_rule([args.dep_target]).add_dependencies(deps) + with FileAvoidWrite(args.dep_file) as dep_file: + mk.dump(dep_file) + else: + # Ensure that we don't overwrite the file if the script failed. + output.avoid_writing_to_file() + + except IOError as e: + print('Error opening file "{0}"'.format(e.filename), file=sys.stderr) + traceback.print_exc() + return 1 + return ret + + +if __name__ == "__main__": + sys.exit(log_build_task(main, sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/file_generate_wrapper.py b/python/mozbuild/mozbuild/action/file_generate_wrapper.py new file mode 100644 index 0000000000..b6c030bbf6 --- /dev/null +++ b/python/mozbuild/mozbuild/action/file_generate_wrapper.py @@ -0,0 +1,38 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import subprocess +import sys +from pathlib import Path + +import buildconfig + + +def action(fh, script, target_dir, *args): + fh.close() + os.unlink(fh.name) + + args = list(args) + objdir = Path.cwd() + topsrcdir = Path(buildconfig.topsrcdir) + + def make_absolute(base_path, p): + return Path(base_path) / Path(p.lstrip("/")) + + try: + abs_target_dir = str(make_absolute(objdir, target_dir)) + abs_script = make_absolute(topsrcdir, script) + script = [str(abs_script)] + if abs_script.suffix == ".py": + script = [sys.executable] + script + subprocess.check_call(script + args, cwd=abs_target_dir) + except Exception: + relative = os.path.relpath(__file__, topsrcdir) + print( + "%s:action caught exception. 
params=%s\n" + % (relative, json.dumps([script, target_dir] + args, indent=2)) + ) + raise diff --git a/python/mozbuild/mozbuild/action/generate_symbols_file.py b/python/mozbuild/mozbuild/action/generate_symbols_file.py new file mode 100644 index 0000000000..955a676c08 --- /dev/null +++ b/python/mozbuild/mozbuild/action/generate_symbols_file.py @@ -0,0 +1,95 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os +from io import StringIO + +import buildconfig + +from mozbuild.preprocessor import Preprocessor +from mozbuild.util import DefinesAction + + +def generate_symbols_file(output, *args): + """ """ + parser = argparse.ArgumentParser() + parser.add_argument("input") + parser.add_argument("-D", action=DefinesAction) + parser.add_argument("-U", action="append", default=[]) + args = parser.parse_args(args) + input = os.path.abspath(args.input) + + pp = Preprocessor() + pp.context.update(buildconfig.defines["ALLDEFINES"]) + if args.D: + pp.context.update(args.D) + for undefine in args.U: + if undefine in pp.context: + del pp.context[undefine] + # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines + if buildconfig.substs.get("MOZ_DEBUG"): + pp.context["DEBUG"] = "1" + # Ensure @DATA@ works as expected (see the Windows section further below) + if buildconfig.substs["OS_TARGET"] == "WINNT": + pp.context["DATA"] = "DATA" + else: + pp.context["DATA"] = "" + pp.out = StringIO() + pp.do_filter("substitution") + pp.do_include(input) + + symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()] + + libname, ext = os.path.splitext(os.path.basename(output.name)) + + if buildconfig.substs["OS_TARGET"] == "WINNT": + # A def file is generated for MSVC link.exe that looks like the + # following: + # LIBRARY library.dll + # EXPORTS + # symbol1 + # symbol2 + # ... + # + # link.exe however requires special markers for data symbols, so in + # that case the symbols look like: + # data_symbol1 DATA + # data_symbol2 DATA + # ... + # + # In the input file, this is just annotated with the following syntax: + # data_symbol1 @DATA@ + # data_symbol2 @DATA@ + # ... + # The DATA variable is "simply" expanded by the preprocessor, to + # nothing on non-Windows, such that we only get the symbol name on + # those platforms, and to DATA on Windows, so that the "DATA" part + # is, in fact, part of the symbol name as far as the symbols variable + # is concerned. + assert ext == ".def" + output.write("LIBRARY %s\nEXPORTS\n %s\n" % (libname, "\n ".join(symbols))) + elif ( + buildconfig.substs.get("GCC_USE_GNU_LD") + or buildconfig.substs["OS_TARGET"] == "SunOS" + ): + # A linker version script is generated for GNU LD that looks like the + # following: + # liblibrary.so { + # global: + # symbol1; + # symbol2; + # ... + # local: + # *; + # }; + output.write( + "%s {\nglobal:\n %s;\nlocal:\n *;\n};" % (libname, ";\n ".join(symbols)) + ) + elif buildconfig.substs["OS_TARGET"] == "Darwin": + # A list of symbols is generated for Apple ld that simply lists all + # symbols, with an underscore prefix. 
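+ # For example, symbols ["foo", "bar"] produce: + # _foo + # _bar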
+ output.write("".join("_%s\n" % s for s in symbols)) + + return set(pp.includes) diff --git a/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py b/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py new file mode 100644 index 0000000000..f957318a7f --- /dev/null +++ b/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py @@ -0,0 +1,101 @@ +import json +import re +import xml.etree.ElementTree as ET +from pathlib import Path + +JS_FILE_TEMPLATE = """\ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +const EXPORTED_SYMBOLS = ["getHTMLFragment"]; + +const Fragments = {json_string}; + +/* + * Loads HTML fragment strings pulled from fragment documents. + * @param key - key identifying HTML fragment + * + * @return raw HTML/XHTML string + */ +const getHTMLFragment = key => Fragments[key]; +""" + +RE_COLLAPSE_WHITESPACE = re.compile(r"\s+") + + +def get_fragment_key(path, template_name=None): + key = Path(path).stem + if template_name: + key += "/" + template_name + return key + + +def fill_html_fragments_map(fragment_map, path, template, doctype=None): + # collape white space + for elm in template.iter(): + if elm.text: + elm.text = RE_COLLAPSE_WHITESPACE.sub(" ", elm.text) + if elm.tail: + elm.tail = RE_COLLAPSE_WHITESPACE.sub(" ", elm.tail) + key = get_fragment_key(path, template.attrib.get("name")) + xml = "".join(ET.tostring(elm, encoding="unicode") for elm in template).strip() + if doctype: + xml = doctype + "\n" + xml + fragment_map[key] = xml + + +def get_html_fragments_from_file(fragment_map, path): + for _, (name, value) in ET.iterparse(path, events=["start-ns"]): + ET.register_namespace(name, value) + tree = ET.parse(path) + root = tree.getroot() + sub_templates = root.findall("{http://www.w3.org/1999/xhtml}template") + # if all nested nodes are templates then treat as list of templates + if len(sub_templates) == len(root): + doctype = "" + for template in sub_templates: + if template.get("doctype") == "true": + doctype = template.text.strip() + break + for template in sub_templates: + if template.get("doctype") != "true": + fill_html_fragments_map(fragment_map, path, template, doctype) + else: + fill_html_fragments_map(fragment_map, path, root, None) + + +def generate(output, *inputs): + """Builds an html fragments loader JS file from the input xml file(s) + + The xml files are expected to be in the format of: + `` + + or `` + Where there are multiple templates. All markup is expected to be properly namespaced. + + In the JS file, calling getHTMLFragment(key) will return the HTML string from the xml file + that matches the key. + + The key format is `filename_without_extension/template_name` for files with + multiple templates, or just `filename_without_extension` for files with one template. + `filename_without_extension` is the xml filename without the .xml extension + and `template_name` is the name attribute of template node containing the xml fragment. + + Arguments: + output -- File handle to JS file being generated + inputs -- list of xml filenames to include in loader + + Returns: + The set of dependencies which should trigger this command to be re-run. + This is ultimately returned to the build system for use by the backend + to ensure that incremental rebuilds happen when any dependency changes. 
+ """ + + fragment_map = {} + for file in inputs: + get_html_fragments_from_file(fragment_map, file) + json_string = json.dumps(fragment_map, separators=(",", ":")) + contents = JS_FILE_TEMPLATE.format(json_string=json_string) + output.write(contents) + return set(inputs) diff --git a/python/mozbuild/mozbuild/action/install.py b/python/mozbuild/mozbuild/action/install.py new file mode 100644 index 0000000000..02f0f2694a --- /dev/null +++ b/python/mozbuild/mozbuild/action/install.py @@ -0,0 +1,22 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# A simple script to invoke mozinstall from the command line without depending +# on a build config. + +import sys + +import mozinstall + + +def main(args): + if len(args) != 2: + print("Usage: install.py [src] [dest]") + return 1 + src, dest = args + mozinstall.install(src, dest) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/jar_maker.py b/python/mozbuild/mozbuild/action/jar_maker.py new file mode 100644 index 0000000000..a244b66a52 --- /dev/null +++ b/python/mozbuild/mozbuild/action/jar_maker.py @@ -0,0 +1,16 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import sys + +import mozbuild.jar +from mozbuild.action.util import log_build_task + + +def main(args): + return mozbuild.jar.main(args) + + +if __name__ == "__main__": + sys.exit(log_build_task(main, sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/l10n_merge.py b/python/mozbuild/mozbuild/action/l10n_merge.py new file mode 100644 index 0000000000..1a04d60107 --- /dev/null +++ b/python/mozbuild/mozbuild/action/l10n_merge.py @@ -0,0 +1,42 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import argparse +import os +import shutil +import sys + +from mozbuild.util import ensureParentDir + + +def main(argv): + parser = argparse.ArgumentParser(description="Merge l10n files.") + parser.add_argument("--output", help="Path to write merged output") + parser.add_argument("--ref-file", help="Path to reference file (en-US)") + parser.add_argument("--l10n-file", help="Path to locale file") + + args = parser.parse_args(argv) + + from compare_locales.compare import ContentComparer, Observer + from compare_locales.paths import File + + cc = ContentComparer([Observer()]) + cc.compare( + File(args.ref_file, args.ref_file, ""), + File(args.l10n_file, args.l10n_file, ""), + args.output, + ) + + ensureParentDir(args.output) + if not os.path.exists(args.output): + src = args.l10n_file + if not os.path.exists(args.l10n_file): + src = args.ref_file + shutil.copy(src, args.output) + + return 0 + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/python/mozbuild/mozbuild/action/langpack_localeNames.json b/python/mozbuild/mozbuild/action/langpack_localeNames.json new file mode 100644 index 0000000000..9014c7717a --- /dev/null +++ b/python/mozbuild/mozbuild/action/langpack_localeNames.json @@ -0,0 +1,426 @@ +{ + "ach": { + "english": "Acoli", + "native": "Acholi" + }, + "af": { + "native": "Afrikaans" + }, + "an": { + "english": "Aragonese", + "native": "Aragonés" + }, + "ar": { + "english": "Arabic", + "native": "العربية" + }, + "ast": { + "english": "Asturian", + "native": "Asturianu" + }, + "az": { + "english": "Azerbaijani", + "native": "Azərbaycanca" + }, + "be": { + "english": "Belarusian", + "native": "Беларуская" + }, + "bg": { + "english": "Bulgarian", + "native": "Български" + }, + "bn": { + "english": "Bangla", + "native": "বাংলা" + }, + "bo": { + "english": "Tibetan", + "native": "བོད་སྐད" + }, + "br": { + "english": "Breton", + "native": "Brezhoneg" + }, + "brx": { + "english": "Bodo", + "native": "बड़ो" + }, + "bs": { + "english": "Bosnian", + "native": "Bosanski" + }, + "ca": { + "english": "Catalan", + "native": "Català" + }, + "ca-valencia": { + "english": "Catalan, Valencian", + "native": "Català (Valencià)" + }, + "cak": { + "native": "Kaqchikel" + }, + "cs": { + "english": "Czech", + "native": "Čeština" + }, + "cy": { + "english": "Welsh", + "native": "Cymraeg" + }, + "da": { + "english": "Danish", + "native": "Dansk" + }, + "de": { + "english": "German", + "native": "Deutsch" + }, + "dsb": { + "english": "Lower Sorbian", + "native": "Dolnoserbšćina" + }, + "el": { + "english": "Greek", + "native": "Ελληνικά" + }, + "en-CA": { + "native": "English (CA)" + }, + "en-GB": { + "native": "English (GB)" + }, + "en-US": { + "native": "English (US)" + }, + "eo": { + "native": "Esperanto" + }, + "es-AR": { + "english": "Spanish, Argentina", + "native": "Español (AR)" + }, + "es-CL": { + "english": "Spanish, Chile", + "native": "Español (CL)" + }, + "es-ES": { + "english": "Spanish, Spain", + "native": "Español (ES)" + }, + "es-MX": { + "english": "Spanish, Mexico", + "native": "Español (MX)" + }, + "et": { + "english": "Estonian", + "native": "Eesti" + }, + "eu": { + "english": "Basque", + "native": "Euskara" + }, + "fa": { + "english": "Persian", + "native": "فارسی" + }, + "ff": { + "english": "Fulah", + "native": "Pulaar" + }, + "fi": { + "english": "Finnish", + "native": "Suomi" + }, + "fr": { + "english": "French", + "native": "Français" + }, + "fur": { + "english": "Friulian", + "native": "Furlan" + }, + "fy-NL": { + "english": "Frisian", + "native": "Frysk" +
}, + "ga-IE": { + "english": "Irish", + "native": "Gaeilge" + }, + "gd": { + "english": "Scottish Gaelic", + "native": "Gàidhlig" + }, + "gl": { + "english": "Galician", + "native": "Galego" + }, + "gn": { + "native": "Guarani" + }, + "gu-IN": { + "english": "Gujarati", + "native": "ગુજરાતી" + }, + "he": { + "english": "Hebrew", + "native": "עברית" + }, + "hi-IN": { + "english": "Hindi", + "native": "हिन्दी" + }, + "hr": { + "english": "Croatian", + "native": "Hrvatski" + }, + "hsb": { + "english": "Upper Sorbian", + "native": "Hornjoserbšćina" + }, + "hu": { + "english": "Hungarian", + "native": "Magyar" + }, + "hy-AM": { + "english": "Armenian", + "native": "հայերեն" + }, + "ia": { + "native": "Interlingua" + }, + "id": { + "english": "Indonesian", + "native": "Indonesia" + }, + "is": { + "english": "Icelandic", + "native": "Islenska" + }, + "it": { + "english": "Italian", + "native": "Italiano" + }, + "ja": { + "english": "Japanese", + "native": "日本語" + }, + "ja-JP-mac": { + "english": "Japanese", + "native": "日本語" + }, + "ka": { + "english": "Georgian", + "native": "ქართული" + }, + "kab": { + "english": "Kabyle", + "native": "Taqbaylit" + }, + "kk": { + "english": "Kazakh", + "native": "қазақ тілі" + }, + "km": { + "english": "Khmer", + "native": "ខ្មែរ" + }, + "kn": { + "english": "Kannada", + "native": "ಕನ್ನಡ" + }, + "ko": { + "english": "Korean", + "native": "한국어" + }, + "lij": { + "english": "Ligurian", + "native": "Ligure" + }, + "lo": { + "english": "Lao", + "native": "ລາວ" + }, + "lt": { + "english": "Lithuanian", + "native": "Lietuvių" + }, + "ltg": { + "english": "Latgalian", + "native": "Latgalīšu" + }, + "lv": { + "english": "Latvian", + "native": "Latviešu" + }, + "mk": { + "english": "Macedonian", + "native": "македонски" + }, + "ml": { + "english": "Malayalam", + "native": "മലയാളം" + }, + "mr": { + "english": "Marathi", + "native": "मराठी" + }, + "ms": { + "english": "Malay", + "native": "Melayu" + }, + "my": { + "english": "Burmese", + "native": "မြန်မာ" + }, + "nb-NO": { + "english": "Norwegian Bokmål", + "native": "Norsk Bokmål" + }, + "ne-NP": { + "english": "Nepali", + "native": "नेपाली" + }, + "nl": { + "english": "Dutch", + "native": "Nederlands" + }, + "nn-NO": { + "english": "Norwegian Nynorsk", + "native": "Nynorsk" + }, + "oc": { + "native": "Occitan" + }, + "or": { + "english": "Odia", + "native": "ଓଡ଼ିଆ" + }, + "pa-IN": { + "english": "Punjabi", + "native": "ਪੰਜਾਬੀ" + }, + "pl": { + "english": "Polish", + "native": "Polski" + }, + "pt-BR": { + "english": "Brazilian Portuguese", + "native": "Português (BR)" + }, + "pt-PT": { + "english": "Portuguese", + "native": "Português (PT)" + }, + "rm": { + "english": "Romansh", + "native": "Rumantsch" + }, + "ro": { + "english": "Romanian", + "native": "Română" + }, + "ru": { + "english": "Russian", + "native": "Русский" + }, + "sc": { + "english": "Sardinian", + "native": "Sardu" + }, + "sco": { + "native": "Scots" + }, + "si": { + "english": "Sinhala", + "native": "සිංහල" + }, + "sk": { + "english": "Slovak", + "native": "Slovenčina" + }, + "sl": { + "english": "Slovenian", + "native": "Slovenščina" + }, + "son": { + "english": "Songhai", + "native": "Soŋay" + }, + "sq": { + "english": "Albanian", + "native": "Shqip" + }, + "sr": { + "english": "Serbian", + "native": "Српски" + }, + "sv-SE": { + "english": "Swedish", + "native": "Svenska" + }, + "szl": { + "english": "Silesian", + "native": "Ślōnsko" + }, + "ta": { + "english": "Tamil", + "native": "தமிழ்" + }, + "te": { +
"english": "Telugu", + "native": "తెలà±à°—à±" + }, + "tg": { + "english": "Tajik", + "native": "Тоҷикӣ" + }, + "th": { + "english": "Thai", + "native": "ไทย" + }, + "tl": { + "english": "Filipino", + "native": "Tagalog" + }, + "tr": { + "english": "Turkish", + "native": "Türkçe" + }, + "trs": { + "native": "Triqui" + }, + "uk": { + "english": "Ukrainian", + "native": "УкраїнÑька" + }, + "ur": { + "english": "Urdu", + "native": "اردو" + }, + "uz": { + "english": "Uzbek", + "native": "O‘zbek" + }, + "vi": { + "english": "Vietnamese", + "native": "Tiếng Việt" + }, + "wo": { + "native": "Wolof" + }, + "xh": { + "english": "Xhosa", + "native": "IsiXhosa" + }, + "zh-CN": { + "english": "Simplified Chinese", + "native": "简体中文" + }, + "zh-TW": { + "english": "Traditional Chinese", + "native": "正體中文" + } +} diff --git a/python/mozbuild/mozbuild/action/langpack_manifest.py b/python/mozbuild/mozbuild/action/langpack_manifest.py new file mode 100644 index 0000000000..c79539cbce --- /dev/null +++ b/python/mozbuild/mozbuild/action/langpack_manifest.py @@ -0,0 +1,587 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +### +# This script generates a web manifest JSON file based on the xpi-stage +# directory structure. It extracts data necessary to produce the complete +# manifest file for a language pack: +# from the `langpack-manifest.ftl` file in the locale directory; +# from chrome registry entries; +# and from other information in the `xpi-stage` directory. +### + +import argparse +import datetime +import io +import json +import logging +import os +import re +import sys +import time + +import fluent.syntax.ast as FTL +import mozpack.path as mozpath +import mozversioncontrol +import requests +from fluent.syntax.parser import FluentParser +from mozpack.chrome.manifest import Manifest, ManifestLocale, parse_manifest + +from mozbuild.configure.util import Version + + +def write_file(path, content): + with io.open(path, "w", encoding="utf-8") as out: + out.write(content + "\n") + + +pushlog_api_url = "{0}/json-rev/{1}" + + +def get_build_date(): + """Return the current date or SOURCE_DATE_EPOCH, if set.""" + return datetime.datetime.utcfromtimestamp( + int(os.environ.get("SOURCE_DATE_EPOCH", time.time())) + ) + + +### +# Retrieves a UTC datetime of the push for the current commit from a +# mercurial clone directory. The SOURCE_DATE_EPOCH environment +# variable is honored, for reproducibility. 
+#
+# Args:
+#    path (str) - path to a directory
+#
+# Returns:
+#    (datetime) - a datetime object
+#
+# Example:
+#    dt = get_dt_from_hg("/var/vcs/l10n-central/pl")
+#    dt == datetime(2017, 10, 11, 23, 31, 54, 0)
+###
+def get_dt_from_hg(path):
+    with mozversioncontrol.get_repository_object(path=path) as repo:
+        phase = repo._run("log", "-r", ".", "-T", "{phase}")
+        if phase.strip() != "public":
+            return get_build_date()
+        repo_url = repo._run("paths", "default")
+        repo_url = repo_url.strip().replace("ssh://", "https://")
+        repo_url = repo_url.replace("hg://", "https://")
+        cs = repo._run("log", "-r", ".", "-T", "{node}")
+
+    url = pushlog_api_url.format(repo_url, cs)
+    session = requests.Session()
+    try:
+        response = session.get(url)
+    except Exception as e:
+        msg = "Failed to retrieve push timestamp using {}\nError: {}".format(url, e)
+        raise Exception(msg)
+
+    data = response.json()
+
+    try:
+        date = data["pushdate"][0]
+    except KeyError as exc:
+        msg = "{}\ndata is: {}".format(
+            str(exc), json.dumps(data, indent=2, sort_keys=True)
+        )
+        raise KeyError(msg)
+
+    return datetime.datetime.utcfromtimestamp(date)
+
+
+###
+# Generates a timestamp for a locale based on its path.
+# If possible, it uses the commit timestamp from the hg repository, and
+# if that fails, it generates a timestamp for `now`.
+#
+# The timestamp format is "{year}{month}{day}{hour}{minute}{second}" and
+# the datetime stored in it uses the UTC timezone.
+#
+# Args:
+#    path (str) - path to the locale directory
+#
+# Returns:
+#    (str) - a timestamp string
+#
+# Example:
+#    ts = get_timestamp_for_locale("/var/vcs/l10n-central/pl")
+#    ts == "20170914215617"
+###
+def get_timestamp_for_locale(path):
+    dt = None
+    if os.path.isdir(os.path.join(path, ".hg")):
+        dt = get_dt_from_hg(path)
+
+    if dt is None:
+        dt = get_build_date()
+
+    dt = dt.replace(microsecond=0)
+    return dt.strftime("%Y%m%d%H%M%S")
+
+
+###
+# Parses an FTL file into a key-value pair object.
+# Does not support attributes, terms, variables, functions or selectors;
+# only messages with values consisting of text elements and literals.
+#
+# Args:
+#    path (str) - a path to an FTL file
+#
+# Returns:
+#    (dict) - A mapping of message keys to formatted string values.
+#             Empty if the file at `path` was not found.
+#
+# Example:
+#    res = parse_flat_ftl('./browser/langpack-metadata.ftl')
+#    res == {
+#        'langpack-title': 'Polski',
+#        'langpack-creator': 'mozilla.org',
+#        'langpack-contributors': 'Joe Solon, Suzy Solon'
+#    }
+###
+def parse_flat_ftl(path):
+    parser = FluentParser(with_spans=False)
+    try:
+        with open(path, encoding="utf-8") as file:
+            res = parser.parse(file.read())
+    except FileNotFoundError as err:
+        logging.warning(err)
+        return {}
+
+    result = {}
+    for entry in res.body:
+        if isinstance(entry, FTL.Message) and isinstance(entry.value, FTL.Pattern):
+            flat = ""
+            for elem in entry.value.elements:
+                if isinstance(elem, FTL.TextElement):
+                    flat += elem.value
+                elif isinstance(elem.expression, FTL.Literal):
+                    flat += elem.expression.parse()["value"]
+                else:
+                    name = type(elem.expression).__name__
+                    raise Exception(f"Unsupported {name} for {entry.id.name} in {path}")
+            result[entry.id.name] = flat.strip()
+    return result
+
+
+###
+# Generates the title and description for the langpack.
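+# For example (assuming the bundled locale data and the templates in the
+# function body), get_title_and_description("Firefox", "pl") returns
+# ("Language: Polski (Polish)",
+#  "Firefox Language Pack for Polski (pl) – Polish").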
+#
+# Uses data stored in a JSON file next to this source,
+# which is expected to have the following format:
+#    Record<locale, { english?: string, native: string }>
+#
+# If an English name is given and differs from the native one, it is
+# included in the description and, if it fits within the character
+# limit, also in the name.
+#
+# The length limit for names is 45 characters and for descriptions 132;
+# return values are truncated if needed.
+#
+# NOTE: If you're updating the native locale names,
+# you should also update the data in
+# toolkit/components/mozintl/mozIntl.sys.mjs.
+#
+# Args:
+#    app (str)    - Application name
+#    locale (str) - Locale identifier
+#
+# Returns:
+#    (str, str) - Tuple of title and description
+#
+###
+def get_title_and_description(app, locale):
+    dir = os.path.dirname(__file__)
+    with open(os.path.join(dir, "langpack_localeNames.json"), encoding="utf-8") as nf:
+        names = json.load(nf)
+
+    nameCharLimit = 45
+    descCharLimit = 132
+    nameTemplate = "Language: {}"
+    descTemplate = "{} Language Pack for {}"
+
+    if locale in names:
+        data = names[locale]
+        native = data["native"]
+        english = data["english"] if "english" in data else native
+
+        if english != native:
+            title = nameTemplate.format(f"{native} ({english})")
+            if len(title) > nameCharLimit:
+                title = nameTemplate.format(native)
+            description = descTemplate.format(app, f"{native} ({locale}) – {english}")
+        else:
+            title = nameTemplate.format(native)
+            description = descTemplate.format(app, f"{native} ({locale})")
+    else:
+        title = nameTemplate.format(locale)
+        description = descTemplate.format(app, locale)
+
+    return title[:nameCharLimit], description[:descCharLimit]
+
+
+###
+# Builds the manifest author string from the `langpack-creator` string,
+# optionally appending the list of contributors, if provided.
+#
+# Args:
+#    ftl (dict) - a key-value mapping of locale-specific strings
+#
+# Returns:
+#    (str) - a string to be placed in the author field of the manifest.json
+#
+# Example:
+#    s = get_author({
+#        'langpack-creator': 'mozilla.org',
+#        'langpack-contributors': 'Joe Solon, Suzy Solon'
+#    })
+#    s == 'mozilla.org (contributors: Joe Solon, Suzy Solon)'
+###
+def get_author(ftl):
+    author = ftl["langpack-creator"] if "langpack-creator" in ftl else "mozilla.org"
+    contrib = ftl["langpack-contributors"] if "langpack-contributors" in ftl else ""
+    if contrib:
+        return f"{author} (contributors: {contrib})"
+    else:
+        return author
+
+
+###
+# Converts the list of chrome manifest entry flags to the list of platforms
+# for the langpack manifest.
+#
+# The list of result platforms is taken from AppConstants.platform.
+#
+# Args:
+#    flags (FlagList) - a list of Chrome Manifest entry flags
+#
+# Returns:
+#    (list) - a list of platforms the entry applies to
+#
+# Example:
+#    str(flags) == "os==Darwin os==WINNT"
+#    platforms = convert_entry_flags_to_platform_codes(flags)
+#    platforms == ['macosx', 'win']
+#
+# The method supports only the `os` flag name and the equality operator.
+# It will raise if used with other flags or operators.
+###
+def convert_entry_flags_to_platform_codes(flags):
+    if not flags:
+        return None
+
+    ret = []
+    for key in flags:
+        if key != "os":
+            raise Exception("Unknown flag name")
+
+        for value in flags[key].values:
+            if value[0] != "==":
+                raise Exception("Inequality flag cannot be converted")
+
+            if value[1] == "Android":
+                ret.append("android")
+            elif value[1] == "LikeUnix":
+                ret.append("linux")
+            elif value[1] == "Darwin":
+                ret.append("macosx")
+            elif value[1] == "WINNT":
+                ret.append("win")
+            else:
+                raise Exception("Unknown flag value {0}".format(value[1]))
+
+    return ret
+
+
+###
+# Recursively parses a chrome manifest file, appending new entries to the
+# result list.
+#
+# The function can handle two entry types: 'locale' and 'manifest'
+#
+# Args:
+#    path (str) - a path to a chrome manifest
+#    base_path (str) - a path to the base directory all chrome registry
+#                      entries will be relative to
+#    chrome_entries (list) - a list to which entries will be appended
+#
+# Example:
+#
+#    chrome_entries = []
+#    parse_chrome_manifest('./chrome.manifest', './', chrome_entries)
+#
+#    chrome_entries == [
+#        {
+#            'type': 'locale',
+#            'alias': 'devtools',
+#            'locale': 'pl',
+#            'platforms': None,
+#            'path': 'chrome/pl/locale/pl/devtools/'
+#        },
+#        {
+#            'type': 'locale',
+#            'alias': 'autoconfig',
+#            'locale': 'pl',
+#            'platforms': ['win', 'macosx'],
+#            'path': 'chrome/pl/locale/pl/autoconfig/'
+#        },
+#    ]
+###
+def parse_chrome_manifest(path, base_path, chrome_entries):
+    for entry in parse_manifest(None, path):
+        if isinstance(entry, Manifest):
+            parse_chrome_manifest(
+                os.path.join(os.path.dirname(path), entry.relpath),
+                base_path,
+                chrome_entries,
+            )
+        elif isinstance(entry, ManifestLocale):
+            entry_path = os.path.join(
+                os.path.relpath(os.path.dirname(path), base_path), entry.relpath
+            )
+            chrome_entries.append(
+                {
+                    "type": "locale",
+                    "alias": entry.name,
+                    "locale": entry.id,
+                    "platforms": convert_entry_flags_to_platform_codes(entry.flags),
+                    "path": mozpath.normsep(entry_path),
+                }
+            )
+        else:
+            raise Exception("Unknown type {0}".format(entry.name))
+
+
+###
+# Gets the version to use in the langpack.
+#
+# This uses the env variable MOZ_BUILD_DATE, if it exists, to expand the
+# version so it is unique in automation.
+#
+# Args:
+#    app_version - Application version
+#
+# Returns:
+#    str - Version to use
+#
+###
+def get_version_maybe_buildid(app_version):
+    def _extract_numeric_part(part):
+        matches = re.compile(r"[^\d]").search(part)
+        if matches:
+            part = part[0 : matches.start()]
+        if len(part) == 0:
+            return "0"
+        return part
+
+    parts = [_extract_numeric_part(part) for part in app_version.split(".")]
+
+    buildid = os.environ.get("MOZ_BUILD_DATE")
+    if buildid and len(buildid) != 14:
+        print("Ignoring invalid MOZ_BUILD_DATE: %s" % buildid, file=sys.stderr)
+        buildid = None
+
+    if buildid:
+        # Use a simple versioning format, see: Bug 1793925 - The version
+        # string should start with: <major>.<minor>
+        version = ".".join(parts[0:2])
+        # We then break the buildid into two version parts so that the full
+        # version looks like: <major>.<minor>.YYYYMMDD.HHmmss
+        date, time = buildid[:8], buildid[8:]
+        # Leading zeros are not allowed.
+        time = time.lstrip("0")
+        if len(time) == 0:
+            time = "0"
+        version = f"{version}.{date}.{time}"
+    else:
+        version = ".".join(parts)
+
+    return version
+
+
+###
+# Generates a new web manifest dict with values specific for a language pack.
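+# Chrome registry entries are folded into a single `chrome_resources`
+# map shared by all requested locales, with per-platform paths nested
+# under their alias (see the example below).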
+#
+# Args:
+#    locstr (str)       - A string with a comma separated list of locales
+#                         for which resources are embedded in the
+#                         language pack
+#    version (str)      - The application version the langpack version is
+#                         derived from
+#    min_app_ver (str)  - A minimum version of the application the language
+#                         resources are for
+#    max_app_ver (str)  - A maximum version of the application the language
+#                         resources are for
+#    app_name (str)     - The name of the application the language
+#                         resources are for
+#    l10n_basedir (str) - The base directory the locale checkouts live in
+#    langpack_eid (str) - The extension id to use for the language pack
+#    ftl (dict)         - A dictionary of locale-specific strings
+#    chrome_entries (list) - A list of chrome registry entries
+#
+# Returns:
+#    (str) - a serialized web manifest (JSON)
+#
+# Example:
+#    manifest = create_webmanifest(
+#      'pl',
+#      '57.0',
+#      '57.0',
+#      '57.0.*',
+#      'Firefox',
+#      '/var/vcs/l10n-central',
+#      'langpack-pl@mozilla.org',
+#      {'langpack-title': 'Polski'},
+#      chrome_entries
+#    )
+#    manifest == {
+#      'languages': {
+#          'pl': {
+#              'version': '20170914215617',
+#              'chrome_resources': {
+#                  'alert': 'chrome/pl/locale/pl/alert/',
+#                  'branding': 'browser/chrome/pl/locale/global/',
+#                  'global-platform': {
+#                      'macosx': 'chrome/pl/locale/pl/global-platform/mac/',
+#                      'win': 'chrome/pl/locale/pl/global-platform/win/',
+#                      'linux': 'chrome/pl/locale/pl/global-platform/unix/',
+#                      'android': 'chrome/pl/locale/pl/global-platform/unix/',
+#                  },
+#                  'forms': 'browser/chrome/pl/locale/forms/',
+#                  ...
+#              }
+#          }
+#      },
+#      'sources': {
+#          'browser': {
+#              'base_path': 'browser/'
+#          }
+#      },
+#      'browser_specific_settings': {
+#          'gecko': {
+#              'strict_min_version': '57.0',
+#              'strict_max_version': '57.0.*',
+#              'id': 'langpack-pl@mozilla.org',
+#          }
+#      },
+#      'version': '57.0',
+#      'name': 'Language: Polski (Polish)',
+#      ...
+#    }
+###
+def create_webmanifest(
+    locstr,
+    version,
+    min_app_ver,
+    max_app_ver,
+    app_name,
+    l10n_basedir,
+    langpack_eid,
+    ftl,
+    chrome_entries,
+):
+    locales = list(map(lambda loc: loc.strip(), locstr.split(",")))
+    main_locale = locales[0]
+    title, description = get_title_and_description(app_name, main_locale)
+    author = get_author(ftl)
+
+    manifest = {
+        "langpack_id": main_locale,
+        "manifest_version": 2,
+        "browser_specific_settings": {
+            "gecko": {
+                "id": langpack_eid,
+                "strict_min_version": min_app_ver,
+                "strict_max_version": max_app_ver,
+            }
+        },
+        "name": title,
+        "description": description,
+        "version": get_version_maybe_buildid(version),
+        "languages": {},
+        "sources": {"browser": {"base_path": "browser/"}},
+        "author": author,
+    }
+
+    cr = {}
+    for entry in chrome_entries:
+        if entry["type"] == "locale":
+            platforms = entry["platforms"]
+            if platforms:
+                if entry["alias"] not in cr:
+                    cr[entry["alias"]] = {}
+                for platform in platforms:
+                    cr[entry["alias"]][platform] = entry["path"]
+            else:
+                assert entry["alias"] not in cr
+                cr[entry["alias"]] = entry["path"]
+        else:
+            raise Exception("Unknown type {0}".format(entry["type"]))
+
+    for loc in locales:
+        manifest["languages"][loc] = {
+            "version": get_timestamp_for_locale(os.path.join(l10n_basedir, loc)),
+            "chrome_resources": cr,
+        }
+
+    return json.dumps(manifest, indent=2, ensure_ascii=False)
+
+
+def main(args):
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--locales", help="List of language codes provided by the langpack"
+    )
+    parser.add_argument("--app-version", help="Version of the application")
+    parser.add_argument(
+        "--max-app-ver", help="Max version of the application the langpack is for"
+    )
+    parser.add_argument(
+        "--app-name", help="Name of the application the langpack is for"
+    )
+    parser.add_argument(
+        "--l10n-basedir", help="Base directory for locales used in the language pack"
+    )
+    parser.add_argument(
+        "--langpack-eid", help="Language pack id to use for this locale"
+    )
+    parser.add_argument(
+        "--metadata",
+        help="FTL file defining langpack metadata",
+    )
+    parser.add_argument("--input", help="Langpack directory.")
+
+    args = parser.parse_args(args)
+
+    chrome_entries = []
+    parse_chrome_manifest(
+        os.path.join(args.input, "chrome.manifest"), args.input, chrome_entries
+    )
+
+    ftl = parse_flat_ftl(args.metadata)
+
+    # Mangle the app version to set the min version (remove the patch level)
+    min_app_version = args.app_version
+    if "a" not in min_app_version:  # Don't mangle alpha versions
+        v = Version(min_app_version)
+        if args.app_name == "SeaMonkey":
+            # SeaMonkey is odd in that its version scheme hasn't changed for
+            # many years, so min is {major}.{minor}.0
+            min_app_version = "{}.{}.0".format(v.major, v.minor)
+        else:
+            # Language packs should have a minversion of {major}.0
+            min_app_version = "{}.0".format(v.major)
+
+    res = create_webmanifest(
+        args.locales,
+        args.app_version,
+        min_app_version,
+        args.max_app_ver,
+        args.app_name,
+        args.l10n_basedir,
+        args.langpack_eid,
+        ftl,
+        chrome_entries,
+    )
+    write_file(os.path.join(args.input, "manifest.json"), res)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/make_dmg.py b/python/mozbuild/mozbuild/action/make_dmg.py
new file mode 100644
index 0000000000..6dc19450fb
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_dmg.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import platform
+import sys
+from pathlib import Path
+
+from mozpack import dmg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+from mozbuild.repackaging.application_ini import get_application_ini_value
+
+is_linux = platform.system() == "Linux"
+
+
+def main(args):
+    parser = argparse.ArgumentParser(
+        description="Pack a directory of files into a DMG"
+    )
+
+    parser.add_argument("--dsstore", help="DS_Store file to copy into the DMG")
+    parser.add_argument("--background", help="Background image to copy into the DMG")
+    parser.add_argument("--icon", help="Volume icon to copy into the DMG")
+    parser.add_argument("--volume-name", help="Disk image volume name")
+
+    parser.add_argument("inpath", metavar="PATH_IN", help="Location of files to pack")
+    parser.add_argument("dmgfile", metavar="DMG_OUT", help="DMG File to create")
+
+    options = parser.parse_args(args)
+
+    extra_files = []
+    if options.dsstore:
+        extra_files.append((options.dsstore, ".DS_Store"))
+    if options.background:
+        extra_files.append((options.background, ".background/background.png"))
+    if options.icon:
+        extra_files.append((options.icon, ".VolumeIcon.icns"))
+
+    if options.volume_name:
+        volume_name = options.volume_name
+    else:
+        volume_name = get_application_ini_value(
+            options.inpath, "App", "CodeName", fallback="Name"
+        )
+
+    # Resolve the required tools
+    dmg_tool = bootstrap_toolchain("dmg/dmg")
+    hfs_tool = bootstrap_toolchain("dmg/hfsplus")
+    mkfshfs_tool = bootstrap_toolchain("hfsplus/newfs_hfs")
+
+    dmg.create_dmg(
+        source_directory=Path(options.inpath),
+        output_dmg=Path(options.dmgfile),
+        volume_name=volume_name,
+        extra_files=extra_files,
+        dmg_tool=dmg_tool,
+        hfs_tool=hfs_tool,
+        mkfshfs_tool=mkfshfs_tool,
+    )
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/make_unzip.py b/python/mozbuild/mozbuild/action/make_unzip.py
new file mode 100644
index 0000000000..e4d2902f53
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_unzip.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import subprocess
+import sys
+
+import buildconfig
+
+
+def make_unzip(package):
+    subprocess.check_call([buildconfig.substs["UNZIP"], package])
+
+
+def main(args):
+    if len(args) != 1:
+        print("Usage: make_unzip.py <package>", file=sys.stderr)
+        return 1
+    else:
+        make_unzip(args[0])
+        return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/node.py b/python/mozbuild/mozbuild/action/node.py
new file mode 100644
index 0000000000..fca0745b80
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/node.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import pipes
+import subprocess
+import sys
+
+import buildconfig
+import six
+
+SCRIPT_ALLOWLIST = [buildconfig.topsrcdir + "/devtools/client/shared/build/build.js"]
+
+ALLOWLIST_ERROR = """
+%s is not
+in SCRIPT_ALLOWLIST in python/mozbuild/mozbuild/action/node.py.
+Using NodeJS from moz.build is currently in beta, and node
+scripts to be executed need to be added to the allowlist and
+reviewed by a build peer so that we can get a better sense of
+how support should evolve. (To consult a build peer, raise a
+question in the #build channel at https://chat.mozilla.org.)
+"""
+
+
+def is_script_in_allowlist(script_path):
+    return script_path in SCRIPT_ALLOWLIST
+
+
+def execute_node_cmd(node_cmd_list):
+    """Execute the given node command list.
+
+    Arguments:
+        node_cmd_list -- a list of the command and arguments to be executed
+
+    Returns:
+        The set of dependencies which should trigger this command to be re-run.
+        This is ultimately returned to the build system for use by the backend
+        to ensure that incremental rebuilds happen when any dependency changes.
+
+    The node script is expected to output lines for all of the dependencies
+    to stdout, each prefixed by the string "dep:". These lines will make up
+    the returned set of dependencies. Any line not so-prefixed will simply be
+    printed to stderr instead.
+    """
+
+    try:
+        printable_cmd = " ".join(pipes.quote(arg) for arg in node_cmd_list)
+        print('Executing "{}"'.format(printable_cmd), file=sys.stderr)
+        sys.stderr.flush()
+
+        # We need to redirect stderr to a pipe because
+        # https://github.com/nodejs/node/issues/14752 causes issues with make.
+        proc = subprocess.Popen(
+            node_cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
+
+        stdout, stderr = proc.communicate()
+        retcode = proc.wait()
+
+        if retcode != 0:
+            print(stderr, file=sys.stderr)
+            sys.stderr.flush()
+            sys.exit(retcode)
+
+        # Process the node script output
+        #
+        # XXX Starting with an empty list means that node scripts can
+        # (intentionally or inadvertently) remove deps. Do we want this?
+        deps = []
+        for line in stdout.splitlines():
+            line = six.ensure_text(line)
+            if "dep:" in line:
+                deps.append(line.replace("dep:", ""))
+            else:
+                print(line, file=sys.stderr)
+                sys.stderr.flush()
+
+        return set(deps)
+
+    except subprocess.CalledProcessError as err:
+        # XXX On Mac (and elsewhere?) "OSError: [Errno 13] Permission denied"
+        # (at least sometimes) means "node executable not found". Can we
+        # disambiguate this from real "Permission denied" errors so that we
+        # can log such problems more clearly?
+        print(
+            """Failed with %s. Be sure to check that your mozconfig doesn't
+            have --disable-nodejs in it. If it does, try removing that line and
+            building again."""
+            % str(err),
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+
+def generate(output, node_script, *files):
+    """Call the given node_script to transform the given modules.

+    Arguments:
+        output -- a dummy file, used by the build system. Can be ignored.
+        node_script -- the script to be executed. Must be in the SCRIPT_ALLOWLIST
+        files -- files to be transformed, will be passed to the script as arguments
+
+    Returns:
+        The set of dependencies which should trigger this command to be re-run.
+        This is ultimately returned to the build system for use by the backend
+        to ensure that incremental rebuilds happen when any dependency changes.
+    """
+
+    node_interpreter = buildconfig.substs.get("NODEJS")
+    if not node_interpreter:
+        print(
+            """NODEJS not set. Be sure to check that your mozconfig doesn't
+            have --disable-nodejs in it. If it does, try removing that line
+            and building again.""",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    node_script = six.ensure_text(node_script)
+    if not isinstance(node_script, six.text_type):
+        print(
+            "moz.build file didn't pass a valid node script name to execute",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    if not is_script_in_allowlist(node_script):
+        print(ALLOWLIST_ERROR % (node_script), file=sys.stderr)
+        sys.exit(1)
+
+    node_cmd_list = [node_interpreter, node_script]
+    node_cmd_list.extend(files)
+
+    return execute_node_cmd(node_cmd_list)
diff --git a/python/mozbuild/mozbuild/action/package_generated_sources.py b/python/mozbuild/mozbuild/action/package_generated_sources.py
new file mode 100644
index 0000000000..d87a75fc6f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/package_generated_sources.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
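+
+# Package the build's generated source files (as reported by
+# get_generated_sources()) into a .tar.gz archive, dropping any entry
+# that does not live under the objdir.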
+
+import argparse
+import sys
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.files import BaseFile
+
+from mozbuild.generated_sources import get_generated_sources
+
+
+def main(argv):
+    parser = argparse.ArgumentParser(description="Produce archive of generated sources")
+    parser.add_argument("outputfile", help="File to write output to")
+    args = parser.parse_args(argv)
+
+    objdir_abspath = mozpath.abspath(buildconfig.topobjdir)
+
+    def is_valid_entry(entry):
+        if isinstance(entry[1], BaseFile):
+            entry_abspath = mozpath.abspath(entry[1].path)
+        else:
+            entry_abspath = mozpath.abspath(entry[1])
+        if not entry_abspath.startswith(objdir_abspath):
+            print(
+                "Warning: omitting generated source [%s] from archive" % entry_abspath,
+                file=sys.stderr,
+            )
+            return False
+        return True
+
+    files = dict(filter(is_valid_entry, get_generated_sources()))
+    with open(args.outputfile, "wb") as fh:
+        create_tar_gz_from_files(fh, files, compresslevel=5)
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/preprocessor.py b/python/mozbuild/mozbuild/action/preprocessor.py
new file mode 100644
index 0000000000..c59a05a90b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/preprocessor.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozbuild.action.util import log_build_task
+from mozbuild.preprocessor import Preprocessor
+
+
+def generate(output, *args):
+    pp = Preprocessor()
+    pp.out = output
+    pp.handleCommandLine(list(args), True)
+    return set(pp.includes)
+
+
+def main(args):
+    pp = Preprocessor()
+    pp.handleCommandLine(args, True)
+
+
+if __name__ == "__main__":
+    log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/process_define_files.py b/python/mozbuild/mozbuild/action/process_define_files.py
new file mode 100644
index 0000000000..d775b52b57
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_define_files.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import re
+import sys
+
+import mozpack.path as mozpath
+from buildconfig import topobjdir, topsrcdir
+
+from mozbuild.backend.configenvironment import PartialConfigEnvironment
+
+
+def process_define_file(output, input):
+    """Creates the given config header. A config header is generated by
+    taking the corresponding source file and replacing some *#define/#undef*
+    occurrences:
+
+    - "#undef NAME" is turned into "#define NAME VALUE"
+    - "#define NAME" is unchanged
+    - "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
+    - "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
+    - Whitespace is preserved.
+
+    As a special rule, "#undef ALLDEFINES" is turned into "#define NAME
+    VALUE" for all the defined variables.
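+
+    For example, if configure defined MOZ_FOO to 1 (an illustrative
+    name), "#undef MOZ_FOO" becomes "#define MOZ_FOO 1", while an
+    unknown "#undef MOZ_BAR" becomes "/* #undef MOZ_BAR */".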
+ """ + + path = os.path.abspath(input) + + config = PartialConfigEnvironment(topobjdir) + + if mozpath.basedir( + path, [mozpath.join(topsrcdir, "js/src")] + ) and not config.substs.get("JS_STANDALONE"): + config = PartialConfigEnvironment(mozpath.join(topobjdir, "js", "src")) + + with open(path, "r") as input: + r = re.compile( + "^\s*#\s*(?P[a-z]+)(?:\s+(?P\S+)(?:\s+(?P\S+))?)?", re.U + ) + for l in input: + m = r.match(l) + if m: + cmd = m.group("cmd") + name = m.group("name") + value = m.group("value") + if name: + if name == "ALLDEFINES": + if cmd == "define": + raise Exception( + "`#define ALLDEFINES` is not allowed in a " + "CONFIGURE_DEFINE_FILE" + ) + + def define_for_name(name, val): + """WebRTC files like to define WINVER and _WIN32_WINNT + via the command line, which raises a mass of macro + redefinition warnings. Just handle those macros + specially here.""" + define = "#define {name} {val}".format(name=name, val=val) + if name in ("_WIN32_IE", "_WIN32_WINNT", "WIN32", "WINVER"): + return "#if !defined({name})\n{define}\n#endif".format( + name=name, define=define + ) + return define + + defines = "\n".join( + sorted( + define_for_name(name, val) + for name, val in config.defines["ALLDEFINES"].items() + ) + ) + l = l[: m.start("cmd") - 1] + defines + l[m.end("name") :] + elif cmd == "define": + if value and name in config.defines: + l = ( + l[: m.start("value")] + + str(config.defines[name]) + + l[m.end("value") :] + ) + elif cmd == "undef": + if name in config.defines: + l = ( + l[: m.start("cmd")] + + "define" + + l[m.end("cmd") : m.end("name")] + + " " + + str(config.defines[name]) + + l[m.end("name") :] + ) + else: + l = "/* " + l[: m.end("name")] + " */" + l[m.end("name") :] + + output.write(l) + + deps = {path} + deps.update(config.get_dependencies()) + return deps + + +def main(argv): + parser = argparse.ArgumentParser(description="Process define files.") + + parser.add_argument("input", help="Input define file.") + + args = parser.parse_args(argv) + + return process_define_file(sys.stdout, args.input) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/process_install_manifest.py b/python/mozbuild/mozbuild/action/process_install_manifest.py new file mode 100644 index 0000000000..faf1376dba --- /dev/null +++ b/python/mozbuild/mozbuild/action/process_install_manifest.py @@ -0,0 +1,125 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os +import sys +import time + +from mozpack.copier import FileCopier, FileRegistry +from mozpack.errors import errors +from mozpack.files import BaseFile, FileFinder +from mozpack.manifests import InstallManifest + +from mozbuild.action.util import log_build_task +from mozbuild.util import DefinesAction + +COMPLETE = ( + "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; " + "Added/updated {updated}; " + "Removed {rm_files} files and {rm_dirs} directories." +) + + +def process_manifest(destdir, paths, track, no_symlinks=False, defines={}): + + if os.path.exists(track): + # We use the same format as install manifests for the tracking + # data. 
+ manifest = InstallManifest(path=track) + remove_unaccounted = FileRegistry() + dummy_file = BaseFile() + + finder = FileFinder(destdir, find_dotfiles=True) + for dest in manifest._dests: + for p, f in finder.find(dest): + remove_unaccounted.add(p, dummy_file) + + remove_empty_directories = True + remove_all_directory_symlinks = True + + else: + # If tracking is enabled and there is no file, we don't want to + # be removing anything. + remove_unaccounted = False + remove_empty_directories = False + remove_all_directory_symlinks = False + + manifest = InstallManifest() + for path in paths: + manifest |= InstallManifest(path=path) + + copier = FileCopier() + link_policy = "copy" if no_symlinks else "symlink" + manifest.populate_registry( + copier, defines_override=defines, link_policy=link_policy + ) + with errors.accumulate(): + result = copier.copy( + destdir, + remove_unaccounted=remove_unaccounted, + remove_all_directory_symlinks=remove_all_directory_symlinks, + remove_empty_directories=remove_empty_directories, + ) + + if track: + # We should record files that we actually copied. + # It is too late to expand wildcards when the track file is read. + manifest.write(path=track, expand_pattern=True) + + return result + + +def main(argv): + parser = argparse.ArgumentParser(description="Process install manifest files.") + + parser.add_argument("destdir", help="Destination directory.") + parser.add_argument("manifests", nargs="+", help="Path to manifest file(s).") + parser.add_argument( + "--no-symlinks", + action="store_true", + help="Do not install symbolic links. Always copy files", + ) + parser.add_argument( + "--track", + metavar="PATH", + required=True, + help="Use installed files tracking information from the given path.", + ) + parser.add_argument( + "-D", + action=DefinesAction, + dest="defines", + metavar="VAR[=VAL]", + help="Define a variable to override what is specified in the manifest", + ) + + args = parser.parse_args(argv) + + start = time.monotonic() + + result = process_manifest( + args.destdir, + args.manifests, + track=args.track, + no_symlinks=args.no_symlinks, + defines=args.defines, + ) + + elapsed = time.monotonic() - start + + print( + COMPLETE.format( + elapsed=elapsed, + dest=args.destdir, + existing=result.existing_files_count, + updated=result.updated_files_count, + rm_files=result.removed_files_count, + rm_dirs=result.removed_directories_count, + ) + ) + + +if __name__ == "__main__": + log_build_task(main, sys.argv[1:]) diff --git a/python/mozbuild/mozbuild/action/symbols_archive.py b/python/mozbuild/mozbuild/action/symbols_archive.py new file mode 100644 index 0000000000..75ecb71d17 --- /dev/null +++ b/python/mozbuild/mozbuild/action/symbols_archive.py @@ -0,0 +1,89 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
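+
+# Stage symbol files into a .zip or .tar.zst archive. Without
+# --full-archive only **/*.sym files are packaged; with it, everything
+# under the base directory except test symbols (which are kept on try).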
+ +import argparse +import os +import sys + +import mozpack.path as mozpath +from mozpack.files import FileFinder + + +def make_archive(archive_name, base, exclude, include): + compress = ["**/*.sym"] + finder = FileFinder(base, ignore=exclude) + if not include: + include = ["*"] + archive_basename = os.path.basename(archive_name) + + def fill_archive(add_file): + for pat in include: + for p, f in finder.find(pat): + print(' Adding to "%s":\n\t"%s"' % (archive_basename, p)) + add_file(p, f) + + with open(archive_name, "wb") as fh: + if archive_basename.endswith(".zip"): + from mozpack.mozjar import JarWriter + + with JarWriter(fileobj=fh, compress_level=5) as writer: + + def add_file(p, f): + should_compress = any(mozpath.match(p, pat) for pat in compress) + writer.add( + p.encode("utf-8"), + f, + mode=f.mode, + compress=should_compress, + skip_duplicates=True, + ) + + fill_archive(add_file) + elif archive_basename.endswith(".tar.zst"): + import tarfile + + import zstandard + + ctx = zstandard.ZstdCompressor(threads=-1) + with ctx.stream_writer(fh) as zstdwriter: + with tarfile.open( + mode="w|", fileobj=zstdwriter, bufsize=1024 * 1024 + ) as tar: + + def add_file(p, f): + info = tar.gettarinfo(os.path.join(base, p), p) + tar.addfile(info, f.open()) + + fill_archive(add_file) + else: + raise Exception( + "Unsupported archive format for {}".format(archive_basename) + ) + + +def main(argv): + parser = argparse.ArgumentParser(description="Produce a symbols archive") + parser.add_argument("archive", help="Which archive to generate") + parser.add_argument("base", help="Base directory to package") + parser.add_argument( + "--full-archive", action="store_true", help="Generate a full symbol archive" + ) + + args = parser.parse_args(argv) + + excludes = [] + includes = [] + + if args.full_archive: + # We allow symbols for tests to be included when building on try + if os.environ.get("MH_BRANCH", "unknown") != "try": + excludes = ["*test*", "*Test*"] + else: + includes = ["**/*.sym"] + + make_archive(args.archive, args.base, excludes, includes) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/python/mozbuild/mozbuild/action/test_archive.py b/python/mozbuild/mozbuild/action/test_archive.py new file mode 100644 index 0000000000..06fef60f8d --- /dev/null +++ b/python/mozbuild/mozbuild/action/test_archive.py @@ -0,0 +1,875 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This action is used to produce test archives. +# +# Ideally, the data in this file should be defined in moz.build files. +# It is defined inline because this was easiest to make test archive +# generation faster. 
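+#
+# Each entry in the ARCHIVE_FILES map below is a dict understood by
+# find_files(): `source` and `base` anchor a FileFinder, `pattern` /
+# `patterns` select files, an optional `dest` re-roots them inside the
+# archive, `ignore` excludes paths, and `manifest` / `manifests` pull in
+# every directory referenced by the given test manifests.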
+ +import argparse +import itertools +import os +import sys +import time + +import buildconfig +import mozpack.path as mozpath +from manifestparser import TestManifest +from mozpack.archive import create_tar_gz_from_files +from mozpack.copier import FileRegistry +from mozpack.files import ExistingFile, FileFinder +from mozpack.manifests import InstallManifest +from mozpack.mozjar import JarWriter +from reftest import ReftestManifest + +from mozbuild.util import ensureParentDir + +STAGE = mozpath.join(buildconfig.topobjdir, "dist", "test-stage") + +TEST_HARNESS_BINS = [ + "BadCertAndPinningServer", + "DelegatedCredentialsServer", + "EncryptedClientHelloServer", + "FaultyServer", + "GenerateOCSPResponse", + "OCSPStaplingServer", + "SanctionsTestServer", + "SmokeDMD", + "certutil", + "crashinject", + "geckodriver", + "http3server", + "minidumpwriter", + "pk12util", + "screenshot", + "screentopng", + "ssltunnel", + "xpcshell", +] + +TEST_HARNESS_DLLS = ["crashinjectdll", "mozglue"] + +GMP_TEST_PLUGIN_DIRS = ["gmp-fake/**", "gmp-fakeopenh264/**"] + +# These entries will be used by artifact builds to re-construct an +# objdir with the appropriate generated support files. +OBJDIR_TEST_FILES = { + "xpcshell": { + "source": buildconfig.topobjdir, + "base": "_tests/xpcshell", + "pattern": "**", + "dest": "xpcshell/tests", + }, + "mochitest": { + "source": buildconfig.topobjdir, + "base": "_tests/testing", + "pattern": "mochitest/**", + }, +} + + +ARCHIVE_FILES = { + "common": [ + { + "source": STAGE, + "base": "", + "pattern": "**", + "ignore": [ + "cppunittest/**", + "condprof/**", + "gtest/**", + "mochitest/**", + "reftest/**", + "talos/**", + "raptor/**", + "awsy/**", + "web-platform/**", + "xpcshell/**", + "updater-dep/**", + "jsreftest/**", + "jit-test/**", + "jittest/**", # To make the ignore checker happy + "perftests/**", + "fuzztest/**", + ], + }, + {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "modules/**"}, + { + "source": buildconfig.topsrcdir, + "base": "testing/marionette", + "patterns": ["client/**", "harness/**", "mach_test_package_commands.py"], + "dest": "marionette", + "ignore": ["client/docs", "harness/marionette_harness/tests"], + }, + { + "source": buildconfig.topsrcdir, + "base": "", + "manifests": [ + "testing/marionette/harness/marionette_harness/tests/unit-tests.ini" + ], + # We also need the manifests and harness_unit tests + "pattern": "testing/marionette/harness/marionette_harness/tests/**", + "dest": "marionette/tests", + }, + {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "mozbase/**"}, + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "firefox-ui/**", + "ignore": ["firefox-ui/tests"], + }, + { + "source": buildconfig.topsrcdir, + "base": "", + "pattern": "testing/firefox-ui/tests", + "dest": "firefox-ui/tests", + }, + { + "source": buildconfig.topsrcdir, + "base": "toolkit/components/telemetry/tests/marionette", + "pattern": "/**", + "dest": "telemetry/marionette", + }, + {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "tps/**"}, + { + "source": buildconfig.topsrcdir, + "base": "services/sync/", + "pattern": "tps/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "services/sync/tests/tps", + "pattern": "**", + "dest": "tps/tests", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/web-platform/tests/tools/wptserve", + "pattern": "**", + "dest": "tools/wptserve", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/web-platform/tests/tools/third_party", + "pattern": 
"**", + "dest": "tools/wpt_third_party", + }, + { + "source": buildconfig.topsrcdir, + "base": "python/mozterm", + "pattern": "**", + "dest": "tools/mozterm", + }, + { + "source": buildconfig.topsrcdir, + "base": "xpcom/geckoprocesstypes_generator", + "pattern": "**", + "dest": "tools/geckoprocesstypes_generator", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/python/six", + "pattern": "**", + "dest": "tools/six", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/python/distro", + "pattern": "**", + "dest": "tools/distro", + }, + {"source": buildconfig.topobjdir, "base": "", "pattern": "mozinfo.json"}, + { + "source": buildconfig.topobjdir, + "base": "dist/bin", + "patterns": [ + "%s%s" % (f, buildconfig.substs["BIN_SUFFIX"]) + for f in TEST_HARNESS_BINS + ] + + [ + "%s%s%s" + % ( + buildconfig.substs["DLL_PREFIX"], + f, + buildconfig.substs["DLL_SUFFIX"], + ) + for f in TEST_HARNESS_DLLS + ], + "dest": "bin", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/bin", + "patterns": GMP_TEST_PLUGIN_DIRS, + "dest": "bin/plugins", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/bin", + "patterns": ["dmd.py", "fix_stacks.py"], + "dest": "bin", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/bin/components", + "patterns": ["httpd.js"], + "dest": "bin/components", + }, + { + "source": buildconfig.topsrcdir, + "base": "build/pgo/certs", + "pattern": "**", + "dest": "certs", + }, + ], + "cppunittest": [ + {"source": STAGE, "base": "", "pattern": "cppunittest/**"}, + # We don't ship these files if startup cache is disabled, which is + # rare. But it shouldn't matter for test archives. + { + "source": buildconfig.topsrcdir, + "base": "startupcache/test", + "pattern": "TestStartupCacheTelemetry.*", + "dest": "cppunittest", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "runcppunittests.py", + "dest": "cppunittest", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "remotecppunittests.py", + "dest": "cppunittest", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "cppunittest.ini", + "dest": "cppunittest", + }, + { + "source": buildconfig.topobjdir, + "base": "", + "pattern": "mozinfo.json", + "dest": "cppunittest", + }, + ], + "gtest": [{"source": STAGE, "base": "", "pattern": "gtest/**"}], + "mochitest": [ + OBJDIR_TEST_FILES["mochitest"], + { + "source": buildconfig.topobjdir, + "base": "_tests/testing", + "pattern": "mochitest/**", + }, + {"source": STAGE, "base": "", "pattern": "mochitest/**"}, + { + "source": buildconfig.topobjdir, + "base": "", + "pattern": "mozinfo.json", + "dest": "mochitest", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/xpi-stage", + "pattern": "mochijar/**", + "dest": "mochitest", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/xpi-stage", + "pattern": "specialpowers/**", + "dest": "mochitest/extensions", + }, + ], + "mozharness": [ + { + "source": buildconfig.topsrcdir, + "base": "testing/mozharness", + "pattern": "**", + }, + { + "source": buildconfig.topsrcdir, + "base": "", + "pattern": "third_party/python/_venv/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/manifestparser", + "pattern": "manifestparser/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/mozfile", + "pattern": "mozfile/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/mozinfo", + "pattern": "mozinfo/**", + }, + { + "source": buildconfig.topsrcdir, 
+ "base": "testing/mozbase/mozprocess", + "pattern": "mozprocess/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/python/six", + "pattern": "six.py", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/python/distro", + "pattern": "distro.py", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/python/packaging", + "pattern": "**", + }, + { + "source": buildconfig.topsrcdir, + "base": "python/mozbuild/mozbuild/action", + "pattern": "tooltool.py", + "dest": "external_tools", + }, + ], + "reftest": [ + {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "reftest/**"}, + { + "source": buildconfig.topobjdir, + "base": "", + "pattern": "mozinfo.json", + "dest": "reftest", + }, + { + "source": buildconfig.topsrcdir, + "base": "", + "manifests": [ + "layout/reftests/reftest.list", + "layout/reftests/reftest-qr.list", + "testing/crashtest/crashtests.list", + ], + "dest": "reftest/tests", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/xpi-stage", + "pattern": "reftest/**", + "dest": "reftest", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/xpi-stage", + "pattern": "specialpowers/**", + "dest": "reftest", + }, + ], + "talos": [ + {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "talos/**"}, + { + "source": buildconfig.topsrcdir, + "base": "testing/profiles", + "pattern": "**", + "dest": "talos/talos/profile_data", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/webkit/PerformanceTests", + "pattern": "**", + "dest": "talos/talos/tests/webkit/PerformanceTests/", + }, + ], + "perftests": [ + {"source": buildconfig.topsrcdir, "pattern": "testing/mozbase/**"}, + {"source": buildconfig.topsrcdir, "pattern": "testing/condprofile/**"}, + {"source": buildconfig.topsrcdir, "pattern": "testing/performance/**"}, + {"source": buildconfig.topsrcdir, "pattern": "third_party/python/**"}, + {"source": buildconfig.topsrcdir, "pattern": "tools/lint/eslint/**"}, + {"source": buildconfig.topsrcdir, "pattern": "**/perftest_*.js"}, + {"source": buildconfig.topsrcdir, "pattern": "**/hooks_*py"}, + {"source": buildconfig.topsrcdir, "pattern": "build/autoconf/**"}, + {"source": buildconfig.topsrcdir, "pattern": "build/moz.configure/**"}, + {"source": buildconfig.topsrcdir, "pattern": "python/**"}, + {"source": buildconfig.topsrcdir, "pattern": "build/mach_initialize.py"}, + { + "source": buildconfig.topsrcdir, + "pattern": "python/sites/build.txt", + }, + { + "source": buildconfig.topsrcdir, + "pattern": "python/sites/common.txt", + }, + { + "source": buildconfig.topsrcdir, + "pattern": "python/sites/mach.txt", + }, + {"source": buildconfig.topsrcdir, "pattern": "mach/**"}, + { + "source": buildconfig.topsrcdir, + "pattern": "testing/web-platform/tests/tools/third_party/certifi/**", + }, + {"source": buildconfig.topsrcdir, "pattern": "testing/mozharness/**"}, + {"source": buildconfig.topsrcdir, "pattern": "browser/config/**"}, + { + "source": buildconfig.topobjdir, + "base": "_tests/modules", + "pattern": "**", + "dest": "bin/modules", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/bin", + "patterns": [ + "browser/**", + "chrome/**", + "chrome.manifest", + "components/**", + "http3server", + "*.ini", + "localization/**", + "modules/**", + "update.locale", + "greprefs.js", + ], + "dest": "bin", + }, + { + "source": buildconfig.topsrcdir, + "base": "netwerk/test/http3serverDB", + "pattern": "**", + "dest": "netwerk/test/http3serverDB", + }, + ], + "condprof": [ + { + "source": 
buildconfig.topsrcdir, + "base": "testing", + "pattern": "condprofile/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/mozfile", + "pattern": "**", + "dest": "condprofile/mozfile", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/mozprofile", + "pattern": "**", + "dest": "condprofile/mozprofile", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/mozdevice", + "pattern": "**", + "dest": "condprofile/mozdevice", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/mozbase/mozlog", + "pattern": "**", + "dest": "condprofile/mozlog", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/python/virtualenv", + "pattern": "**", + "dest": "condprofile/virtualenv", + }, + ], + "raptor": [ + {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "raptor/**"}, + { + "source": buildconfig.topsrcdir, + "base": "testing/profiles", + "pattern": "**", + "dest": "raptor/raptor/profile_data", + }, + { + "source": buildconfig.topsrcdir, + "base": "third_party/webkit/PerformanceTests", + "pattern": "**", + "dest": "raptor/raptor/tests/webkit/PerformanceTests/", + }, + ], + "awsy": [ + {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "awsy/**"} + ], + "web-platform": [ + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "web-platform/meta/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "web-platform/mozilla/**", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing", + "pattern": "web-platform/tests/**", + "ignore": ["web-platform/tests/tools/wpt_third_party"], + }, + { + "source": buildconfig.topobjdir, + "base": "_tests", + "pattern": "web-platform/**", + }, + { + "source": buildconfig.topobjdir, + "base": "", + "pattern": "mozinfo.json", + "dest": "web-platform", + }, + ], + "xpcshell": [ + OBJDIR_TEST_FILES["xpcshell"], + { + "source": buildconfig.topsrcdir, + "base": "testing/xpcshell", + "patterns": [ + "head.js", + "mach_test_package_commands.py", + "moz-http2/**", + "node-http2/**", + "node_ip/**", + "node-ws/**", + "dns-packet/**", + "remotexpcshelltests.py", + "runxpcshelltests.py", + "selftest.py", + "xpcshellcommandline.py", + ], + "dest": "xpcshell", + }, + {"source": STAGE, "base": "", "pattern": "xpcshell/**"}, + { + "source": buildconfig.topobjdir, + "base": "", + "pattern": "mozinfo.json", + "dest": "xpcshell", + }, + { + "source": buildconfig.topobjdir, + "base": "build", + "pattern": "automation.py", + "dest": "xpcshell", + }, + { + "source": buildconfig.topsrcdir, + "base": "testing/profiles", + "pattern": "**", + "dest": "xpcshell/profile_data", + }, + { + "source": buildconfig.topobjdir, + "base": "dist/bin", + "pattern": "http3server%s" % buildconfig.substs["BIN_SUFFIX"], + "dest": "xpcshell/http3server", + }, + { + "source": buildconfig.topsrcdir, + "base": "netwerk/test/http3serverDB", + "pattern": "**", + "dest": "xpcshell/http3server/http3serverDB", + }, + ], + "updater-dep": [ + { + "source": buildconfig.topobjdir, + "base": "_tests/updater-dep", + "pattern": "**", + "dest": "updater-dep", + }, + # Required by the updater on Linux + { + "source": buildconfig.topobjdir, + "base": "config/external/sqlite", + "pattern": "libmozsqlite3.so", + "dest": "updater-dep", + }, + ], + "jsreftest": [{"source": STAGE, "base": "", "pattern": "jsreftest/**"}], + "fuzztest": [ + {"source": buildconfig.topsrcdir, "pattern": "tools/fuzzing/smoke/**"} + ], + "jittest": [ + { + "source": buildconfig.topsrcdir, + "base": 
"js/src", + "pattern": "jit-test/**", + "dest": "jit-test", + }, + { + "source": buildconfig.topsrcdir, + "base": "js/src/tests", + "pattern": "non262/shell.js", + "dest": "jit-test/tests", + }, + { + "source": buildconfig.topsrcdir, + "base": "js/src/tests", + "pattern": "non262/Math/shell.js", + "dest": "jit-test/tests", + }, + { + "source": buildconfig.topsrcdir, + "base": "js/src/tests", + "pattern": "non262/reflect-parse/Match.js", + "dest": "jit-test/tests", + }, + { + "source": buildconfig.topsrcdir, + "base": "js/src/tests", + "pattern": "lib/**", + "dest": "jit-test/tests", + }, + { + "source": buildconfig.topsrcdir, + "base": "js/src", + "pattern": "jsapi.h", + "dest": "jit-test", + }, + ], +} + +if buildconfig.substs.get("MOZ_CODE_COVERAGE"): + ARCHIVE_FILES["common"].append( + { + "source": buildconfig.topsrcdir, + "base": "python/mozbuild/", + "patterns": ["mozpack/**", "mozbuild/codecoverage/**"], + } + ) + + +if buildconfig.substs.get("MOZ_ASAN") and buildconfig.substs.get("CLANG_CL"): + asan_dll = { + "source": buildconfig.topobjdir, + "base": "dist/bin", + "pattern": os.path.basename(buildconfig.substs["MOZ_CLANG_RT_ASAN_LIB_PATH"]), + "dest": "bin", + } + ARCHIVE_FILES["common"].append(asan_dll) + + +if buildconfig.substs.get("commtopsrcdir"): + commtopsrcdir = buildconfig.substs.get("commtopsrcdir") + mozharness_comm = { + "source": commtopsrcdir, + "base": "testing/mozharness", + "pattern": "**", + } + ARCHIVE_FILES["mozharness"].append(mozharness_comm) + marionette_comm = { + "source": commtopsrcdir, + "base": "", + "manifest": "testing/marionette/unit-tests.ini", + "dest": "marionette/tests/comm", + } + ARCHIVE_FILES["common"].append(marionette_comm) + thunderbirdinstance = { + "source": commtopsrcdir, + "base": "testing/marionette", + "pattern": "thunderbirdinstance.py", + "dest": "marionette/client/marionette_driver", + } + ARCHIVE_FILES["common"].append(thunderbirdinstance) + + +# "common" is our catch all archive and it ignores things from other archives. +# Verify nothing sneaks into ARCHIVE_FILES without a corresponding exclusion +# rule in the "common" archive. +for k, v in ARCHIVE_FILES.items(): + # Skip mozharness because it isn't staged. + if k in ("common", "mozharness"): + continue + + ignores = set( + itertools.chain(*(e.get("ignore", []) for e in ARCHIVE_FILES["common"])) + ) + + if not any(p.startswith("%s/" % k) for p in ignores): + raise Exception('"common" ignore list probably should contain %s' % k) + + +def find_generated_harness_files(): + # TEST_HARNESS_FILES end up in an install manifest at + # $topsrcdir/_build_manifests/install/_tests. + manifest = InstallManifest( + mozpath.join(buildconfig.topobjdir, "_build_manifests", "install", "_tests") + ) + registry = FileRegistry() + manifest.populate_registry(registry) + # Conveniently, the generated files we care about will already + # exist in the objdir, so we can identify relevant files if + # they're an `ExistingFile` instance. + return [ + mozpath.join("_tests", p) + for p in registry.paths() + if isinstance(registry[p], ExistingFile) + ] + + +def find_files(archive): + extra_entries = [] + generated_harness_files = find_generated_harness_files() + + if archive == "common": + # Construct entries ensuring all our generated harness files are + # packaged in the common tests archive. 
+ packaged_paths = set() + for entry in OBJDIR_TEST_FILES.values(): + pat = mozpath.join(entry["base"], entry["pattern"]) + del entry["pattern"] + patterns = [] + for path in generated_harness_files: + if mozpath.match(path, pat): + patterns.append(path[len(entry["base"]) + 1 :]) + packaged_paths.add(path) + if patterns: + entry["patterns"] = patterns + extra_entries.append(entry) + entry = {"source": buildconfig.topobjdir, "base": "_tests", "patterns": []} + for path in set(generated_harness_files) - packaged_paths: + entry["patterns"].append(path[len("_tests") + 1 :]) + extra_entries.append(entry) + + for entry in ARCHIVE_FILES[archive] + extra_entries: + source = entry["source"] + dest = entry.get("dest") + base = entry.get("base", "") + + pattern = entry.get("pattern") + patterns = entry.get("patterns", []) + if pattern: + patterns.append(pattern) + + manifest = entry.get("manifest") + manifests = entry.get("manifests", []) + if manifest: + manifests.append(manifest) + if manifests: + dirs = find_manifest_dirs(os.path.join(source, base), manifests) + patterns.extend({"{}/**".format(d) for d in dirs}) + + ignore = list(entry.get("ignore", [])) + ignore.extend(["**/.flake8", "**/.mkdir.done", "**/*.pyc"]) + + if archive not in ("common", "updater-dep") and base.startswith("_tests"): + # We may have generated_harness_files to exclude from this entry. + for path in generated_harness_files: + if path.startswith(base): + ignore.append(path[len(base) + 1 :]) + + common_kwargs = {"find_dotfiles": True, "ignore": ignore} + + finder = FileFinder(os.path.join(source, base), **common_kwargs) + + for pattern in patterns: + for p, f in finder.find(pattern): + if dest: + p = mozpath.join(dest, p) + yield p, f + + +def find_manifest_dirs(topsrcdir, manifests): + """Routine to retrieve directories specified in a manifest, relative to topsrcdir. + + It does not recurse into manifests, as we currently have no need for that. + """ + dirs = set() + + for p in manifests: + p = os.path.join(topsrcdir, p) + + if p.endswith(".ini"): + test_manifest = TestManifest() + test_manifest.read(p) + dirs |= set([os.path.dirname(m) for m in test_manifest.manifests()]) + + elif p.endswith(".list"): + m = ReftestManifest() + m.load(p) + dirs |= m.dirs + + else: + raise Exception( + '"{}" is not a supported manifest format.'.format( + os.path.splitext(p)[1] + ) + ) + + dirs = {mozpath.normpath(d[len(topsrcdir) :]).lstrip("/") for d in dirs} + + # Filter out children captured by parent directories because duplicates + # will confuse things later on. + def parents(p): + while True: + p = mozpath.dirname(p) + if not p: + break + yield p + + seen = set() + for d in sorted(dirs, key=len): + if not any(p in seen for p in parents(d)): + seen.add(d) + + return sorted(seen) + + +def main(argv): + parser = argparse.ArgumentParser(description="Produce test archives") + parser.add_argument("archive", help="Which archive to generate") + parser.add_argument("outputfile", help="File to write output to") + + args = parser.parse_args(argv) + + out_file = args.outputfile + if not out_file.endswith((".tar.gz", ".zip")): + raise Exception("expected tar.gz or zip output file") + + file_count = 0 + t_start = time.monotonic() + ensureParentDir(out_file) + res = find_files(args.archive) + with open(out_file, "wb") as fh: + # Experimentation revealed that level 5 is significantly faster and has + # marginally larger sizes than higher values and is the sweet spot + # for optimal compression. 
Read the detailed commit message that + # introduced this for raw numbers. + if out_file.endswith(".tar.gz"): + files = dict(res) + create_tar_gz_from_files(fh, files, compresslevel=5) + file_count = len(files) + elif out_file.endswith(".zip"): + with JarWriter(fileobj=fh, compress_level=5) as writer: + for p, f in res: + writer.add( + p.encode("utf-8"), f.read(), mode=f.mode, skip_duplicates=True + ) + file_count += 1 + else: + raise Exception("unhandled file extension: %s" % out_file) + + duration = time.monotonic() - t_start + zip_size = os.path.getsize(args.outputfile) + basename = os.path.basename(args.outputfile) + print( + "Wrote %d files in %d bytes to %s in %.2fs" + % (file_count, zip_size, basename, duration) + ) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/tooltool.py b/python/mozbuild/mozbuild/action/tooltool.py new file mode 100755 index 0000000000..002360cd65 --- /dev/null +++ b/python/mozbuild/mozbuild/action/tooltool.py @@ -0,0 +1,1714 @@ +#!/usr/bin/env python3 + +# tooltool is a lookaside cache implemented in Python +# Copyright (C) 2011 John H. Ford +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation version 2 +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA +# 02110-1301, USA. + +# A manifest file specifies files in that directory that are stored +# elsewhere. This file should only list files in the same directory +# in which the manifest file resides and it should be called +# 'manifest.tt' + +import base64 +import calendar +import hashlib +import hmac +import json +import logging +import math +import optparse +import os +import pprint +import re +import shutil +import ssl +import stat +import sys +import tarfile +import tempfile +import threading +import time +import zipfile +from contextlib import closing, contextmanager +from functools import wraps +from io import BytesIO, open +from random import random +from subprocess import PIPE, Popen + +if os.name == "nt": + import certifi + +__version__ = "1.4.0" + +# Allowed request header characters: +# !#$%&'()*+,-./:;<=>?@[]^_`{|}~ and space, a-z, A-Z, 0-9, \, " +REQUEST_HEADER_ATTRIBUTE_CHARS = re.compile( + r"^[ a-zA-Z0-9_\!#\$%&'\(\)\*\+,\-\./\:;<\=>\?@\[\]\^`\{\|\}~]*$" +) +DEFAULT_MANIFEST_NAME = "manifest.tt" +TOOLTOOL_PACKAGE_SUFFIX = ".TOOLTOOL-PACKAGE" +HAWK_VER = 1 +PY3 = sys.version_info[0] == 3 + +if PY3: + six_binary_type = bytes + unicode = ( + str # Silence `pyflakes` from reporting `undefined name 'unicode'` in Python 3. 
+ ) + import urllib.request as urllib2 + from http.client import HTTPConnection, HTTPSConnection + from urllib.error import HTTPError, URLError + from urllib.parse import urljoin, urlparse + from urllib.request import Request +else: + six_binary_type = str + import urllib2 + from httplib import HTTPConnection, HTTPSConnection + from urllib2 import HTTPError, Request, URLError + from urlparse import urljoin, urlparse + + +log = logging.getLogger(__name__) + + +# Vendored code from `redo` module +def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1): + """ + This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo + A generator function that sleeps between retries, handles exponential + backoff and jitter. The action you are retrying is meant to run after + retrier yields. + """ + jitter = jitter or 0 # py35 barfs on the next line if jitter is None + if jitter > sleeptime: + # To prevent negative sleep times + raise Exception( + "jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime) + ) + + sleeptime_real = sleeptime + for _ in range(attempts): + log.debug("attempt %i/%i", _ + 1, attempts) + + yield sleeptime_real + + if jitter: + sleeptime_real = sleeptime + random.uniform(-jitter, jitter) + # our jitter should scale along with the sleeptime + jitter = jitter * sleepscale + else: + sleeptime_real = sleeptime + + sleeptime *= sleepscale + + if sleeptime_real > max_sleeptime: + sleeptime_real = max_sleeptime + + # Don't need to sleep the last time + if _ < attempts - 1: + log.debug( + "sleeping for %.2fs (attempt %i/%i)", sleeptime_real, _ + 1, attempts + ) + time.sleep(sleeptime_real) + + +def retry( + action, + attempts=5, + sleeptime=60, + max_sleeptime=5 * 60, + sleepscale=1.5, + jitter=1, + retry_exceptions=(Exception,), + cleanup=None, + args=(), + kwargs={}, + log_args=True, +): + """ + This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo + Calls an action function until it succeeds, or we give up. + """ + assert callable(action) + assert not cleanup or callable(cleanup) + + action_name = getattr(action, "__name__", action) + if log_args and (args or kwargs): + log_attempt_args = ( + "retry: calling %s with args: %s," " kwargs: %s, attempt #%d", + action_name, + args, + kwargs, + ) + else: + log_attempt_args = ("retry: calling %s, attempt #%d", action_name) + + if max_sleeptime < sleeptime: + log.debug("max_sleeptime %d less than sleeptime %d", max_sleeptime, sleeptime) + + n = 1 + for _ in retrier( + attempts=attempts, + sleeptime=sleeptime, + max_sleeptime=max_sleeptime, + sleepscale=sleepscale, + jitter=jitter, + ): + try: + logfn = log.info if n != 1 else log.debug + logfn_args = log_attempt_args + (n,) + logfn(*logfn_args) + return action(*args, **kwargs) + except retry_exceptions: + log.debug("retry: Caught exception: ", exc_info=True) + if cleanup: + cleanup() + if n == attempts: + log.info("retry: Giving up on %s", action_name) + raise + continue + finally: + n += 1 + + +def retriable(*retry_args, **retry_kwargs): + """ + This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo + A decorator factory for retry(). Wrap your function in @retriable(...) to + give it retry powers! 
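+ A minimal sketch of typical usage (flaky_fetch is a hypothetical
+ function that sometimes raises):
+
+     @retriable(attempts=3, sleeptime=1, jitter=0)
+     def flaky_fetch(url):
+         ...
+
+ Each call to flaky_fetch() is then attempted up to three times, with
+ exponentially growing sleeps between attempts, before the final
+ exception is re-raised.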
+ """ + + def _retriable_factory(func): + @wraps(func) + def _retriable_wrapper(*args, **kwargs): + return retry(func, args=args, kwargs=kwargs, *retry_args, **retry_kwargs) + + return _retriable_wrapper + + return _retriable_factory + + +# end of vendored code from redo module + + +def request_has_data(req): + if PY3: + return req.data is not None + return req.has_data() + + +def get_hexdigest(val): + return hashlib.sha512(val).hexdigest() + + +class FileRecordJSONEncoderException(Exception): + pass + + +class InvalidManifest(Exception): + pass + + +class ExceptionWithFilename(Exception): + def __init__(self, filename): + Exception.__init__(self) + self.filename = filename + + +class BadFilenameException(ExceptionWithFilename): + pass + + +class DigestMismatchException(ExceptionWithFilename): + pass + + +class MissingFileException(ExceptionWithFilename): + pass + + +class InvalidCredentials(Exception): + pass + + +class BadHeaderValue(Exception): + pass + + +def parse_url(url): + url_parts = urlparse(url) + url_dict = { + "scheme": url_parts.scheme, + "hostname": url_parts.hostname, + "port": url_parts.port, + "path": url_parts.path, + "resource": url_parts.path, + "query": url_parts.query, + } + if len(url_dict["query"]) > 0: + url_dict["resource"] = "%s?%s" % ( + url_dict["resource"], # pragma: no cover + url_dict["query"], + ) + + if url_parts.port is None: + if url_parts.scheme == "http": + url_dict["port"] = 80 + elif url_parts.scheme == "https": # pragma: no cover + url_dict["port"] = 443 + return url_dict + + +def utc_now(offset_in_seconds=0.0): + return int(math.floor(calendar.timegm(time.gmtime()) + float(offset_in_seconds))) + + +def random_string(length): + return base64.urlsafe_b64encode(os.urandom(length))[:length] + + +def prepare_header_val(val): + if isinstance(val, six_binary_type): + val = val.decode("utf-8") + + if not REQUEST_HEADER_ATTRIBUTE_CHARS.match(val): + raise BadHeaderValue( # pragma: no cover + "header value value={val} contained an illegal character".format( + val=repr(val) + ) + ) + + return val + + +def parse_content_type(content_type): # pragma: no cover + if content_type: + return content_type.split(";")[0].strip().lower() + else: + return "" + + +def calculate_payload_hash(algorithm, payload, content_type): # pragma: no cover + parts = [ + part if isinstance(part, six_binary_type) else part.encode("utf8") + for part in [ + "hawk." + str(HAWK_VER) + ".payload\n", + parse_content_type(content_type) + "\n", + payload or "", + "\n", + ] + ] + + p_hash = hashlib.new(algorithm) + for p in parts: + p_hash.update(p) + + log.debug( + "calculating payload hash from:\n{parts}".format(parts=pprint.pformat(parts)) + ) + + return base64.b64encode(p_hash.digest()) + + +def validate_taskcluster_credentials(credentials): + if not hasattr(credentials, "__getitem__"): + raise InvalidCredentials( + "credentials must be a dict-like object" + ) # pragma: no cover + try: + credentials["clientId"] + credentials["accessToken"] + except KeyError: # pragma: no cover + etype, val, tb = sys.exc_info() + raise InvalidCredentials("{etype}: {val}".format(etype=etype, val=val)) + + +def normalize_header_attr(val): + if isinstance(val, six_binary_type): + return val.decode("utf-8") + return val # pragma: no cover + + +def normalize_string( + mac_type, + timestamp, + nonce, + method, + name, + host, + port, + content_hash, +): + return "\n".join( + [ + normalize_header_attr(header) + # The blank lines are important. They follow what the Node Hawk lib does. + for header in [ + "hawk." 
+ str(HAWK_VER) + "." + mac_type, + timestamp, + nonce, + method or "", + name or "", + host, + port, + content_hash or "", + "", # for ext which is empty in this case + "", # Add trailing new line. + ] + ] + ) + + +def calculate_mac( + mac_type, + access_token, + algorithm, + timestamp, + nonce, + method, + name, + host, + port, + content_hash, +): + normalized = normalize_string( + mac_type, timestamp, nonce, method, name, host, port, content_hash + ) + log.debug("normalized resource for mac calc: {norm}".format(norm=normalized)) + digestmod = getattr(hashlib, algorithm) + + if not isinstance(normalized, six_binary_type): + normalized = normalized.encode("utf8") + + if not isinstance(access_token, six_binary_type): + access_token = access_token.encode("ascii") + + result = hmac.new(access_token, normalized, digestmod) + return base64.b64encode(result.digest()) + + +def make_taskcluster_header(credentials, req): + validate_taskcluster_credentials(credentials) + + url = req.get_full_url() + method = req.get_method() + algorithm = "sha256" + timestamp = str(utc_now()) + nonce = random_string(6) + url_parts = parse_url(url) + + content_hash = None + if request_has_data(req): + if PY3: + data = req.data + else: + data = req.get_data() + content_hash = calculate_payload_hash( # pragma: no cover + algorithm, + data, + # maybe we should detect this from req.headers but we anyway expect json + content_type="application/json", + ) + + mac = calculate_mac( + "header", + credentials["accessToken"], + algorithm, + timestamp, + nonce, + method, + url_parts["resource"], + url_parts["hostname"], + str(url_parts["port"]), + content_hash, + ) + + header = 'Hawk mac="{}"'.format(prepare_header_val(mac)) + + if content_hash: # pragma: no cover + header = '{}, hash="{}"'.format(header, prepare_header_val(content_hash)) + + header = '{header}, id="{id}", ts="{ts}", nonce="{nonce}"'.format( + header=header, + id=prepare_header_val(credentials["clientId"]), + ts=prepare_header_val(timestamp), + nonce=prepare_header_val(nonce), + ) + + log.debug("Hawk header for URL={} method={}: {}".format(url, method, header)) + + return header + + +class FileRecord(object): + def __init__( + self, + filename, + size, + digest, + algorithm, + unpack=False, + version=None, + visibility=None, + ): + object.__init__(self) + if "/" in filename or "\\" in filename: + log.error( + "The filename provided contains path information and is, therefore, invalid." 
+ ) + raise BadFilenameException(filename=filename) + self.filename = filename + self.size = size + self.digest = digest + self.algorithm = algorithm + self.unpack = unpack + self.version = version + self.visibility = visibility + + def __eq__(self, other): + if self is other: + return True + if ( + self.filename == other.filename + and self.size == other.size + and self.digest == other.digest + and self.algorithm == other.algorithm + and self.version == other.version + and self.visibility == other.visibility + ): + return True + else: + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __str__(self): + return repr(self) + + def __repr__(self): + return ( + "%s.%s(filename='%s', size=%s, digest='%s', algorithm='%s', visibility=%r)" + % ( + __name__, + self.__class__.__name__, + self.filename, + self.size, + self.digest, + self.algorithm, + self.visibility, + ) + ) + + def present(self): + # Doesn't check validity + return os.path.exists(self.filename) + + def validate_size(self): + if self.present(): + return self.size == os.path.getsize(self.filename) + else: + log.debug("trying to validate size on a missing file, %s", self.filename) + raise MissingFileException(filename=self.filename) + + def validate_digest(self): + if self.present(): + with open(self.filename, "rb") as f: + return self.digest == digest_file(f, self.algorithm) + else: + log.debug("trying to validate digest on a missing file, %s', self.filename") + raise MissingFileException(filename=self.filename) + + def validate(self): + if self.size is None or self.validate_size(): + if self.validate_digest(): + return True + return False + + def describe(self): + if self.present() and self.validate(): + return "'%s' is present and valid" % self.filename + elif self.present(): + return "'%s' is present and invalid" % self.filename + else: + return "'%s' is absent" % self.filename + + +def create_file_record(filename, algorithm): + fo = open(filename, "rb") + stored_filename = os.path.split(filename)[1] + fr = FileRecord( + stored_filename, + os.path.getsize(filename), + digest_file(fo, algorithm), + algorithm, + ) + fo.close() + return fr + + +class FileRecordJSONEncoder(json.JSONEncoder): + def encode_file_record(self, obj): + if not issubclass(type(obj), FileRecord): + err = ( + "FileRecordJSONEncoder is only for FileRecord and lists of FileRecords, " + "not %s" % obj.__class__.__name__ + ) + log.warn(err) + raise FileRecordJSONEncoderException(err) + else: + rv = { + "filename": obj.filename, + "size": obj.size, + "algorithm": obj.algorithm, + "digest": obj.digest, + } + if obj.unpack: + rv["unpack"] = True + if obj.version: + rv["version"] = obj.version + if obj.visibility is not None: + rv["visibility"] = obj.visibility + return rv + + def default(self, f): + if issubclass(type(f), list): + record_list = [] + for i in f: + record_list.append(self.encode_file_record(i)) + return record_list + else: + return self.encode_file_record(f) + + +class FileRecordJSONDecoder(json.JSONDecoder): + + """I help the json module materialize a FileRecord from + a JSON file. I understand FileRecords and lists of + FileRecords. 
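+ A single record is expected to look roughly like this (all values
+ illustrative):
+
+     {"filename": "clang.tar.zst", "size": 123456789,
+      "algorithm": "sha512", "digest": "ab12...", "unpack": true,
+      "visibility": "public"}
+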
I ignore things that I don't expect for now""" + + # TODO: make this more explicit in what it's looking for + # and error out on unexpected things + + def process_file_records(self, obj): + if isinstance(obj, list): + record_list = [] + for i in obj: + record = self.process_file_records(i) + if issubclass(type(record), FileRecord): + record_list.append(record) + return record_list + required_fields = [ + "filename", + "size", + "algorithm", + "digest", + ] + if isinstance(obj, dict): + missing = False + for req in required_fields: + if req not in obj: + missing = True + break + + if not missing: + unpack = obj.get("unpack", False) + version = obj.get("version", None) + visibility = obj.get("visibility", None) + rv = FileRecord( + obj["filename"], + obj["size"], + obj["digest"], + obj["algorithm"], + unpack, + version, + visibility, + ) + log.debug("materialized %s" % rv) + return rv + return obj + + def decode(self, s): + decoded = json.JSONDecoder.decode(self, s) + rv = self.process_file_records(decoded) + return rv + + +class Manifest(object): + + valid_formats = ("json",) + + def __init__(self, file_records=None): + self.file_records = file_records or [] + + def __eq__(self, other): + if self is other: + return True + if len(self.file_records) != len(other.file_records): + log.debug("Manifests differ in number of files") + return False + # sort the file records by filename before comparing + mine = sorted((fr.filename, fr) for fr in self.file_records) + theirs = sorted((fr.filename, fr) for fr in other.file_records) + return mine == theirs + + def __ne__(self, other): + return not self.__eq__(other) + + def __deepcopy__(self, memo): + # This is required for a deep copy + return Manifest(self.file_records[:]) + + def __copy__(self): + return Manifest(self.file_records) + + def copy(self): + return Manifest(self.file_records[:]) + + def present(self): + return all(i.present() for i in self.file_records) + + def validate_sizes(self): + return all(i.validate_size() for i in self.file_records) + + def validate_digests(self): + return all(i.validate_digest() for i in self.file_records) + + def validate(self): + return all(i.validate() for i in self.file_records) + + def load(self, data_file, fmt="json"): + assert fmt in self.valid_formats + if fmt == "json": + try: + self.file_records.extend( + json.load(data_file, cls=FileRecordJSONDecoder) + ) + except ValueError: + raise InvalidManifest("trying to read invalid manifest file") + + def loads(self, data_string, fmt="json"): + assert fmt in self.valid_formats + if fmt == "json": + try: + self.file_records.extend( + json.loads(data_string, cls=FileRecordJSONDecoder) + ) + except ValueError: + raise InvalidManifest("trying to read invalid manifest file") + + def dump(self, output_file, fmt="json"): + assert fmt in self.valid_formats + if fmt == "json": + return json.dump( + self.file_records, + output_file, + indent=2, + separators=(",", ": "), + cls=FileRecordJSONEncoder, + ) + + def dumps(self, fmt="json"): + assert fmt in self.valid_formats + if fmt == "json": + return json.dumps( + self.file_records, + indent=2, + separators=(",", ": "), + cls=FileRecordJSONEncoder, + ) + + +def digest_file(f, a): + """I take a file like object 'f' and return a hex-string containing + of the result of the algorithm 'a' applied to 'f'.""" + h = hashlib.new(a) + chunk_size = 1024 * 10 + data = f.read(chunk_size) + while data: + h.update(data) + data = f.read(chunk_size) + name = repr(f.name) if hasattr(f, "name") else "a file" + log.debug("hashed %s with %s to 
be %s", name, a, h.hexdigest()) + return h.hexdigest() + + +def execute(cmd): + """Execute CMD, logging its stdout at the info level""" + process = Popen(cmd, shell=True, stdout=PIPE) + while True: + line = process.stdout.readline() + if not line: + break + log.info(line.replace("\n", " ")) + return process.wait() == 0 + + +def open_manifest(manifest_file): + """I know how to take a filename and load it into a Manifest object""" + if os.path.exists(manifest_file): + manifest = Manifest() + with open(manifest_file, "r" if PY3 else "rb") as f: + manifest.load(f) + log.debug("loaded manifest from file '%s'" % manifest_file) + return manifest + else: + log.debug("tried to load absent file '%s' as manifest" % manifest_file) + raise InvalidManifest("manifest file '%s' does not exist" % manifest_file) + + +def list_manifest(manifest_file): + """I know how print all the files in a location""" + try: + manifest = open_manifest(manifest_file) + except InvalidManifest as e: + log.error( + "failed to load manifest file at '%s': %s" + % ( + manifest_file, + str(e), + ) + ) + return False + for f in manifest.file_records: + print( + "{}\t{}\t{}".format( + "P" if f.present() else "-", + "V" if f.present() and f.validate() else "-", + f.filename, + ) + ) + return True + + +def validate_manifest(manifest_file): + """I validate that all files in a manifest are present and valid but + don't fetch or delete them if they aren't""" + try: + manifest = open_manifest(manifest_file) + except InvalidManifest as e: + log.error( + "failed to load manifest file at '%s': %s" + % ( + manifest_file, + str(e), + ) + ) + return False + invalid_files = [] + absent_files = [] + for f in manifest.file_records: + if not f.present(): + absent_files.append(f) + elif not f.validate(): + invalid_files.append(f) + if len(invalid_files + absent_files) == 0: + return True + else: + return False + + +def add_files(manifest_file, algorithm, filenames, version, visibility, unpack): + # returns True if all files successfully added, False if not + # and doesn't catch library Exceptions. 
If any files are already + # tracked in the manifest, return will be False because they weren't + # added + all_files_added = True + # Create a old_manifest object to add to + if os.path.exists(manifest_file): + old_manifest = open_manifest(manifest_file) + else: + old_manifest = Manifest() + log.debug("creating a new manifest file") + new_manifest = Manifest() # use a different manifest for the output + for filename in filenames: + log.debug("adding %s" % filename) + path, name = os.path.split(filename) + new_fr = create_file_record(filename, algorithm) + new_fr.version = version + new_fr.visibility = visibility + new_fr.unpack = unpack + log.debug("appending a new file record to manifest file") + add = True + for fr in old_manifest.file_records: + log.debug( + "manifest file has '%s'" + % "', ".join([x.filename for x in old_manifest.file_records]) + ) + if new_fr == fr: + log.info("file already in old_manifest") + add = False + elif filename == fr.filename: + log.error( + "manifest already contains a different file named %s" % filename + ) + add = False + if add: + new_manifest.file_records.append(new_fr) + log.debug("added '%s' to manifest" % filename) + else: + all_files_added = False + # copy any files in the old manifest that aren't in the new one + new_filenames = set(fr.filename for fr in new_manifest.file_records) + for old_fr in old_manifest.file_records: + if old_fr.filename not in new_filenames: + new_manifest.file_records.append(old_fr) + if PY3: + with open(manifest_file, mode="w") as output: + new_manifest.dump(output, fmt="json") + else: + with open(manifest_file, mode="wb") as output: + new_manifest.dump(output, fmt="json") + return all_files_added + + +def touch(f): + """Used to modify mtime in cached files; + mtime is used by the purge command""" + try: + os.utime(f, None) + except OSError: + log.warn("impossible to update utime of file %s" % f) + + +def _urlopen(req): + ssl_context = None + if os.name == "nt": + ssl_context = ssl.create_default_context(cafile=certifi.where()) + return urllib2.urlopen(req, context=ssl_context) + + +@contextmanager +@retriable(sleeptime=2) +def request(url, auth_file=None): + req = Request(url) + _authorize(req, auth_file) + with closing(_urlopen(req)) as f: + log.debug("opened %s for reading" % url) + yield f + + +def fetch_file(base_urls, file_record, grabchunk=1024 * 4, auth_file=None, region=None): + # A file which is requested to be fetched that exists locally will be + # overwritten by this function + fd, temp_path = tempfile.mkstemp(dir=os.getcwd()) + os.close(fd) + fetched_path = None + for base_url in base_urls: + # Generate the URL for the file on the server side + url = urljoin(base_url, "%s/%s" % (file_record.algorithm, file_record.digest)) + if region is not None: + url += "?region=" + region + + log.info("Attempting to fetch from '%s'..." % base_url) + + # Well, the file doesn't exist locally. Let's fetch it. 
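+ # The transfer streams into the uniquely named temp file created
+ # above in the current working directory; fetched_path is only set
+ # once the whole body has been read, so a partial download can never
+ # end up under the real filename.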
+ try: + with request(url, auth_file) as f, open(temp_path, mode="wb") as out: + k = True + size = 0 + while k: + # TODO: print statistics as file transfers happen both for info and to stop + # buildbot timeouts + indata = f.read(grabchunk) + out.write(indata) + size += len(indata) + if len(indata) == 0: + k = False + log.info( + "File %s fetched from %s as %s" + % (file_record.filename, base_url, temp_path) + ) + fetched_path = temp_path + break + except (URLError, HTTPError, ValueError): + log.info( + "...failed to fetch '%s' from %s" % (file_record.filename, base_url), + exc_info=True, + ) + except IOError: # pragma: no cover + log.info( + "failed to write to temporary file for '%s'" % file_record.filename, + exc_info=True, + ) + + # cleanup temp file in case of issues + if fetched_path: + return os.path.split(fetched_path)[1] + else: + try: + os.remove(temp_path) + except OSError: # pragma: no cover + pass + return None + + +def clean_path(dirname): + """Remove a subtree if is exists. Helper for unpack_file().""" + if os.path.exists(dirname): + log.info("rm tree: %s" % dirname) + shutil.rmtree(dirname) + + +CHECKSUM_SUFFIX = ".checksum" + + +def validate_tar_member(member, path): + def _is_within_directory(directory, target): + real_directory = os.path.realpath(directory) + real_target = os.path.realpath(target) + prefix = os.path.commonprefix([real_directory, real_target]) + return prefix == real_directory + + member_path = os.path.join(path, member.name) + if not _is_within_directory(path, member_path): + raise Exception("Attempted path traversal in tar file: " + member.name) + if member.issym(): + link_path = os.path.join(os.path.dirname(member_path), member.linkname) + if not _is_within_directory(path, link_path): + raise Exception("Attempted link path traversal in tar file: " + member.name) + if member.mode & (stat.S_ISUID | stat.S_ISGID): + raise Exception("Attempted setuid or setgid in tar file: " + member.name) + + +def safe_extract(tar, path=".", *, numeric_owner=False): + def _files(tar, path): + for member in tar: + validate_tar_member(member, path) + yield member + + tar.extractall(path, members=_files(tar, path), numeric_owner=numeric_owner) + + +def unpack_file(filename): + """Untar `filename`, assuming it is uncompressed or compressed with bzip2, + xz, gzip, zst, or unzip a zip file. The file is assumed to contain a single + directory with a name matching the base of the given filename. + Xz support is handled by shelling out to 'tar'.""" + if os.path.isfile(filename) and tarfile.is_tarfile(filename): + tar_file, zip_ext = os.path.splitext(filename) + base_file, tar_ext = os.path.splitext(tar_file) + clean_path(base_file) + log.info('untarring "%s"' % filename) + with tarfile.open(filename) as tar: + safe_extract(tar) + elif os.path.isfile(filename) and filename.endswith(".tar.xz"): + base_file = filename.replace(".tar.xz", "") + clean_path(base_file) + log.info('untarring "%s"' % filename) + # Not using tar -Jxf because it fails on Windows for some reason. 
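+ # Instead, xz decompresses to stdout and the resulting bare tar
+ # stream is re-read from an in-memory BytesIO buffer.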
+ process = Popen(["xz", "-d", "-c", filename], stdout=PIPE) + stdout, stderr = process.communicate() + if process.returncode != 0: + return False + fileobj = BytesIO() + fileobj.write(stdout) + fileobj.seek(0) + with tarfile.open(fileobj=fileobj, mode="r|") as tar: + safe_extract(tar) + elif os.path.isfile(filename) and filename.endswith(".tar.zst"): + import zstandard + + base_file = filename.replace(".tar.zst", "") + clean_path(base_file) + log.info('untarring "%s"' % filename) + dctx = zstandard.ZstdDecompressor() + with dctx.stream_reader(open(filename, "rb")) as fileobj: + with tarfile.open(fileobj=fileobj, mode="r|") as tar: + safe_extract(tar) + elif os.path.isfile(filename) and zipfile.is_zipfile(filename): + base_file = filename.replace(".zip", "") + clean_path(base_file) + log.info('unzipping "%s"' % filename) + z = zipfile.ZipFile(filename) + z.extractall() + z.close() + else: + log.error("Unknown archive extension for filename '%s'" % filename) + return False + return True + + +def fetch_files( + manifest_file, + base_urls, + filenames=[], + cache_folder=None, + auth_file=None, + region=None, +): + # Lets load the manifest file + try: + manifest = open_manifest(manifest_file) + except InvalidManifest as e: + log.error( + "failed to load manifest file at '%s': %s" + % ( + manifest_file, + str(e), + ) + ) + return False + + # we want to track files already in current working directory AND valid + # we will not need to fetch these + present_files = [] + + # We want to track files that fail to be fetched as well as + # files that are fetched + failed_files = [] + fetched_files = [] + + # Files that we want to unpack. + unpack_files = [] + + # Lets go through the manifest and fetch the files that we want + for f in manifest.file_records: + # case 1: files are already present + if f.present(): + if f.validate(): + present_files.append(f.filename) + if f.unpack: + unpack_files.append(f.filename) + else: + # we have an invalid file here, better to cleanup! + # this invalid file needs to be replaced with a good one + # from the local cash or fetched from a tooltool server + log.info( + "File %s is present locally but it is invalid, so I will remove it " + "and try to fetch it" % f.filename + ) + os.remove(os.path.join(os.getcwd(), f.filename)) + + # check if file is already in cache + if cache_folder and f.filename not in present_files: + try: + shutil.copy( + os.path.join(cache_folder, f.digest), + os.path.join(os.getcwd(), f.filename), + ) + log.info( + "File %s retrieved from local cache %s" % (f.filename, cache_folder) + ) + touch(os.path.join(cache_folder, f.digest)) + + filerecord_for_validation = FileRecord( + f.filename, f.size, f.digest, f.algorithm + ) + if filerecord_for_validation.validate(): + present_files.append(f.filename) + if f.unpack: + unpack_files.append(f.filename) + else: + # the file copied from the cache is invalid, better to + # clean up the cache version itself as well + log.warn( + "File %s retrieved from cache is invalid! 
I am deleting it from the " + "cache as well" % f.filename + ) + os.remove(os.path.join(os.getcwd(), f.filename)) + os.remove(os.path.join(cache_folder, f.digest)) + except IOError: + log.info( + "File %s not present in local cache folder %s" + % (f.filename, cache_folder) + ) + + # now I will try to fetch all files which are not already present and + # valid, appending a suffix to avoid race conditions + temp_file_name = None + # 'filenames' is the list of filenames to be managed, if this variable + # is a non empty list it can be used to filter if filename is in + # present_files, it means that I have it already because it was already + # either in the working dir or in the cache + if ( + f.filename in filenames or len(filenames) == 0 + ) and f.filename not in present_files: + log.debug("fetching %s" % f.filename) + temp_file_name = fetch_file( + base_urls, f, auth_file=auth_file, region=region + ) + if temp_file_name: + fetched_files.append((f, temp_file_name)) + else: + failed_files.append(f.filename) + else: + log.debug("skipping %s" % f.filename) + + # lets ensure that fetched files match what the manifest specified + for localfile, temp_file_name in fetched_files: + # since I downloaded to a temp file, I need to perform all validations on the temp file + # this is why filerecord_for_validation is created + + filerecord_for_validation = FileRecord( + temp_file_name, localfile.size, localfile.digest, localfile.algorithm + ) + + if filerecord_for_validation.validate(): + # great! + # I can rename the temp file + log.info( + "File integrity verified, renaming %s to %s" + % (temp_file_name, localfile.filename) + ) + os.rename( + os.path.join(os.getcwd(), temp_file_name), + os.path.join(os.getcwd(), localfile.filename), + ) + + if localfile.unpack: + unpack_files.append(localfile.filename) + + # if I am using a cache and a new file has just been retrieved from a + # remote location, I need to update the cache as well + if cache_folder: + log.info("Updating local cache %s..." % cache_folder) + try: + if not os.path.exists(cache_folder): + log.info("Creating cache in %s..." % cache_folder) + os.makedirs(cache_folder, 0o0700) + shutil.copy( + os.path.join(os.getcwd(), localfile.filename), + os.path.join(cache_folder, localfile.digest), + ) + log.info( + "Local cache %s updated with %s" + % (cache_folder, localfile.filename) + ) + touch(os.path.join(cache_folder, localfile.digest)) + except (OSError, IOError): + log.warning( + "Impossible to add file %s to cache folder %s" + % (localfile.filename, cache_folder), + exc_info=True, + ) + else: + failed_files.append(localfile.filename) + log.error("'%s'" % filerecord_for_validation.describe()) + os.remove(temp_file_name) + + # Unpack files that need to be unpacked. 
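+ # unpack_files collects both pre-existing valid files flagged with
+ # "unpack" and files that were just fetched and validated above.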
+ for filename in unpack_files: + if not unpack_file(filename): + failed_files.append(filename) + + # If we failed to fetch or validate a file, we need to fail + if len(failed_files) > 0: + log.error("The following files failed: '%s'" % "', ".join(failed_files)) + return False + return True + + +def freespace(p): + "Returns the number of bytes free under directory `p`" + if sys.platform == "win32": # pragma: no cover + # os.statvfs doesn't work on Windows + import win32file + + secsPerClus, bytesPerSec, nFreeClus, totClus = win32file.GetDiskFreeSpace(p) + return secsPerClus * bytesPerSec * nFreeClus + else: + r = os.statvfs(p) + return r.f_frsize * r.f_bavail + + +def purge(folder, gigs): + """If gigs is non 0, it deletes files in `folder` until `gigs` GB are free, + starting from older files. If gigs is 0, a full purge will be performed. + No recursive deletion of files in subfolder is performed.""" + + full_purge = bool(gigs == 0) + gigs *= 1024 * 1024 * 1024 + + if not full_purge and freespace(folder) >= gigs: + log.info("No need to cleanup") + return + + files = [] + for f in os.listdir(folder): + p = os.path.join(folder, f) + # it deletes files in folder without going into subfolders, + # assuming the cache has a flat structure + if not os.path.isfile(p): + continue + mtime = os.path.getmtime(p) + files.append((mtime, p)) + + # iterate files sorted by mtime + for _, f in sorted(files): + log.info("removing %s to free up space" % f) + try: + os.remove(f) + except OSError: + log.info("Impossible to remove %s" % f, exc_info=True) + if not full_purge and freespace(folder) >= gigs: + break + + +def _log_api_error(e): + if hasattr(e, "hdrs") and e.hdrs["content-type"] == "application/json": + json_resp = json.load(e.fp) + log.error( + "%s: %s" % (json_resp["error"]["name"], json_resp["error"]["description"]) + ) + else: + log.exception("Error making RelengAPI request:") + + +def _authorize(req, auth_file): + is_taskcluster_auth = False + + if not auth_file: + try: + taskcluster_env_keys = { + "clientId": "TASKCLUSTER_CLIENT_ID", + "accessToken": "TASKCLUSTER_ACCESS_TOKEN", + } + auth_content = {k: os.environ[v] for k, v in taskcluster_env_keys.items()} + is_taskcluster_auth = True + except KeyError: + return + else: + with open(auth_file) as f: + auth_content = f.read().strip() + try: + auth_content = json.loads(auth_content) + is_taskcluster_auth = True + except Exception: + pass + + if is_taskcluster_auth: + taskcluster_header = make_taskcluster_header(auth_content, req) + log.debug("Using taskcluster credentials in %s" % auth_file) + req.add_unredirected_header("Authorization", taskcluster_header) + else: + log.debug("Using Bearer token in %s" % auth_file) + req.add_unredirected_header("Authorization", "Bearer %s" % auth_content) + + +def _send_batch(base_url, auth_file, batch, region): + url = urljoin(base_url, "upload") + if region is not None: + url += "?region=" + region + data = json.dumps(batch) + if PY3: + data = data.encode("utf-8") + req = Request(url, data, {"Content-Type": "application/json"}) + _authorize(req, auth_file) + try: + resp = _urlopen(req) + except (URLError, HTTPError) as e: + _log_api_error(e) + return None + return json.load(resp)["result"] + + +def _s3_upload(filename, file): + # urllib2 does not support streaming, so we fall back to good old httplib + url = urlparse(file["put_url"]) + cls = HTTPSConnection if url.scheme == "https" else HTTPConnection + host, port = url.netloc.split(":") if ":" in url.netloc else (url.netloc, 443) + port = int(port) + conn = 
cls(host, port) + try: + req_path = "%s?%s" % (url.path, url.query) if url.query else url.path + with open(filename, "rb") as f: + content = f.read() + content_length = len(content) + f.seek(0) + conn.request( + "PUT", + req_path, + f, + { + "Content-Type": "application/octet-stream", + "Content-Length": str(content_length), + }, + ) + resp = conn.getresponse() + resp_body = resp.read() + conn.close() + if resp.status != 200: + raise RuntimeError( + "Non-200 return from AWS: %s %s\n%s" + % (resp.status, resp.reason, resp_body) + ) + except Exception: + file["upload_exception"] = sys.exc_info() + file["upload_ok"] = False + else: + file["upload_ok"] = True + + +def _notify_upload_complete(base_url, auth_file, file): + req = Request(urljoin(base_url, "upload/complete/%(algorithm)s/%(digest)s" % file)) + _authorize(req, auth_file) + try: + _urlopen(req) + except HTTPError as e: + if e.code != 409: + _log_api_error(e) + return + # 409 indicates that the upload URL hasn't expired yet and we + # should retry after a delay + to_wait = int(e.headers.get("X-Retry-After", 60)) + log.warning("Waiting %d seconds for upload URLs to expire" % to_wait) + time.sleep(to_wait) + _notify_upload_complete(base_url, auth_file, file) + except Exception: + log.exception("While notifying server of upload completion:") + + +def upload(manifest, message, base_urls, auth_file, region): + try: + manifest = open_manifest(manifest) + except InvalidManifest: + log.exception("failed to load manifest file at '%s'") + return False + + # verify the manifest, since we'll need the files present to upload + if not manifest.validate(): + log.error("manifest is invalid") + return False + + if any(fr.visibility is None for fr in manifest.file_records): + log.error("All files in a manifest for upload must have a visibility set") + + # convert the manifest to an upload batch + batch = { + "message": message, + "files": {}, + } + for fr in manifest.file_records: + batch["files"][fr.filename] = { + "size": fr.size, + "digest": fr.digest, + "algorithm": fr.algorithm, + "visibility": fr.visibility, + } + + # make the upload request + resp = _send_batch(base_urls[0], auth_file, batch, region) + if not resp: + return None + files = resp["files"] + + # Upload the files, each in a thread. This allows us to start all of the + # uploads before any of the URLs expire. + threads = {} + for filename, file in files.items(): + if "put_url" in file: + log.info("%s: starting upload" % (filename,)) + thd = threading.Thread(target=_s3_upload, args=(filename, file)) + thd.daemon = 1 + thd.start() + threads[filename] = thd + else: + log.info("%s: already exists on server" % (filename,)) + + # re-join all of those threads as they exit + success = True + while threads: + for filename, thread in list(threads.items()): + if not thread.is_alive(): + # _s3_upload has annotated file with result information + file = files[filename] + thread.join() + if file["upload_ok"]: + log.info("%s: uploaded" % filename) + else: + log.error( + "%s: failed" % filename, exc_info=file["upload_exception"] + ) + success = False + del threads[filename] + + # notify the server that the uploads are completed. 
If the notification + # fails, we don't consider that an error (the server will notice + # eventually) + for filename, file in files.items(): + if "put_url" in file and file["upload_ok"]: + log.info("notifying server of upload completion for %s" % (filename,)) + _notify_upload_complete(base_urls[0], auth_file, file) + + return success + + +def send_operation_on_file(data, base_urls, digest, auth_file): + url = base_urls[0] + url = urljoin(url, "file/sha512/" + digest) + + data = json.dumps(data) + + req = Request(url, data, {"Content-Type": "application/json"}) + req.get_method = lambda: "PATCH" + + _authorize(req, auth_file) + + try: + _urlopen(req) + except (URLError, HTTPError) as e: + _log_api_error(e) + return False + return True + + +def change_visibility(base_urls, digest, visibility, auth_file): + data = [ + { + "op": "set_visibility", + "visibility": visibility, + } + ] + return send_operation_on_file(data, base_urls, digest, auth_file) + + +def delete_instances(base_urls, digest, auth_file): + data = [ + { + "op": "delete_instances", + } + ] + return send_operation_on_file(data, base_urls, digest, auth_file) + + +def process_command(options, args): + """I know how to take a list of program arguments and + start doing the right thing with them""" + cmd = args[0] + cmd_args = args[1:] + log.debug("processing '%s' command with args '%s'" % (cmd, '", "'.join(cmd_args))) + log.debug("using options: %s" % options) + + if cmd == "list": + return list_manifest(options["manifest"]) + if cmd == "validate": + return validate_manifest(options["manifest"]) + elif cmd == "add": + return add_files( + options["manifest"], + options["algorithm"], + cmd_args, + options["version"], + options["visibility"], + options["unpack"], + ) + elif cmd == "purge": + if options["cache_folder"]: + purge(folder=options["cache_folder"], gigs=options["size"]) + else: + log.critical("please specify the cache folder to be purged") + return False + elif cmd == "fetch": + return fetch_files( + options["manifest"], + options["base_url"], + cmd_args, + cache_folder=options["cache_folder"], + auth_file=options.get("auth_file"), + region=options.get("region"), + ) + elif cmd == "upload": + if not options.get("message"): + log.critical("upload command requires a message") + return False + return upload( + options.get("manifest"), + options.get("message"), + options.get("base_url"), + options.get("auth_file"), + options.get("region"), + ) + elif cmd == "change-visibility": + if not options.get("digest"): + log.critical("change-visibility command requires a digest option") + return False + if not options.get("visibility"): + log.critical("change-visibility command requires a visibility option") + return False + return change_visibility( + options.get("base_url"), + options.get("digest"), + options.get("visibility"), + options.get("auth_file"), + ) + elif cmd == "delete": + if not options.get("digest"): + log.critical("change-visibility command requires a digest option") + return False + return delete_instances( + options.get("base_url"), + options.get("digest"), + options.get("auth_file"), + ) + else: + log.critical('command "%s" is not implemented' % cmd) + return False + + +def main(argv, _skip_logging=False): + # Set up option parsing + parser = optparse.OptionParser() + parser.add_option( + "-q", + "--quiet", + default=logging.INFO, + dest="loglevel", + action="store_const", + const=logging.ERROR, + ) + parser.add_option( + "-v", "--verbose", dest="loglevel", action="store_const", const=logging.DEBUG + ) + 
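+ # The options below parameterize the subcommands dispatched by
+ # process_command(); illustrative invocations:
+ #   tooltool.py fetch -m manifest.tt -c ~/.tooltool-cache
+ #   tooltool.py add --visibility public --unpack toolchain.tar.zst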
parser.add_option( + "-m", + "--manifest", + default=DEFAULT_MANIFEST_NAME, + dest="manifest", + action="store", + help="specify the manifest file to be operated on", + ) + parser.add_option( + "-d", + "--algorithm", + default="sha512", + dest="algorithm", + action="store", + help="hashing algorithm to use (only sha512 is allowed)", + ) + parser.add_option( + "--digest", + default=None, + dest="digest", + action="store", + help="digest hash to change visibility for", + ) + parser.add_option( + "--visibility", + default=None, + dest="visibility", + choices=["internal", "public"], + help='Visibility level of this file; "internal" is for ' + "files that cannot be distributed out of the company " + 'but not for secrets; "public" files are available to ' + "anyone without restriction", + ) + parser.add_option( + "--unpack", + default=False, + dest="unpack", + action="store_true", + help="Request unpacking this file after fetch." + " This is helpful with tarballs.", + ) + parser.add_option( + "--version", + default=None, + dest="version", + action="store", + help="Version string for this file. This annotates the " + "manifest entry with a version string to help " + "identify the contents.", + ) + parser.add_option( + "-o", + "--overwrite", + default=False, + dest="overwrite", + action="store_true", + help="UNUSED; present for backward compatibility", + ) + parser.add_option( + "--url", + dest="base_url", + action="append", + help="RelengAPI URL ending with /tooltool/; default " + "is appropriate for Mozilla", + ) + parser.add_option( + "-c", "--cache-folder", dest="cache_folder", help="Local cache folder" + ) + parser.add_option( + "-s", + "--size", + help="free space required (in GB)", + dest="size", + type="float", + default=0.0, + ) + parser.add_option( + "-r", + "--region", + help="Preferred AWS region for upload or fetch; " "example: --region=us-west-2", + ) + parser.add_option( + "--message", + help='The "commit message" for an upload; format with a bug number ' + "and brief comment", + dest="message", + ) + parser.add_option( + "--authentication-file", + help="Use the RelengAPI token found in the given file to " + "authenticate to the RelengAPI server.", + dest="auth_file", + ) + + (options_obj, args) = parser.parse_args(argv[1:]) + + if not options_obj.base_url: + tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net") + taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL") + if taskcluster_proxy_url: + tooltool_url = "{}/{}".format(taskcluster_proxy_url, tooltool_host) + else: + tooltool_url = "https://{}".format(tooltool_host) + + options_obj.base_url = [tooltool_url] + + # ensure all URLs have a trailing slash + def add_slash(url): + return url if url.endswith("/") else (url + "/") + + options_obj.base_url = [add_slash(u) for u in options_obj.base_url] + + # expand ~ in --authentication-file + if options_obj.auth_file: + options_obj.auth_file = os.path.expanduser(options_obj.auth_file) + + # Dictionaries are easier to work with + options = vars(options_obj) + + log.setLevel(options["loglevel"]) + + # Set up logging, for now just to the console + if not _skip_logging: # pragma: no cover + ch = logging.StreamHandler() + cf = logging.Formatter("%(levelname)s - %(message)s") + ch.setFormatter(cf) + log.addHandler(ch) + + if options["algorithm"] != "sha512": + parser.error("only --algorithm sha512 is supported") + + if len(args) < 1: + parser.error("You must specify a command") + + return 0 if process_command(options, args) else 1 + + +if __name__ == 
"__main__": # pragma: no cover + sys.exit(main(sys.argv)) diff --git a/python/mozbuild/mozbuild/action/unify_symbols.py b/python/mozbuild/mozbuild/action/unify_symbols.py new file mode 100644 index 0000000000..4e96a010b2 --- /dev/null +++ b/python/mozbuild/mozbuild/action/unify_symbols.py @@ -0,0 +1,49 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse + +from mozpack.copier import FileCopier +from mozpack.errors import errors +from mozpack.files import FileFinder +from mozpack.unify import UnifiedFinder + + +class UnifiedSymbolsFinder(UnifiedFinder): + def unify_file(self, path, file1, file2): + # We expect none of the files to overlap. + if not file2: + return file1 + if not file1: + return file2 + errors.error( + "{} is in both {} and {}".format( + path, self._finder1.base, self._finder2.base + ) + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Merge two crashreporter symbols directories." + ) + parser.add_argument("dir1", help="Directory") + parser.add_argument("dir2", help="Directory to merge") + + options = parser.parse_args() + + dir1_finder = FileFinder(options.dir1) + dir2_finder = FileFinder(options.dir2) + finder = UnifiedSymbolsFinder(dir1_finder, dir2_finder) + + copier = FileCopier() + with errors.accumulate(): + for p, f in finder: + copier.add(p, f) + + copier.copy(options.dir1, skip_if_older=False) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/action/unify_tests.py b/python/mozbuild/mozbuild/action/unify_tests.py new file mode 100644 index 0000000000..d94ebade1b --- /dev/null +++ b/python/mozbuild/mozbuild/action/unify_tests.py @@ -0,0 +1,65 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os + +import buildconfig +import mozpack.path as mozpath +from mozpack.copier import FileCopier +from mozpack.errors import errors +from mozpack.files import FileFinder +from mozpack.unify import UnifiedFinder + + +class UnifiedTestFinder(UnifiedFinder): + def unify_file(self, path, file1, file2): + unified = super(UnifiedTestFinder, self).unify_file(path, file1, file2) + basename = mozpath.basename(path) + if basename == "mozinfo.json": + # The mozinfo.json files contain processor info, which differs + # between both ends. + # Remove the block when this assert is hit. + assert not unified + errors.ignore_errors() + self._report_difference(path, file1, file2) + errors.ignore_errors(False) + return file1 + elif basename == "dump_syms_mac": + # At the moment, the dump_syms_mac executable is a x86_64 binary + # on both ends. We can't create a universal executable from twice + # the same executable. + # When this assert hits, remove this block. + assert file1.open().read() == file2.open().read() + return file1 + return unified + + +def main(): + parser = argparse.ArgumentParser( + description="Merge two directories, creating Universal binaries for " + "executables and libraries they contain." 
+ ) + parser.add_argument("dir1", help="Directory") + parser.add_argument("dir2", help="Directory to merge") + + options = parser.parse_args() + + buildconfig.substs["OS_ARCH"] = "Darwin" + buildconfig.substs["LIPO"] = os.environ.get("LIPO") + + dir1_finder = FileFinder(options.dir1, find_executables=True, find_dotfiles=True) + dir2_finder = FileFinder(options.dir2, find_executables=True, find_dotfiles=True) + finder = UnifiedTestFinder(dir1_finder, dir2_finder) + + copier = FileCopier() + with errors.accumulate(): + for p, f in finder: + copier.add(p, f) + + copier.copy(options.dir1, skip_if_older=False) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/action/unpack_dmg.py b/python/mozbuild/mozbuild/action/unpack_dmg.py new file mode 100644 index 0000000000..74e4091549 --- /dev/null +++ b/python/mozbuild/mozbuild/action/unpack_dmg.py @@ -0,0 +1,52 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import sys +from pathlib import Path + +from mozpack import dmg + +from mozbuild.bootstrap import bootstrap_toolchain + + +def _path_or_none(input: str): + if not input: + return None + return Path(input) + + +def main(args): + parser = argparse.ArgumentParser( + description="Explode a DMG into its relevant files" + ) + + parser.add_argument("--dsstore", help="DSStore file from") + parser.add_argument("--background", help="Background file from") + parser.add_argument("--icon", help="Icon file from") + + parser.add_argument("dmgfile", metavar="DMG_IN", help="DMG File to Unpack") + parser.add_argument( + "outpath", metavar="PATH_OUT", help="Location to put unpacked files" + ) + + options = parser.parse_args(args) + + dmg_tool = bootstrap_toolchain("dmg/dmg") + hfs_tool = bootstrap_toolchain("dmg/hfsplus") + + dmg.extract_dmg( + dmgfile=Path(options.dmgfile), + output=Path(options.outpath), + dmg_tool=Path(dmg_tool), + hfs_tool=Path(hfs_tool), + dsstore=_path_or_none(options.dsstore), + background=_path_or_none(options.background), + icon=_path_or_none(options.icon), + ) + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/util.py b/python/mozbuild/mozbuild/action/util.py new file mode 100644 index 0000000000..d4102629ff --- /dev/null +++ b/python/mozbuild/mozbuild/action/util.py @@ -0,0 +1,24 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import sys +import time + + +def log_build_task(f, *args, **kwargs): + """Run the given function, representing an entire build task, and log the + BUILDTASK metadata row to stdout. + """ + start = time.monotonic() + try: + return f(*args, **kwargs) + finally: + end = time.monotonic() + print( + "BUILDTASK %s" + % json.dumps( + {"argv": sys.argv, "start": start, "end": end, "context": None} + ) + ) diff --git a/python/mozbuild/mozbuild/action/webidl.py b/python/mozbuild/mozbuild/action/webidl.py new file mode 100644 index 0000000000..81c2c2a507 --- /dev/null +++ b/python/mozbuild/mozbuild/action/webidl.py @@ -0,0 +1,19 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
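+ # Like several other action scripts, this one runs its main() under
+ # mozbuild.action.util.log_build_task (added just above in this
+ # patch), which logs a BUILDTASK metadata row (argv plus start/end
+ # timestamps) to stdout once the task finishes.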
+ +import sys + +from mozwebidlcodegen import create_build_system_manager + +from mozbuild.action.util import log_build_task + + +def main(argv): + """Perform WebIDL code generation required by the build system.""" + manager = create_build_system_manager() + manager.generate_build_files() + + +if __name__ == "__main__": + sys.exit(log_build_task(main, sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/wrap_rustc.py b/python/mozbuild/mozbuild/action/wrap_rustc.py new file mode 100644 index 0000000000..d865438c47 --- /dev/null +++ b/python/mozbuild/mozbuild/action/wrap_rustc.py @@ -0,0 +1,79 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import os +import subprocess +import sys + + +def parse_outputs(crate_output, dep_outputs, pass_l_flag): + env = {} + args = [] + + def parse_line(line): + if line.startswith("cargo:"): + return line[len("cargo:") :].split("=", 1) + + def parse_file(f): + with open(f) as fh: + return [parse_line(line.rstrip()) for line in fh.readlines()] + + for f in dep_outputs: + for entry in parse_file(f): + if not entry: + continue + key, value = entry + if key == "rustc-link-search": + args += ["-L", value] + elif key == "rustc-flags": + flags = value.split() + for flag, val in zip(flags[0::2], flags[1::2]): + if flag == "-l" and f == crate_output: + args += ["-l", val] + elif flag == "-L": + args += ["-L", val] + else: + raise Exception( + "Unknown flag passed through " + '"cargo:rustc-flags": "%s"' % flag + ) + elif key == "rustc-link-lib" and f == crate_output: + args += ["-l", value] + elif key == "rustc-cfg" and f == crate_output: + args += ["--cfg", value] + elif key == "rustc-env" and f == crate_output: + env_key, env_value = value.split("=", 1) + env[env_key] = env_value + elif key == "rerun-if-changed": + pass + elif key == "rerun-if-env-changed": + pass + elif key == "warning": + pass + elif key: + # Todo: Distinguish between direct and transitive + # dependencies so we can pass metadata environment + # variables correctly. + pass + + return env, args + + +def wrap_rustc(args): + parser = argparse.ArgumentParser() + parser.add_argument("--crate-out", nargs="?") + parser.add_argument("--deps-out", nargs="*") + parser.add_argument("--cwd") + parser.add_argument("--pass-l-flag", action="store_true") + parser.add_argument("--cmd", nargs=argparse.REMAINDER) + args = parser.parse_args(args) + + new_env, new_args = parse_outputs(args.crate_out, args.deps_out, args.pass_l_flag) + os.environ.update(new_env) + return subprocess.Popen(args.cmd + new_args, cwd=args.cwd).wait() + + +if __name__ == "__main__": + sys.exit(wrap_rustc(sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/action/xpccheck.py b/python/mozbuild/mozbuild/action/xpccheck.py new file mode 100644 index 0000000000..4b59577cce --- /dev/null +++ b/python/mozbuild/mozbuild/action/xpccheck.py @@ -0,0 +1,109 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +"""A generic script to verify all test files are in the +corresponding .ini file. + +Usage: xpccheck.py [ ...] 
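+ (each argument is a test directory containing an xpcshell.ini
+ manifest)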
+""" + +import os +import sys +from glob import glob + +import manifestparser + + +def getIniTests(testdir): + mp = manifestparser.ManifestParser(strict=False) + mp.read(os.path.join(testdir, "xpcshell.ini")) + return mp.tests + + +def verifyDirectory(initests, directory): + files = glob(os.path.join(os.path.abspath(directory), "test_*")) + for f in files: + if not os.path.isfile(f): + continue + + name = os.path.basename(f) + if name.endswith(".in"): + name = name[:-3] + + if not name.endswith(".js"): + continue + + found = False + for test in initests: + if os.path.join(os.path.abspath(directory), name) == test["path"]: + found = True + break + + if not found: + print( + ( + "TEST-UNEXPECTED-FAIL | xpccheck | test " + "%s is missing from test manifest %s!" + ) + % ( + name, + os.path.join(directory, "xpcshell.ini"), + ), + file=sys.stderr, + ) + sys.exit(1) + + +def verifyIniFile(initests, directory): + files = glob(os.path.join(os.path.abspath(directory), "test_*")) + for test in initests: + name = test["path"].split("/")[-1] + + found = False + for f in files: + + fname = f.split("/")[-1] + if fname.endswith(".in"): + fname = ".in".join(fname.split(".in")[:-1]) + + if os.path.join(os.path.abspath(directory), fname) == test["path"]: + found = True + break + + if not found: + print( + ( + "TEST-UNEXPECTED-FAIL | xpccheck | found " + "%s in xpcshell.ini and not in directory '%s'" + ) + % ( + name, + directory, + ), + file=sys.stderr, + ) + sys.exit(1) + + +def main(argv): + if len(argv) < 2: + print( + "Usage: xpccheck.py [ ...]", + file=sys.stderr, + ) + sys.exit(1) + + for d in argv[1:]: + # xpcshell-unpack is a copy of xpcshell sibling directory and in the Makefile + # we copy all files (including xpcshell.ini from the sibling directory. + if d.endswith("toolkit/mozapps/extensions/test/xpcshell-unpack"): + continue + + initests = getIniTests(d) + verifyDirectory(initests, d) + verifyIniFile(initests, d) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/python/mozbuild/mozbuild/action/xpidl-process.py b/python/mozbuild/mozbuild/action/xpidl-process.py new file mode 100755 index 0000000000..99f2a83f5e --- /dev/null +++ b/python/mozbuild/mozbuild/action/xpidl-process.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script is used to generate an output header and xpt file for +# input IDL file(s). It's purpose is to directly support the build +# system. The API will change to meet the needs of the build system. 
+ +import argparse +import os +import sys + +import six +from buildconfig import topsrcdir +from mozpack import path as mozpath +from xpidl import jsonxpt +from xpidl.header import print_header +from xpidl.rust import print_rust_bindings +from xpidl.rust_macros import print_rust_macros_bindings +from xpidl.xpidl import IDLParser + +from mozbuild.action.util import log_build_task +from mozbuild.makeutil import Makefile +from mozbuild.pythonutil import iter_modules_in_path +from mozbuild.util import FileAvoidWrite + + +def process( + input_dirs, + inc_paths, + bindings_conf, + header_dir, + xpcrs_dir, + xpt_dir, + deps_dir, + module, + idl_files, +): + p = IDLParser() + + xpts = [] + mk = Makefile() + rule = mk.create_rule() + + glbl = {} + exec(open(bindings_conf, encoding="utf-8").read(), glbl) + webidlconfig = glbl["DOMInterfaces"] + + # Write out dependencies for Python modules we import. If this list isn't + # up to date, we will not re-process XPIDL files if the processor changes. + rule.add_dependencies(six.ensure_text(s) for s in iter_modules_in_path(topsrcdir)) + + for path in idl_files: + basename = os.path.basename(path) + stem, _ = os.path.splitext(basename) + idl_data = open(path, encoding="utf-8").read() + + idl = p.parse(idl_data, filename=path) + idl.resolve(inc_paths, p, webidlconfig) + + header_path = os.path.join(header_dir, "%s.h" % stem) + rs_rt_path = os.path.join(xpcrs_dir, "rt", "%s.rs" % stem) + rs_bt_path = os.path.join(xpcrs_dir, "bt", "%s.rs" % stem) + + xpts.append(jsonxpt.build_typelib(idl)) + + rule.add_dependencies(six.ensure_text(s) for s in idl.deps) + + # The print_* functions don't actually do anything with the + # passed-in path other than writing it into the file to let people + # know where the original source was. This script receives + # absolute paths, which are not so great to embed in header files + # (they mess with deterministic generation of files on different + # machines, Searchfox logic, shared compilation caches, etc.), so + # we pass in fake paths that are the same across compilations, but + # should still enable people to figure out where to go. + relpath = mozpath.relpath(path, topsrcdir) + + with FileAvoidWrite(header_path) as fh: + print_header(idl, fh, path, relpath) + + with FileAvoidWrite(rs_rt_path) as fh: + print_rust_bindings(idl, fh, relpath) + + with FileAvoidWrite(rs_bt_path) as fh: + print_rust_macros_bindings(idl, fh, relpath) + + # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a + # number of different changes in the code, which may not cause the .xpt + # files to be changed in any way. This means that make will re-run us every + # time a build is run whether or not anything changed. To fix this we + # unconditionally write out the file. + xpt_path = os.path.join(xpt_dir, "%s.xpt" % module) + with open(xpt_path, "w", encoding="utf-8", newline="\n") as fh: + jsonxpt.write(jsonxpt.link(xpts), fh) + + rule.add_targets([six.ensure_text(xpt_path)]) + if deps_dir: + deps_path = os.path.join(deps_dir, "%s.pp" % module) + with FileAvoidWrite(deps_path) as fh: + mk.dump(fh) + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument( + "--depsdir", help="Directory in which to write dependency files." + ) + parser.add_argument( + "--bindings-conf", help="Path to the WebIDL binding configuration file." 
+ ) + parser.add_argument( + "--input-dir", + dest="input_dirs", + action="append", + default=[], + help="Directory(ies) in which to find source .idl files.", + ) + parser.add_argument("headerdir", help="Directory in which to write header files.") + parser.add_argument( + "xpcrsdir", help="Directory in which to write rust xpcom binding files." + ) + parser.add_argument("xptdir", help="Directory in which to write xpt file.") + parser.add_argument( + "module", help="Final module name to use for linked output xpt file." + ) + parser.add_argument("idls", nargs="+", help="Source .idl file(s).") + parser.add_argument( + "-I", + dest="incpath", + action="append", + default=[], + help="Extra directories where to look for included .idl files.", + ) + + args = parser.parse_args(argv) + incpath = [os.path.join(topsrcdir, p) for p in args.incpath] + process( + args.input_dirs, + incpath, + args.bindings_conf, + args.headerdir, + args.xpcrsdir, + args.xptdir, + args.depsdir, + args.module, + args.idls, + ) + + +if __name__ == "__main__": + log_build_task(main, sys.argv[1:]) diff --git a/python/mozbuild/mozbuild/action/zip.py b/python/mozbuild/mozbuild/action/zip.py new file mode 100644 index 0000000000..e0dcbe020f --- /dev/null +++ b/python/mozbuild/mozbuild/action/zip.py @@ -0,0 +1,52 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script creates a zip file, but will also strip any binaries +# it finds before adding them to the zip. + +import argparse +import sys + +import mozpack.path as mozpath +from mozpack.copier import Jarrer +from mozpack.errors import errors +from mozpack.files import FileFinder +from mozpack.path import match + +from mozbuild.action.util import log_build_task + + +def main(args): + parser = argparse.ArgumentParser() + parser.add_argument( + "-C", + metavar="DIR", + default=".", + help="Change to given directory before considering " "other paths", + ) + parser.add_argument("--strip", action="store_true", help="Strip executables") + parser.add_argument( + "-x", + metavar="EXCLUDE", + default=[], + action="append", + help="Exclude files that match the pattern", + ) + parser.add_argument("zip", help="Path to zip file to write") + parser.add_argument("input", nargs="+", help="Path to files to add to zip") + args = parser.parse_args(args) + + jarrer = Jarrer() + + with errors.accumulate(): + finder = FileFinder(args.C, find_executables=args.strip) + for path in args.input: + for p, f in finder.find(path): + if not any([match(p, exclude) for exclude in args.x]): + jarrer.add(p, f) + jarrer.copy(mozpath.join(args.C, args.zip)) + + +if __name__ == "__main__": + log_build_task(main, sys.argv[1:]) diff --git a/python/mozbuild/mozbuild/analyze/__init__.py b/python/mozbuild/mozbuild/analyze/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/analyze/hg.py b/python/mozbuild/mozbuild/analyze/hg.py new file mode 100644 index 0000000000..605ff6838e --- /dev/null +++ b/python/mozbuild/mozbuild/analyze/hg.py @@ -0,0 +1,176 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
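The zip.py action above reduces to: resolve each input relative to -C, drop anything that matches an -x pattern, and add the rest to the archive. A rough stdlib-only sketch of that filtering step, assuming fnmatch-style patterns and literal file paths (the real script accepts mozpack's richer match syntax and FileFinder globs, and can also strip binaries):

```python
import fnmatch
import os
import zipfile


def zip_with_excludes(zip_path, base_dir, inputs, excludes=()):
    """Write `inputs` (paths relative to base_dir) into a zip archive,
    skipping any path that matches one of the exclude patterns."""
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        for rel in inputs:
            if any(fnmatch.fnmatch(rel, pat) for pat in excludes):
                continue
            zf.write(os.path.join(base_dir, rel), rel)
```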
+
+import bisect
+import gzip
+import json
+import math
+from collections import Counter
+from datetime import datetime, timedelta
+
+import mozpack.path as mozpath
+import requests
+
+PUSHLOG_CHUNK_SIZE = 500
+
+URL = "https://hg.mozilla.org/mozilla-central/json-pushes?"
+
+
+def unix_epoch(date):
+    return (date - datetime(1970, 1, 1)).total_seconds()
+
+
+def unix_from_date(n, today):
+    return unix_epoch(today - timedelta(days=n))
+
+
+def get_lastpid(session):
+    return session.get(URL + "&version=2").json()["lastpushid"]
+
+
+def get_pushlog_chunk(session, start, end):
+    # returns pushes sorted by date
+    res = session.get(
+        URL + "version=1&startID={0}&endID={1}&full=1".format(start, end)
+    ).json()
+    return sorted(res.items(), key=lambda x: x[1]["date"])
+
+
+def collect_data(session, date):
+    if date < 1206031764:  # first push
+        raise Exception("No pushes exist before March 20, 2008.")
+    lastpushid = get_lastpid(session)
+    data = []
+    start_id = lastpushid - PUSHLOG_CHUNK_SIZE
+    end_id = lastpushid + 1
+    while True:
+        res = get_pushlog_chunk(session, start_id, end_id)
+        starting_date = res[0][1]["date"]  # date of oldest push in chunk
+        dates = [x[1]["date"] for x in res]
+        if starting_date < date:
+            i = bisect.bisect_left(dates, date)
+            data.append(res[i:])
+            return data
+        else:
+            data.append(res)
+            end_id = start_id + 1
+            start_id = start_id - PUSHLOG_CHUNK_SIZE
+
+
+def get_data(epoch):
+    session = requests.Session()
+    data = collect_data(session, epoch)
+    return {k: v for sublist in data for (k, v) in sublist}
+
+
+class Pushlog(object):
+    def __init__(self, days):
+        info = get_data(unix_from_date(days, datetime.today()))
+        self.pushlog = info
+        self.pids = self.get_pids()
+        self.pushes = self.make_pushes()
+        self.files = [l for p in self.pushes for l in set(p.files)]
+        self.file_set = set(self.files)
+        self.file_count = Counter(self.files)
+
+    def make_pushes(self):
+        pids = self.pids
+        all_pushes = self.pushlog
+        return [Push(pid, all_pushes[str(pid)]) for pid in pids]
+
+    def get_pids(self):
+        # dict.keys() is a view in Python 3 and has no sort(); sort the
+        # push ids numerically instead.
+        return sorted(self.pushlog.keys(), key=int)
+
+
+class Push(object):
+    def __init__(self, pid, p_dict):
+        self.id = pid
+        self.date = p_dict["date"]
+        self.files = [f for x in p_dict["changesets"] for f in x["files"]]
+
+
+class Report(object):
+    def __init__(self, days, path=None, cost_dict=None):
+        obj = Pushlog(days)
+        self.file_set = obj.file_set
+        self.file_count = obj.file_count
+        self.name = str(days) + "day_report"
+        self.cost_dict = self.get_cost_dict(path, cost_dict)
+
+    def get_cost_dict(self, path, cost_dict):
+        if path is not None:
+            with gzip.open(path) as file:
+                return json.loads(file.read())
+        if cost_dict is not None:
+            return cost_dict
+        raise Exception("A cost dict, or a path to a gzipped one, is required.")
+
+    def organize_data(self):
+        costs = self.cost_dict
+        counts = self.file_count
+        res = []
+        for f in self.file_set:
+            cost = costs.get(f)
+            count = counts.get(f)
+            if cost is not None:
+                res.append((f, cost, count, round(cost * count, 3)))
+        return res
+
+    def get_sorted_report(self, format):
+        res = self.organize_data()
+        res.sort(key=(lambda x: x[3]), reverse=True)
+
+        def ms_to_mins_secs(ms):
+            secs = ms / 1000.0
+            mins = secs / 60
+            secs = secs % 60
+            return "%d:%02d" % (math.trunc(mins), int(round(secs)))
+
+        if format in ("html", "pretty"):
+            res = [
+                (f, ms_to_mins_secs(cost), count, ms_to_mins_secs(total))
+                for (f, cost, count, total) in res
+            ]
+
+        return res
+
+    def cut(self, size, lst):
+        if len(lst) <= size:
+            return lst
+        else:
+            return lst[:size]
+
+    def generate_output(self, format, limit, dst):
+        import tablib
+
+        data = tablib.Dataset(headers=["FILE", "TIME", "CHANGES", "TOTAL"])
+        res = self.get_sorted_report(format)
+        if limit is not None:
+            res = self.cut(limit, res)
+        for x in res:
+            data.append(x)
+        if format == "pretty":
+            print(data)
+        else:
+            file_name = self.name + "." + format
+            # tablib's export returns text for csv, json and html, so write
+            # the report in text mode.
+            content = data.export(format)
+            file_path = mozpath.join(dst, file_name)
+            with open(file_path, "w") as f:
+                f.write(content)
+            print("Created report: %s" % file_path)
diff --git a/python/mozbuild/mozbuild/android_version_code.py b/python/mozbuild/mozbuild/android_version_code.py
new file mode 100644
index 0000000000..aa13609a7a
--- /dev/null
+++ b/python/mozbuild/mozbuild/android_version_code.py
@@ -0,0 +1,197 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import math
+import sys
+import time
+
+# Builds before this build ID use the v0 version scheme. Builds after this
+# build ID use the v1 version scheme.
+V1_CUTOFF = 20150801000000  # YYYYmmddHHMMSS
+
+
+def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
+    base = int(str(buildid)[:10])
+    # None is interpreted as arm.
+    if not cpu_arch or cpu_arch == "armeabi-v7a":
+        # Increment by MIN_SDK_VERSION -- this adds 9 to every build ID as a
+        # minimum. Our split APK starts at 15.
+        return base + min_sdk + 0
+    elif cpu_arch in ["x86"]:
+        # Increment the version code by 3 for x86 builds so they are offered to
+        # x86 phones that have ARM emulators, beating the 2-point advantage that
+        # the v15+ ARMv7 APK has. If we change our splits in the future, we'll
+        # need to do this further still.
+        return base + min_sdk + 3
+    else:
+        raise ValueError(
+            "Don't know how to compute android:versionCode "
+            "for CPU arch %s" % cpu_arch
+        )
+
+
+def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
+    """Generate a v1 android:versionCode.
+    The important consideration is that version codes be monotonically
+    increasing (per Android package name) for all published builds. The input
+    build IDs are based on timestamps and hence are always monotonically
+    increasing.
+
+    The generated v1 version codes look like (in binary):
+
+    0111 1000 0010 tttt tttt tttt tttt txpg
+
+    The 17 bits labelled 't' represent the number of hours since midnight on
+    September 1, 2015. (2015090100 in YYYYMMDDHH format.) This yields a
+    little under 15 years' worth of hourly build identifiers, since 2**17 / (366
+    * 24) =~ 14.92.
+
+    The bits labelled 'x', 'p', and 'g' are feature flags.
+
+    The bit labelled 'x' is 1 if the build is for an x86 or x86-64 architecture,
+    and 0 otherwise, which means the build is for an ARM or ARM64 architecture.
+    (Fennec no longer supports ARMv6, so ARM is equivalent to ARMv7. ARM64 is
+    also known as AArch64; it is logically ARMv8.)
+
+    For the same release, x86 and x86_64 builds have higher version codes and
+    take precedence over ARM builds, so that they are preferred over ARM on
+    devices that have ARM emulation.
+
+    The bit labelled 'p' is 1 if the build is for a 64-bit architecture (x86-64
+    or ARM64), and 0 otherwise, which means the build is for a 32-bit
+    architecture (x86 or ARM). 64-bit builds have higher version codes so
+    they take precedence over 32-bit builds on devices that support 64-bit.
+
+    The bit labelled 'g' is 1 if the build targets a recent API level, which
+    is currently always the case, because Firefox no longer ships releases that
+    are split by API levels. However, we may reintroduce a split in the future,
+    in which case the release that targets an older API level will have this
+    bit set to 0, so that the build targeting the newer API level takes
+    precedence.
+
+    We throw an explanatory exception when we are within one calendar year of
+    running out of build events. This gives lots of time to update the version
+    scheme. The responsible individual should then bump the range (to allow
+    builds to continue) and use the time remaining to update the version scheme
+    via the reserved high order bits.
+
+    N.B.: the reserved 0 bit to the left of the highest order 't' bit can,
+    sometimes, be used to bump the version scheme. In addition, by reducing the
+    granularity of the build identifiers (for example, moving to identifying
+    builds every 2 or 4 hours), the version scheme may be adjusted further still
+    without losing a (valuable) high order bit.
+    """
+
+    def hours_since_cutoff(buildid):
+        # The ID is formatted like YYYYMMDDHHMMSS (using
+        # datetime.now().strftime('%Y%m%d%H%M%S'); see build/variables.py).
+        # The inverse function is time.strptime.
+        # N.B.: the time module expresses time as decimal seconds since the
+        # epoch.
+        fmt = "%Y%m%d%H%M%S"
+        build = time.strptime(str(buildid), fmt)
+        cutoff = time.strptime(str(V1_CUTOFF), fmt)
+        return int(
+            math.floor((time.mktime(build) - time.mktime(cutoff)) / (60.0 * 60.0))
+        )
+
+    # Of the 21 low order bits, we take 17 bits for builds.
+    base = hours_since_cutoff(buildid)
+    if base < 0:
+        raise ValueError(
+            "Something has gone horribly wrong: cannot calculate "
+            "android:versionCode from build ID %s: hours underflow "
+            "bits allotted!" % buildid
+        )
+    if base > 2 ** 17:
+        raise ValueError(
+            "Something has gone horribly wrong: cannot calculate "
+            "android:versionCode from build ID %s: hours overflow "
+            "bits allotted!" % buildid
+        )
+    if base > 2 ** 17 - 366 * 24:
+        raise ValueError(
+            "Running out of low order bits calculating "
+            "android:versionCode from build ID %s; "
+            "YOU HAVE ONE YEAR TO UPDATE THE VERSION SCHEME." % buildid
+        )
+
+    version = 0b1111000001000000000000000000000
+    # We reserve 1 "middle" high order bit for the future, and 3 low order bits
+    # for architecture and APK splits.
+    version |= base << 3
+
+    # 'x' bit is 1 for x86/x86-64 architectures (`None` is interpreted as ARM).
+    if cpu_arch in ["x86", "x86_64"]:
+        version |= 1 << 2
+    elif not cpu_arch or cpu_arch in ["armeabi-v7a", "arm64-v8a"]:
+        pass
+    else:
+        raise ValueError(
+            "Don't know how to compute android:versionCode "
+            "for CPU arch %s" % cpu_arch
+        )
+
+    # 'p' bit is 1 for 64-bit architectures.
+    if cpu_arch in ["arm64-v8a", "x86_64"]:
+        version |= 1 << 1
+    elif cpu_arch in ["armeabi-v7a", "x86"]:
+        pass
+    else:
+        raise ValueError(
+            "Don't know how to compute android:versionCode "
+            "for CPU arch %s" % cpu_arch
+        )
+
+    # 'g' bit is currently always 1, but may depend on `min_sdk` in the future.
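+    # Worked example (editor's illustration, not from the original source):
+    # build ID 20230901120000 falls 70,140 hours after the 2015-09-01 cutoff,
+    # so an x86_64 build yields
+    # 0b1111000001000000000000000000000 | (70140 << 3) | 0b111,
+    # with the 'x', 'p', and 'g' bits all set.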
+    version |= 1 << 0
+
+    return version
+
+
+def android_version_code(buildid, *args, **kwargs):
+    base = int(str(buildid))
+    if base < V1_CUTOFF:
+        return android_version_code_v0(buildid, *args, **kwargs)
+    else:
+        return android_version_code_v1(buildid, *args, **kwargs)
+
+
+def main(argv):
+    parser = argparse.ArgumentParser("Generate an android:versionCode", add_help=False)
+    parser.add_argument(
+        "--verbose", action="store_true", default=False, help="Be verbose"
+    )
+    parser.add_argument(
+        "--with-android-cpu-arch",
+        dest="cpu_arch",
+        choices=["armeabi", "armeabi-v7a", "arm64-v8a", "x86", "x86_64"],
+        help="The target CPU architecture",
+    )
+    parser.add_argument(
+        "--with-android-min-sdk-version",
+        dest="min_sdk",
+        type=int,
+        default=0,
+        help="The minimum target SDK",
+    )
+    parser.add_argument(
+        "--with-android-max-sdk-version",
+        dest="max_sdk",
+        type=int,
+        default=0,
+        help="The maximum target SDK",
+    )
+    parser.add_argument("buildid", type=int, help="The input build ID")
+
+    args = parser.parse_args(argv)
+    code = android_version_code(
+        args.buildid, cpu_arch=args.cpu_arch, min_sdk=args.min_sdk, max_sdk=args.max_sdk
+    )
+    print(code)
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/artifact_builds.py b/python/mozbuild/mozbuild/artifact_builds.py
new file mode 100644
index 0000000000..a4d2a0bdd2
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifact_builds.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# The values correspond to entries at
+# https://tools.taskcluster.net/index/artifacts/#gecko.v2.mozilla-central.latest/gecko.v2.mozilla-central.latest
+JOB_CHOICES = {
+    "android-arm-opt",
+    "android-arm-debug",
+    "android-x86-opt",
+    "android-x86_64-opt",
+    "android-x86_64-debug",
+    "android-aarch64-opt",
+    "android-aarch64-debug",
+    "linux-opt",
+    "linux-debug",
+    "linux64-opt",
+    "linux64-debug",
+    "macosx64-opt",
+    "macosx64-debug",
+    "win32-opt",
+    "win32-debug",
+    "win64-opt",
+    "win64-debug",
+    "win64-aarch64-opt",
+    "win64-aarch64-debug",
+}
diff --git a/python/mozbuild/mozbuild/artifact_cache.py b/python/mozbuild/mozbuild/artifact_cache.py
new file mode 100644
index 0000000000..572953e1f7
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifact_cache.py
@@ -0,0 +1,251 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Fetch and cache artifacts from URLs.
+
+This module manages fetching artifacts from URLs and purging old
+artifacts using a simple Least Recently Used cache.
+
+This module requires certain modules be importable from the ambient Python
+environment. Consumers will need to arrange this themselves.
+
+The bulk of the complexity is in managing and persisting several caches. If
+we found a Python LRU cache that pickled cleanly, we could remove a lot of
+this code! Sadly, I found no such candidate implementations, so we pickle
+pylru caches manually.
+
+None of the instances (or the underlying caches) are safe for concurrent use.
+A future need, perhaps.
+"""
+
+
+import binascii
+import hashlib
+import logging
+import os
+
+import dlmanager
+import mozpack.path as mozpath
+import six
+import six.moves.urllib.parse as urlparse
+
+from mozbuild.util import mkdir
+
+# Using 'DownloadManager' through the provided interface, we can't directly
+# specify a 'chunk_size' for the 'Download' it manages. One way to get it to
+# use the 'chunk_size' we want is to monkeypatch the defaults of the init
+# function for the 'Download' class.
+CHUNK_SIZE = 16 * 1024 * 1024  # 16 MB in bytes.
+dl_init = dlmanager.Download.__init__
+dl_init.__defaults__ = (
+    dl_init.__defaults__[:1] + (CHUNK_SIZE,) + dl_init.__defaults__[2:]
+)
+
+
+# Minimum number of downloaded artifacts to keep. Each artifact can be very large,
+# so don't make this too large!
+MIN_CACHED_ARTIFACTS = 12
+
+# Maximum size of the downloaded artifacts to keep in cache, in bytes (2GiB).
+MAX_CACHED_ARTIFACTS_SIZE = 2 * 1024 * 1024 * 1024
+
+
+class ArtifactPersistLimit(dlmanager.PersistLimit):
+    """Handle persistence for a cache of artifacts.
+
+    When instantiating a DownloadManager, it starts by filling the
+    PersistLimit instance it's given with register_dir_content.
+    In practice, this registers all the files already in the cache directory.
+    After a download finishes, the newly downloaded file is registered, and the
+    oldest files registered to the PersistLimit instance are removed depending
+    on the size and file limits it's configured for.
+
+    This is all good, but there are a few tweaks we want here:
+
+    - We have pickle files in the cache directory that we don't want purged.
+    - Files that were just downloaded in the same session shouldn't be
+      purged. (if for some reason we end up downloading more than the default
+      max size, we don't want the files to be purged)
+
+    To achieve this, this subclass of PersistLimit inhibits the register_file
+    method for pickle files and tracks what files were downloaded in the same
+    session to avoid removing them.
+
+    The register_file method may be used to register cache matches too, so that
+    later sessions know they were freshly used.
+    """
+
+    def __init__(self, log=None):
+        super(ArtifactPersistLimit, self).__init__(
+            size_limit=MAX_CACHED_ARTIFACTS_SIZE, file_limit=MIN_CACHED_ARTIFACTS
+        )
+        self._log = log
+        self._registering_dir = False
+        self._downloaded_now = set()
+
+    def log(self, *args, **kwargs):
+        if self._log:
+            self._log(*args, **kwargs)
+
+    def register_file(self, path):
+        if (
+            path.endswith(".pickle")
+            or path.endswith(".checksum")
+            or os.path.basename(path) == ".metadata_never_index"
+        ):
+            return
+        if not self._registering_dir:
+            # Touch the file so that subsequent calls to a mach artifact
+            # command know it was recently used. While remove_old_files
+            # is based on access time, in various cases, the access time is not
+            # updated when just reading the file, so we force an update.
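+            # The touch is best-effort: os.utime can fail (for example on a
+            # read-only cache directory), and a stale access time only makes
+            # the file a purge candidate sooner, so failures are ignored.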
+ try: + os.utime(path, None) + except OSError: + pass + self._downloaded_now.add(path) + super(ArtifactPersistLimit, self).register_file(path) + + def register_dir_content(self, directory, pattern="*"): + self._registering_dir = True + super(ArtifactPersistLimit, self).register_dir_content(directory, pattern) + self._registering_dir = False + + def remove_old_files(self): + from dlmanager import fs + + files = sorted(self.files, key=lambda f: f.stat.st_atime) + kept = [] + while len(files) > self.file_limit and self._files_size >= self.size_limit: + f = files.pop(0) + if f.path in self._downloaded_now: + kept.append(f) + continue + try: + fs.remove(f.path) + except WindowsError: + # For some reason, on automation, we can't remove those files. + # So for now, ignore the error. + kept.append(f) + continue + self.log( + logging.INFO, + "artifact", + {"filename": f.path}, + "Purged artifact {filename}", + ) + self._files_size -= f.stat.st_size + self.files = files + kept + + def remove_all(self): + from dlmanager import fs + + for f in self.files: + fs.remove(f.path) + self._files_size = 0 + self.files = [] + + +class ArtifactCache(object): + """Fetch artifacts from URLS and purge least recently used artifacts from disk.""" + + def __init__(self, cache_dir, log=None, skip_cache=False): + mkdir(cache_dir, not_indexed=True) + self._cache_dir = cache_dir + self._log = log + self._skip_cache = skip_cache + self._persist_limit = ArtifactPersistLimit(log) + self._download_manager = dlmanager.DownloadManager( + self._cache_dir, persist_limit=self._persist_limit + ) + self._last_dl_update = -1 + + def log(self, *args, **kwargs): + if self._log: + self._log(*args, **kwargs) + + def fetch(self, url, force=False): + fname = os.path.basename(url) + try: + # Use the file name from the url if it looks like a hash digest. + if len(fname) not in (32, 40, 56, 64, 96, 128): + raise TypeError() + binascii.unhexlify(fname) + except (TypeError, binascii.Error): + # We download to a temporary name like HASH[:16]-basename to + # differentiate among URLs with the same basenames. We used to then + # extract the build ID from the downloaded artifact and use it to make a + # human readable unique name, but extracting build IDs is time consuming + # (especially on Mac OS X, where we must mount a large DMG file). + hash = hashlib.sha256(six.ensure_binary(url)).hexdigest()[:16] + # Strip query string and fragments. + basename = os.path.basename(urlparse.urlparse(url).path) + fname = hash + "-" + basename + + path = os.path.abspath(mozpath.join(self._cache_dir, fname)) + if self._skip_cache and os.path.exists(path): + self.log( + logging.INFO, + "artifact", + {"path": path}, + "Skipping cache: removing cached downloaded artifact {path}", + ) + os.remove(path) + + try: + dl = self._download_manager.download(url, fname) + + def download_progress(dl, bytes_so_far, total_size): + if not total_size: + return + percent = (float(bytes_so_far) / total_size) * 100 + now = int(percent / 5) + if now == self._last_dl_update: + return + self._last_dl_update = now + self.log( + logging.INFO, + "artifact", + { + "bytes_so_far": bytes_so_far, + "total_size": total_size, + "percent": percent, + }, + "Downloading... 
{percent:02.1f} %", + ) + + if dl: + self.log( + logging.INFO, + "artifact", + {"path": path}, + "Downloading artifact to local cache: {path}", + ) + dl.set_progress(download_progress) + dl.wait() + else: + self.log( + logging.INFO, + "artifact", + {"path": path}, + "Using artifact from local cache: {path}", + ) + # Avoid the file being removed if it was in the cache already. + path = os.path.join(self._cache_dir, fname) + self._persist_limit.register_file(path) + + return os.path.abspath(mozpath.join(self._cache_dir, fname)) + finally: + # Cancel any background downloads in progress. + self._download_manager.cancel() + + def clear_cache(self): + if self._skip_cache: + self.log( + logging.INFO, "artifact", {}, "Skipping cache: ignoring clear_cache!" + ) + return + + self._persist_limit.remove_all() diff --git a/python/mozbuild/mozbuild/artifact_commands.py b/python/mozbuild/mozbuild/artifact_commands.py new file mode 100644 index 0000000000..12184ce0d9 --- /dev/null +++ b/python/mozbuild/mozbuild/artifact_commands.py @@ -0,0 +1,615 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import argparse +import hashlib +import json +import logging +import os +import shutil +from collections import OrderedDict + +import mozversioncontrol +import six +from mach.decorators import Command, CommandArgument, SubCommand + +from mozbuild.artifact_builds import JOB_CHOICES +from mozbuild.base import MachCommandConditions as conditions +from mozbuild.util import ensureParentDir + +_COULD_NOT_FIND_ARTIFACTS_TEMPLATE = ( + "ERROR!!!!!! Could not find artifacts for a toolchain build named " + "`{build}`. Local commits, dirty/stale files, and other changes in your " + "checkout may cause this error. Make sure you are on a fresh, current " + "checkout of mozilla-central. Beware that commands like `mach bootstrap` " + "and `mach artifact` are unlikely to work on any versions of the code " + "besides recent revisions of mozilla-central." +) + + +class SymbolsAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + # If this function is called, it means the --symbols option was given, + # so we want to store the value `True` if no explicit value was given + # to the option. + setattr(namespace, self.dest, values or True) + + +class ArtifactSubCommand(SubCommand): + def __call__(self, func): + after = SubCommand.__call__(self, func) + args = [ + CommandArgument("--tree", metavar="TREE", type=str, help="Firefox tree."), + CommandArgument( + "--job", metavar="JOB", choices=JOB_CHOICES, help="Build job." + ), + CommandArgument( + "--verbose", "-v", action="store_true", help="Print verbose output." + ), + ] + for arg in args: + after = arg(after) + return after + + +# Fetch and install binary artifacts from Mozilla automation. + + +@Command( + "artifact", + category="post-build", + description="Use pre-built artifacts to build Firefox.", +) +def artifact(command_context): + """Download, cache, and install pre-built binary artifacts to build Firefox. + + Use ``mach build`` as normal to freshen your installed binary libraries: + artifact builds automatically download, cache, and install binary + artifacts from Mozilla automation, replacing whatever may be in your + object directory. Use ``mach artifact last`` to see what binary artifacts + were last used. + + Never build libxul again! 
+ + """ + pass + + +def _make_artifacts( + command_context, + tree=None, + job=None, + skip_cache=False, + download_tests=True, + download_symbols=False, + download_maven_zip=False, + no_process=False, +): + state_dir = command_context._mach_context.state_dir + cache_dir = os.path.join(state_dir, "package-frontend") + + hg = None + if conditions.is_hg(command_context): + hg = command_context.substs["HG"] + + git = None + if conditions.is_git(command_context): + git = command_context.substs["GIT"] + + # If we're building Thunderbird, we should be checking for comm-central artifacts. + topsrcdir = command_context.substs.get("commtopsrcdir", command_context.topsrcdir) + + if download_maven_zip: + if download_tests: + raise ValueError("--maven-zip requires --no-tests") + if download_symbols: + raise ValueError("--maven-zip requires no --symbols") + if not no_process: + raise ValueError("--maven-zip requires --no-process") + + from mozbuild.artifacts import Artifacts + + artifacts = Artifacts( + tree, + command_context.substs, + command_context.defines, + job, + log=command_context.log, + cache_dir=cache_dir, + skip_cache=skip_cache, + hg=hg, + git=git, + topsrcdir=topsrcdir, + download_tests=download_tests, + download_symbols=download_symbols, + download_maven_zip=download_maven_zip, + no_process=no_process, + mozbuild=command_context, + ) + return artifacts + + +@ArtifactSubCommand("artifact", "install", "Install a good pre-built artifact.") +@CommandArgument( + "source", + metavar="SRC", + nargs="?", + type=str, + help="Where to fetch and install artifacts from. Can be omitted, in " + "which case the current hg repository is inspected; an hg revision; " + "a remote URL; or a local file.", + default=None, +) +@CommandArgument( + "--skip-cache", + action="store_true", + help="Skip all local caches to force re-fetching remote artifacts.", + default=False, +) +@CommandArgument("--no-tests", action="store_true", help="Don't install tests.") +@CommandArgument("--symbols", nargs="?", action=SymbolsAction, help="Download symbols.") +@CommandArgument("--distdir", help="Where to install artifacts to.") +@CommandArgument( + "--no-process", + action="store_true", + help="Don't process (unpack) artifact packages, just download them.", +) +@CommandArgument( + "--maven-zip", action="store_true", help="Download Maven zip (Android-only)." 
+) +def artifact_install( + command_context, + source=None, + skip_cache=False, + tree=None, + job=None, + verbose=False, + no_tests=False, + symbols=False, + distdir=None, + no_process=False, + maven_zip=False, +): + command_context._set_log_level(verbose) + artifacts = _make_artifacts( + command_context, + tree=tree, + job=job, + skip_cache=skip_cache, + download_tests=not no_tests, + download_symbols=symbols, + download_maven_zip=maven_zip, + no_process=no_process, + ) + + return artifacts.install_from(source, distdir or command_context.distdir) + + +@ArtifactSubCommand( + "artifact", + "clear-cache", + "Delete local artifacts and reset local artifact cache.", +) +def artifact_clear_cache(command_context, tree=None, job=None, verbose=False): + command_context._set_log_level(verbose) + artifacts = _make_artifacts(command_context, tree=tree, job=job) + artifacts.clear_cache() + return 0 + + +@SubCommand("artifact", "toolchain") +@CommandArgument("--verbose", "-v", action="store_true", help="Print verbose output.") +@CommandArgument( + "--cache-dir", + metavar="DIR", + help="Directory where to store the artifacts cache", +) +@CommandArgument( + "--skip-cache", + action="store_true", + help="Skip all local caches to force re-fetching remote artifacts.", + default=False, +) +@CommandArgument( + "--from-build", + metavar="BUILD", + nargs="+", + help="Download toolchains resulting from the given build(s); " + "BUILD is a name of a toolchain task, e.g. linux64-clang", +) +@CommandArgument( + "--from-task", + metavar="TASK_ID:ARTIFACT", + nargs="+", + help="Download toolchain artifact from a given task.", +) +@CommandArgument( + "--tooltool-manifest", + metavar="MANIFEST", + help="Explicit tooltool manifest to process", +) +@CommandArgument( + "--no-unpack", action="store_true", help="Do not unpack any downloaded file" +) +@CommandArgument( + "--retry", type=int, default=4, help="Number of times to retry failed downloads" +) +@CommandArgument( + "--bootstrap", + action="store_true", + help="Whether this is being called from bootstrap. " + "This verifies the toolchain is annotated as a toolchain used for local development.", +) +@CommandArgument( + "--artifact-manifest", + metavar="FILE", + help="Store a manifest about the downloaded taskcluster artifacts", +) +def artifact_toolchain( + command_context, + verbose=False, + cache_dir=None, + skip_cache=False, + from_build=(), + from_task=(), + tooltool_manifest=None, + no_unpack=False, + retry=0, + bootstrap=False, + artifact_manifest=None, +): + """Download, cache and install pre-built toolchains.""" + import time + + import redo + import requests + from taskgraph.util.taskcluster import get_artifact_url + + from mozbuild.action.tooltool import FileRecord, open_manifest, unpack_file + from mozbuild.artifacts import ArtifactCache + + start = time.monotonic() + command_context._set_log_level(verbose) + # Normally, we'd use command_context.log_manager.enable_unstructured(), + # but that enables all logging, while we only really want tooltool's + # and it also makes structured log output twice. + # So we manually do what it does, and limit that to the tooltool + # logger. 
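+    # Concretely: attach mach's terminal handler to just the tooltool and
+    # redo loggers, and install the structured filter so each message is
+    # printed once instead of twice.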
+ if command_context.log_manager.terminal_handler: + logging.getLogger("mozbuild.action.tooltool").addHandler( + command_context.log_manager.terminal_handler + ) + logging.getLogger("redo").addHandler( + command_context.log_manager.terminal_handler + ) + command_context.log_manager.terminal_handler.addFilter( + command_context.log_manager.structured_filter + ) + if not cache_dir: + cache_dir = os.path.join(command_context._mach_context.state_dir, "toolchains") + + tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net") + taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL") + if taskcluster_proxy_url: + tooltool_url = "{}/{}".format(taskcluster_proxy_url, tooltool_host) + else: + tooltool_url = "https://{}".format(tooltool_host) + + cache = ArtifactCache( + cache_dir=cache_dir, log=command_context.log, skip_cache=skip_cache + ) + + class DownloadRecord(FileRecord): + def __init__(self, url, *args, **kwargs): + super(DownloadRecord, self).__init__(*args, **kwargs) + self.url = url + self.basename = self.filename + + def fetch_with(self, cache): + self.filename = cache.fetch(self.url) + return self.filename + + def validate(self): + if self.size is None and self.digest is None: + return True + return super(DownloadRecord, self).validate() + + class ArtifactRecord(DownloadRecord): + def __init__(self, task_id, artifact_name): + for _ in redo.retrier(attempts=retry + 1, sleeptime=60): + cot = cache._download_manager.session.get( + get_artifact_url(task_id, "public/chain-of-trust.json") + ) + if cot.status_code >= 500: + continue + cot.raise_for_status() + break + else: + cot.raise_for_status() + + digest = algorithm = None + data = json.loads(cot.text) + for algorithm, digest in ( + data.get("artifacts", {}).get(artifact_name, {}).items() + ): + pass + + name = os.path.basename(artifact_name) + artifact_url = get_artifact_url( + task_id, + artifact_name, + use_proxy=not artifact_name.startswith("public/"), + ) + super(ArtifactRecord, self).__init__( + artifact_url, name, None, digest, algorithm, unpack=True + ) + + records = OrderedDict() + downloaded = [] + + if tooltool_manifest: + manifest = open_manifest(tooltool_manifest) + for record in manifest.file_records: + url = "{}/{}/{}".format(tooltool_url, record.algorithm, record.digest) + records[record.filename] = DownloadRecord( + url, + record.filename, + record.size, + record.digest, + record.algorithm, + unpack=record.unpack, + version=record.version, + visibility=record.visibility, + ) + + if from_build: + if "MOZ_AUTOMATION" in os.environ: + command_context.log( + logging.ERROR, + "artifact", + {}, + "Do not use --from-build in automation; all dependencies " + "should be determined in the decision task.", + ) + return 1 + from gecko_taskgraph.optimize.strategies import IndexSearch + + from mozbuild.toolchains import toolchain_task_definitions + + tasks = toolchain_task_definitions() + + for b in from_build: + user_value = b + + if not b.startswith("toolchain-"): + b = "toolchain-{}".format(b) + + task = tasks.get(b) + if not task: + command_context.log( + logging.ERROR, + "artifact", + {"build": user_value}, + "Could not find a toolchain build named `{build}`", + ) + return 1 + + # Ensure that toolchains installed by `mach bootstrap` have the + # `local-toolchain attribute set. Taskgraph ensures that these + # are built on trunk projects, so the task will be available to + # install here. 
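+            # Failing here keeps `mach bootstrap` from silently fetching a
+            # CI-only toolchain that was never intended for local development.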
+ if bootstrap and not task.attributes.get("local-toolchain"): + command_context.log( + logging.ERROR, + "artifact", + {"build": user_value}, + "Toolchain `{build}` is not annotated as used for local development.", + ) + return 1 + + artifact_name = task.attributes.get("toolchain-artifact") + command_context.log( + logging.DEBUG, + "artifact", + { + "name": artifact_name, + "index": task.optimization.get("index-search"), + }, + "Searching for {name} in {index}", + ) + deadline = None + task_id = IndexSearch().should_replace_task( + task, {}, deadline, task.optimization.get("index-search", []) + ) + if task_id in (True, False) or not artifact_name: + command_context.log( + logging.ERROR, + "artifact", + {"build": user_value}, + _COULD_NOT_FIND_ARTIFACTS_TEMPLATE, + ) + # Get and print some helpful info for diagnosis. + repo = mozversioncontrol.get_repository_object( + command_context.topsrcdir + ) + if not isinstance(repo, mozversioncontrol.SrcRepository): + changed_files = set(repo.get_outgoing_files()) | set( + repo.get_changed_files() + ) + if changed_files: + command_context.log( + logging.ERROR, + "artifact", + {}, + "Hint: consider reverting your local changes " + "to the following files: %s" % sorted(changed_files), + ) + if "TASKCLUSTER_ROOT_URL" in os.environ: + command_context.log( + logging.ERROR, + "artifact", + {"build": user_value}, + "Due to the environment variable TASKCLUSTER_ROOT_URL " + "being set, the artifacts were expected to be found " + "on {}. If this was unintended, unset " + "TASKCLUSTER_ROOT_URL and try again.".format( + os.environ["TASKCLUSTER_ROOT_URL"] + ), + ) + return 1 + + command_context.log( + logging.DEBUG, + "artifact", + {"name": artifact_name, "task_id": task_id}, + "Found {name} in {task_id}", + ) + + record = ArtifactRecord(task_id, artifact_name) + records[record.filename] = record + + # Handle the list of files of the form task_id:path from --from-task. + for f in from_task or (): + task_id, colon, name = f.partition(":") + if not colon: + command_context.log( + logging.ERROR, + "artifact", + {}, + "Expected an argument of the form task_id:path", + ) + return 1 + record = ArtifactRecord(task_id, name) + records[record.filename] = record + + for record in six.itervalues(records): + command_context.log( + logging.INFO, + "artifact", + {"name": record.basename}, + "Setting up artifact {name}", + ) + valid = False + # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh + for attempt, _ in enumerate(redo.retrier(attempts=retry + 1, sleeptime=60)): + try: + record.fetch_with(cache) + except ( + requests.exceptions.HTTPError, + requests.exceptions.ChunkedEncodingError, + requests.exceptions.ConnectionError, + ) as e: + + if isinstance(e, requests.exceptions.HTTPError): + # The relengapi proxy likes to return error 400 bad request + # which seems improbably to be due to our (simple) GET + # being borked. + status = e.response.status_code + should_retry = status >= 500 or status == 400 + else: + should_retry = True + + if should_retry or attempt < retry: + level = logging.WARN + else: + level = logging.ERROR + command_context.log(level, "artifact", {}, str(e)) + if not should_retry: + break + if attempt < retry: + command_context.log( + logging.INFO, "artifact", {}, "Will retry in a moment..." + ) + continue + try: + valid = record.validate() + except Exception: + pass + if not valid: + os.unlink(record.filename) + if attempt < retry: + command_context.log( + logging.INFO, + "artifact", + {}, + "Corrupt download. 
Will retry in a moment...", + ) + continue + + downloaded.append(record) + break + + if not valid: + command_context.log( + logging.ERROR, + "artifact", + {"name": record.basename}, + "Failed to download {name}", + ) + return 1 + + artifacts = {} if artifact_manifest else None + + for record in downloaded: + local = os.path.join(os.getcwd(), record.basename) + if os.path.exists(local): + os.unlink(local) + # unpack_file needs the file with its final name to work + # (https://github.com/mozilla/build-tooltool/issues/38), so we + # need to copy it, even though we remove it later. Use hard links + # when possible. + try: + os.link(record.filename, local) + except Exception: + shutil.copy(record.filename, local) + # Keep a sha256 of each downloaded file, for the chain-of-trust + # validation. + if artifact_manifest is not None: + with open(local, "rb") as fh: + h = hashlib.sha256() + while True: + data = fh.read(1024 * 1024) + if not data: + break + h.update(data) + artifacts[record.url] = {"sha256": h.hexdigest()} + if record.unpack and not no_unpack: + unpack_file(local) + os.unlink(local) + + if not downloaded: + command_context.log(logging.ERROR, "artifact", {}, "Nothing to download") + if from_task: + return 1 + + if artifacts: + ensureParentDir(artifact_manifest) + with open(artifact_manifest, "w") as fh: + json.dump(artifacts, fh, indent=4, sort_keys=True) + + if "MOZ_AUTOMATION" in os.environ: + end = time.monotonic() + + perfherder_data = { + "framework": {"name": "build_metrics"}, + "suites": [ + { + "name": "mach_artifact_toolchain", + "value": end - start, + "lowerIsBetter": True, + "shouldAlert": False, + "subtests": [], + } + ], + } + command_context.log( + logging.INFO, + "perfherder", + {"data": json.dumps(perfherder_data)}, + "PERFHERDER_DATA: {data}", + ) + + return 0 diff --git a/python/mozbuild/mozbuild/artifacts.py b/python/mozbuild/mozbuild/artifacts.py new file mode 100644 index 0000000000..1083c0c997 --- /dev/null +++ b/python/mozbuild/mozbuild/artifacts.py @@ -0,0 +1,1661 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Fetch build artifacts from a Firefox tree. + +This provides an (at-the-moment special purpose) interface to download Android +artifacts from Mozilla's Task Cluster. + +This module performs the following steps: + +* find a candidate hg parent revision. At one time we used the local pushlog, + which required the mozext hg extension. This isn't feasible with git, and it + is only mildly less efficient to not use the pushlog, so we don't use it even + when querying hg. + +* map the candidate parent to candidate Task Cluster tasks and artifact + locations. Pushlog entries might not correspond to tasks (yet), and those + tasks might not produce the desired class of artifacts. + +* fetch fresh Task Cluster artifacts and purge old artifacts, using a simple + Least Recently Used cache. + +* post-process fresh artifacts, to speed future installation. In particular, + extract relevant files from Mac OS X DMG files into a friendly archive format + so we don't have to mount DMG files frequently. + +This module requires certain modules be importable from the ambient Python +environment. ``mach artifact`` ensures these modules are available, but other +consumers will need to arrange this themselves. 
+""" + + +import collections +import functools +import glob +import logging +import operator +import os +import pickle +import re +import shutil +import stat +import subprocess +import tarfile +import tempfile +import zipfile +from contextlib import contextmanager +from io import BufferedReader +from urllib.parse import urlparse + +import buildconfig +import mozinstall +import mozpack.path as mozpath +import pylru +import requests +import six +from mach.util import UserError +from mozpack import executables +from mozpack.files import JarFinder, TarFinder +from mozpack.mozjar import JarReader, JarWriter +from mozpack.packager.unpack import UnpackFinder +from taskgraph.util.taskcluster import find_task_id, get_artifact_url, list_artifacts + +from mozbuild.artifact_builds import JOB_CHOICES +from mozbuild.artifact_cache import ArtifactCache +from mozbuild.util import FileAvoidWrite, ensureParentDir, mkdir + +# Number of candidate pushheads to cache per parent changeset. +NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 + +# Number of parent changesets to consider as possible pushheads. +# There isn't really such a thing as a reasonable default here, because we don't +# know how many pushheads we'll need to look at to find a build with our artifacts, +# and we don't know how many changesets will be in each push. For now we assume +# we'll find a build in the last 50 pushes, assuming each push contains 10 changesets. +NUM_REVISIONS_TO_QUERY = 500 + +MAX_CACHED_TASKS = 400 # Number of pushheads to cache Task Cluster task data for. + +# Downloaded artifacts are cached, and a subset of their contents extracted for +# easy installation. This is most noticeable on Mac OS X: since mounting and +# copying from DMG files is very slow, we extract the desired binaries to a +# separate archive for fast re-installation. +PROCESSED_SUFFIX = ".processed.jar" + + +class ArtifactJob(object): + trust_domain = "gecko" + default_candidate_trees = [ + "releases/mozilla-release", + ] + nightly_candidate_trees = [ + "mozilla-central", + "integration/autoland", + ] + beta_candidate_trees = [ + "releases/mozilla-beta", + ] + # The list below list should be updated when we have new ESRs. + esr_candidate_trees = [ + "releases/mozilla-esr102", + "releases/mozilla-esr115", + ] + try_tree = "try" + + # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in. + # Each item is a pair of (pattern, (src_prefix, dest_prefix), where src_prefix + # is the prefix of the pattern relevant to its location in the archive, and + # dest_prefix is the prefix to be added that will yield the final path relative + # to dist/. + test_artifact_patterns = { + ("bin/BadCertAndPinningServer", ("bin", "bin")), + ("bin/DelegatedCredentialsServer", ("bin", "bin")), + ("bin/EncryptedClientHelloServer", ("bin", "bin")), + ("bin/FaultyServer", ("bin", "bin")), + ("bin/GenerateOCSPResponse", ("bin", "bin")), + ("bin/OCSPStaplingServer", ("bin", "bin")), + ("bin/SanctionsTestServer", ("bin", "bin")), + ("bin/certutil", ("bin", "bin")), + ("bin/geckodriver", ("bin", "bin")), + ("bin/pk12util", ("bin", "bin")), + ("bin/screentopng", ("bin", "bin")), + ("bin/ssltunnel", ("bin", "bin")), + ("bin/xpcshell", ("bin", "bin")), + ("bin/http3server", ("bin", "bin")), + ("bin/plugins/gmp-*/*/*", ("bin/plugins", "bin")), + ("bin/plugins/*", ("bin/plugins", "plugins")), + } + + # We can tell our input is a test archive by this suffix, which happens to + # be the same across platforms. 
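+    # These suffixes drive the dispatch in process_artifact() below: test
+    # archives, symbol archives, and extra archives each take their own
+    # processing path.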
+ _test_zip_archive_suffix = ".common.tests.zip" + _test_tar_archive_suffix = ".common.tests.tar.gz" + + # A map of extra archives to fetch and unpack. An extra archive might + # include optional build output to incorporate into the local artifact + # build. Test archives and crashreporter symbols could be extra archives + # but they require special handling; this mechanism is generic and intended + # only for the simplest cases. + # + # Each suffix key matches a candidate archive (i.e., an artifact produced by + # an upstream build). Each value is itself a dictionary that must contain + # the following keys: + # + # - `description`: a purely informational string description. + # - `src_prefix`: entry names in the archive with leading `src_prefix` will + # have the prefix stripped. + # - `dest_prefix`: entry names in the archive will have `dest_prefix` + # prepended. + # + # The entries in the archive, suitably renamed, will be extracted into `dist`. + _extra_archives = { + ".xpt_artifacts.zip": { + "description": "XPT Artifacts", + "src_prefix": "", + "dest_prefix": "xpt_artifacts", + }, + } + _extra_archive_suffixes = tuple(sorted(_extra_archives.keys())) + + def __init__( + self, + log=None, + download_tests=True, + download_symbols=False, + download_maven_zip=False, + substs=None, + mozbuild=None, + ): + self._package_re = re.compile(self.package_re) + self._tests_re = None + if download_tests: + self._tests_re = re.compile( + r"public/build/(en-US/)?target\.common\.tests\.(zip|tar\.gz)$" + ) + self._maven_zip_re = None + if download_maven_zip: + self._maven_zip_re = re.compile(r"public/build/target\.maven\.zip$") + self._log = log + self._substs = substs + self._symbols_archive_suffix = None + if download_symbols == "full": + self._symbols_archive_suffix = "crashreporter-symbols-full.tar.zst" + elif download_symbols: + self._symbols_archive_suffix = "crashreporter-symbols.zip" + self._mozbuild = mozbuild + self._candidate_trees = None + + def log(self, *args, **kwargs): + if self._log: + self._log(*args, **kwargs) + + def find_candidate_artifacts(self, artifacts): + # TODO: Handle multiple artifacts, taking the latest one. 
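+        # Yield the name of every artifact worth fetching, and remember
+        # whether the required archives (tests, maven zip) were seen so the
+        # checks below can fail loudly when one is missing.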
+ tests_artifact = None + maven_zip_artifact = None + for artifact in artifacts: + name = artifact["name"] + if self._maven_zip_re: + if self._maven_zip_re.match(name): + maven_zip_artifact = name + yield name + else: + continue + elif self._package_re and self._package_re.match(name): + yield name + elif self._tests_re and self._tests_re.match(name): + tests_artifact = name + yield name + elif self._symbols_archive_suffix and name.endswith( + self._symbols_archive_suffix + ): + yield name + elif name.endswith(ArtifactJob._extra_archive_suffixes): + yield name + else: + self.log( + logging.DEBUG, + "artifact", + {"name": name}, + "Not yielding artifact named {name} as a candidate artifact", + ) + if self._tests_re and not tests_artifact: + raise ValueError( + 'Expected tests archive matching "{re}", but ' + "found none!".format(re=self._tests_re) + ) + if self._maven_zip_re and not maven_zip_artifact: + raise ValueError( + 'Expected Maven zip archive matching "{re}", but ' + "found none!".format(re=self._maven_zip_re) + ) + + @contextmanager + def get_writer(self, **kwargs): + with JarWriter(**kwargs) as writer: + yield writer + + def process_artifact(self, filename, processed_filename): + if filename.endswith(ArtifactJob._test_zip_archive_suffix) and self._tests_re: + return self.process_tests_zip_artifact(filename, processed_filename) + if filename.endswith(ArtifactJob._test_tar_archive_suffix) and self._tests_re: + return self.process_tests_tar_artifact(filename, processed_filename) + if self._symbols_archive_suffix and filename.endswith( + self._symbols_archive_suffix + ): + return self.process_symbols_archive(filename, processed_filename) + if filename.endswith(ArtifactJob._extra_archive_suffixes): + return self.process_extra_archive(filename, processed_filename) + return self.process_package_artifact(filename, processed_filename) + + def process_package_artifact(self, filename, processed_filename): + raise NotImplementedError( + "Subclasses must specialize process_package_artifact!" + ) + + def process_tests_zip_artifact(self, filename, processed_filename): + from mozbuild.action.test_archive import OBJDIR_TEST_FILES + + added_entry = False + + with self.get_writer(file=processed_filename, compress_level=5) as writer: + reader = JarReader(filename) + for filename, entry in six.iteritems(reader.entries): + for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns: + if not mozpath.match(filename, pattern): + continue + destpath = mozpath.relpath(filename, src_prefix) + destpath = mozpath.join(dest_prefix, destpath) + self.log( + logging.DEBUG, + "artifact", + {"destpath": destpath}, + "Adding {destpath} to processed archive", + ) + mode = entry["external_attr"] >> 16 + writer.add(destpath.encode("utf-8"), reader[filename], mode=mode) + added_entry = True + break + + if filename.endswith(".ini"): + # The artifact build writes test .ini files into the object + # directory; they don't come from the upstream test archive. 
+ self.log( + logging.DEBUG, + "artifact", + {"filename": filename}, + "Skipping test INI file {filename}", + ) + continue + + for files_entry in OBJDIR_TEST_FILES.values(): + origin_pattern = files_entry["pattern"] + leaf_filename = filename + if "dest" in files_entry: + dest = files_entry["dest"] + origin_pattern = mozpath.join(dest, origin_pattern) + leaf_filename = filename[len(dest) + 1 :] + if mozpath.match(filename, origin_pattern): + destpath = mozpath.join( + "..", files_entry["base"], leaf_filename + ) + mode = entry["external_attr"] >> 16 + writer.add( + destpath.encode("utf-8"), reader[filename], mode=mode + ) + + if not added_entry: + raise ValueError( + 'Archive format changed! No pattern from "{patterns}"' + "matched an archive path.".format( + patterns=LinuxArtifactJob.test_artifact_patterns + ) + ) + + def process_tests_tar_artifact(self, filename, processed_filename): + from mozbuild.action.test_archive import OBJDIR_TEST_FILES + + added_entry = False + + with self.get_writer(file=processed_filename, compress_level=5) as writer: + with tarfile.open(filename) as reader: + for filename, entry in TarFinder(filename, reader): + for ( + pattern, + (src_prefix, dest_prefix), + ) in self.test_artifact_patterns: + if not mozpath.match(filename, pattern): + continue + + destpath = mozpath.relpath(filename, src_prefix) + destpath = mozpath.join(dest_prefix, destpath) + self.log( + logging.DEBUG, + "artifact", + {"destpath": destpath}, + "Adding {destpath} to processed archive", + ) + mode = entry.mode + writer.add(destpath.encode("utf-8"), entry.open(), mode=mode) + added_entry = True + break + + if filename.endswith(".ini"): + # The artifact build writes test .ini files into the object + # directory; they don't come from the upstream test archive. + self.log( + logging.DEBUG, + "artifact", + {"filename": filename}, + "Skipping test INI file {filename}", + ) + continue + + for files_entry in OBJDIR_TEST_FILES.values(): + origin_pattern = files_entry["pattern"] + leaf_filename = filename + if "dest" in files_entry: + dest = files_entry["dest"] + origin_pattern = mozpath.join(dest, origin_pattern) + leaf_filename = filename[len(dest) + 1 :] + if mozpath.match(filename, origin_pattern): + destpath = mozpath.join( + "..", files_entry["base"], leaf_filename + ) + mode = entry.mode + writer.add( + destpath.encode("utf-8"), entry.open(), mode=mode + ) + + if not added_entry: + raise ValueError( + 'Archive format changed! 
No pattern from "{patterns}"' + "matched an archive path.".format( + patterns=LinuxArtifactJob.test_artifact_patterns + ) + ) + + def process_symbols_archive( + self, filename, processed_filename, skip_compressed=False + ): + with self.get_writer(file=processed_filename, compress_level=5) as writer: + for filename, entry in self.iter_artifact_archive(filename): + if skip_compressed and filename.endswith(".gz"): + self.log( + logging.DEBUG, + "artifact", + {"filename": filename}, + "Skipping compressed ELF debug symbol file {filename}", + ) + continue + destpath = mozpath.join("crashreporter-symbols", filename) + self.log( + logging.INFO, + "artifact", + {"destpath": destpath}, + "Adding {destpath} to processed archive", + ) + writer.add(destpath.encode("utf-8"), entry) + + def process_extra_archive(self, filename, processed_filename): + for suffix, extra_archive in ArtifactJob._extra_archives.items(): + if filename.endswith(suffix): + self.log( + logging.INFO, + "artifact", + {"filename": filename, "description": extra_archive["description"]}, + '"{filename}" is a recognized extra archive ({description})', + ) + break + else: + raise ValueError('"{}" is not a recognized extra archive!'.format(filename)) + + src_prefix = extra_archive["src_prefix"] + dest_prefix = extra_archive["dest_prefix"] + + with self.get_writer(file=processed_filename, compress_level=5) as writer: + for filename, entry in self.iter_artifact_archive(filename): + if not filename.startswith(src_prefix): + self.log( + logging.DEBUG, + "artifact", + {"filename": filename, "src_prefix": src_prefix}, + "Skipping extra archive item {filename} " + "that does not start with {src_prefix}", + ) + continue + destpath = mozpath.relpath(filename, src_prefix) + destpath = mozpath.join(dest_prefix, destpath) + self.log( + logging.INFO, + "artifact", + {"destpath": destpath}, + "Adding {destpath} to processed archive", + ) + writer.add(destpath.encode("utf-8"), entry) + + def iter_artifact_archive(self, filename): + if filename.endswith(".zip"): + reader = JarReader(filename) + for filename in reader.entries: + yield filename, reader[filename] + elif filename.endswith(".tar.zst") and self._mozbuild is not None: + self._mozbuild._ensure_zstd() + import zstandard + + ctx = zstandard.ZstdDecompressor() + uncompressed = ctx.stream_reader(open(filename, "rb")) + with tarfile.open( + mode="r|", fileobj=uncompressed, bufsize=1024 * 1024 + ) as reader: + while True: + info = reader.next() + if info is None: + break + yield info.name, reader.extractfile(info) + else: + raise RuntimeError("Unsupported archive type for %s" % filename) + + @property + def candidate_trees(self): + if not self._candidate_trees: + self._candidate_trees = self.select_candidate_trees() + return self._candidate_trees + + def select_candidate_trees(self): + source_repo = buildconfig.substs.get("MOZ_SOURCE_REPO", "") + version_display = buildconfig.substs.get("MOZ_APP_VERSION_DISPLAY") + + if "esr" in version_display or "esr" in source_repo: + return self.esr_candidate_trees + elif re.search("a\d+$", version_display): + return self.nightly_candidate_trees + elif re.search("b\d+$", version_display): + return self.beta_candidate_trees + + return self.default_candidate_trees + + +class AndroidArtifactJob(ArtifactJob): + package_re = r"public/build/geckoview_example\.apk$" + product = "mobile" + + package_artifact_patterns = {"**/*.so"} + + def process_package_artifact(self, filename, processed_filename): + # Extract all .so files into the root, which will get copied into 
dist/bin. + with self.get_writer(file=processed_filename, compress_level=5) as writer: + for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))): + if not any( + mozpath.match(p, pat) for pat in self.package_artifact_patterns + ): + continue + + dirname, basename = os.path.split(p) + self.log( + logging.DEBUG, + "artifact", + {"basename": basename}, + "Adding {basename} to processed archive", + ) + + basedir = "bin" + if not basename.endswith(".so"): + basedir = mozpath.join("bin", dirname.lstrip("assets/")) + basename = mozpath.join(basedir, basename) + writer.add(basename.encode("utf-8"), f.open()) + + def process_symbols_archive(self, filename, processed_filename): + ArtifactJob.process_symbols_archive( + self, filename, processed_filename, skip_compressed=True + ) + + if not self._symbols_archive_suffix.startswith("crashreporter-symbols-full."): + return + + import gzip + + with self.get_writer(file=processed_filename, compress_level=5) as writer: + for filename, entry in self.iter_artifact_archive(filename): + if not filename.endswith(".gz"): + continue + + # Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz" + # into "libxul.so.dbg". + # + # After running `settings append target.debug-file-search-paths $file`, + # where file=/path/to/topobjdir/dist/crashreporter-symbols, + # Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files. + # + # There are other paths that will work but none seem more desireable. See + # https://github.com/llvm-mirror/lldb/blob/882670690ca69d9dd96b7236c620987b11894af9/source/Host/common/Symbols.cpp#L324. + basename = os.path.basename(filename).replace(".gz", "") + destpath = mozpath.join("crashreporter-symbols", basename) + self.log( + logging.DEBUG, + "artifact", + {"destpath": destpath}, + "Adding uncompressed ELF debug symbol file " + "{destpath} to processed archive", + ) + writer.add(destpath.encode("utf-8"), gzip.GzipFile(fileobj=entry)) + + +class LinuxArtifactJob(ArtifactJob): + package_re = r"public/build/target\.tar\.bz2$" + product = "firefox" + + _package_artifact_patterns = { + "{product}/crashreporter", + "{product}/dependentlibs.list", + "{product}/{product}", + "{product}/{product}-bin", + "{product}/minidump-analyzer", + "{product}/pingsender", + "{product}/plugin-container", + "{product}/updater", + "{product}/glxtest", + "{product}/vaapitest", + "{product}/**/*.so", + # Preserve signatures when present. + "{product}/**/*.sig", + } + + @property + def package_artifact_patterns(self): + return {p.format(product=self.product) for p in self._package_artifact_patterns} + + def process_package_artifact(self, filename, processed_filename): + added_entry = False + + with self.get_writer(file=processed_filename, compress_level=5) as writer: + with tarfile.open(filename) as reader: + for p, f in UnpackFinder(TarFinder(filename, reader)): + if not any( + mozpath.match(p, pat) for pat in self.package_artifact_patterns + ): + continue + + # We strip off the relative "firefox/" bit from the path, + # but otherwise preserve it. + destpath = mozpath.join("bin", mozpath.relpath(p, self.product)) + self.log( + logging.DEBUG, + "artifact", + {"destpath": destpath}, + "Adding {destpath} to processed archive", + ) + writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode) + added_entry = True + + if not added_entry: + raise ValueError( + 'Archive format changed! 
No pattern from "{patterns}" ' + "matched an archive path.".format( + patterns=LinuxArtifactJob.package_artifact_patterns + ) + ) + + +class ResignJarWriter(JarWriter): + def __init__(self, job, **kwargs): + super().__init__(**kwargs) + self._job = job + + def add(self, name, data, mode=None): + if self._job._substs["HOST_OS_ARCH"] == "Darwin": + # Wrap in a BufferedReader so that executable.get_type can peek at the + # data signature without subsequent read() being affected. + data = BufferedReader(data) + if executables.get_type(data) == executables.MACHO: + # If the file is a Mach-O binary, we run `codesign -s - -f` against + # it to force a local codesign against the original binary, which is + # likely unsigned. As of writing, only arm64 macs require codesigned + # binaries, but it doesn't hurt to do it on intel macs as well + # preemptively, because they could end up with the same requirement + # in future versions of macOS. + tmp = tempfile.NamedTemporaryFile(delete=False) + try: + shutil.copyfileobj(data, tmp) + tmp.close() + self._job.log( + logging.DEBUG, + "artifact", + {"path": name.decode("utf-8")}, + "Re-signing {path}", + ) + subprocess.check_call( + ["codesign", "-s", "-", "-f", tmp.name], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + data = open(tmp.name, "rb") + finally: + os.unlink(tmp.name) + super().add(name, data, mode=mode) + + +class MacArtifactJob(ArtifactJob): + package_re = r"public/build/target\.dmg$" + product = "firefox" + + # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c". + _paths_no_keep_path = ( + "Contents/MacOS", + [ + "crashreporter.app/Contents/MacOS/crashreporter", + "{product}", + "{product}-bin", + "*.dylib", + "minidump-analyzer", + "pingsender", + "plugin-container.app/Contents/MacOS/plugin-container", + "updater.app/Contents/MacOS/org.mozilla.updater", + # 'xpcshell', + "XUL", + ], + ) + + @property + def paths_no_keep_path(self): + root, paths = self._paths_no_keep_path + return (root, [p.format(product=self.product) for p in paths]) + + @contextmanager + def get_writer(self, **kwargs): + with ResignJarWriter(self, **kwargs) as writer: + yield writer + + def process_package_artifact(self, filename, processed_filename): + tempdir = tempfile.mkdtemp() + oldcwd = os.getcwd() + try: + self.log( + logging.DEBUG, + "artifact", + {"tempdir": tempdir}, + "Unpacking DMG into {tempdir}", + ) + if self._substs["HOST_OS_ARCH"] == "Linux": + # This is a cross build, use hfsplus and dmg tools to extract the dmg. + os.chdir(tempdir) + with open(os.devnull, "wb") as devnull: + subprocess.check_call( + [ + self._substs["DMG_TOOL"], + "extract", + filename, + "extracted_img", + ], + stdout=devnull, + ) + subprocess.check_call( + [self._substs["HFS_TOOL"], "extracted_img", "extractall"], + stdout=devnull, + ) + else: + mozinstall.install(filename, tempdir) + + bundle_dirs = glob.glob(mozpath.join(tempdir, "*.app")) + if len(bundle_dirs) != 1: + raise ValueError( + "Expected one source bundle, found: {}".format(bundle_dirs) + ) + [source] = bundle_dirs + + # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c". 
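+            # For example, "Contents/Resources/gmp-clearkey/0.1/libclearkey.dylib"
+            # ends up at "dist/bin/gmp-clearkey/0.1/libclearkey.dylib".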
+ paths_keep_path = [ + ( + "Contents/Resources", + [ + "browser/components/libbrowsercomps.dylib", + "dependentlibs.list", + # 'firefox', + "gmp-clearkey/0.1/libclearkey.dylib", + # 'gmp-fake/1.0/libfake.dylib', + # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib', + ], + ) + ] + + with self.get_writer(file=processed_filename, compress_level=5) as writer: + root, paths = self.paths_no_keep_path + finder = UnpackFinder(mozpath.join(source, root)) + for path in paths: + for p, f in finder.find(path): + self.log( + logging.DEBUG, + "artifact", + {"path": p}, + "Adding {path} to processed archive", + ) + destpath = mozpath.join("bin", os.path.basename(p)) + writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode) + + for root, paths in paths_keep_path: + finder = UnpackFinder(mozpath.join(source, root)) + for path in paths: + for p, f in finder.find(path): + self.log( + logging.DEBUG, + "artifact", + {"path": p}, + "Adding {path} to processed archive", + ) + destpath = mozpath.join("bin", p) + writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode) + + finally: + os.chdir(oldcwd) + try: + shutil.rmtree(tempdir) + except (OSError, IOError): + self.log( + logging.WARN, + "artifact", + {"tempdir": tempdir}, + "Unable to delete {tempdir}", + ) + pass + + +class WinArtifactJob(ArtifactJob): + package_re = r"public/build/target\.(zip|tar\.gz)$" + product = "firefox" + + _package_artifact_patterns = { + "{product}/dependentlibs.list", + "{product}/**/*.dll", + "{product}/*.exe", + "{product}/*.tlb", + } + + @property + def package_artifact_patterns(self): + return {p.format(product=self.product) for p in self._package_artifact_patterns} + + # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in. + test_artifact_patterns = { + ("bin/BadCertAndPinningServer.exe", ("bin", "bin")), + ("bin/DelegatedCredentialsServer.exe", ("bin", "bin")), + ("bin/EncryptedClientHelloServer.exe", ("bin", "bin")), + ("bin/FaultyServer.exe", ("bin", "bin")), + ("bin/GenerateOCSPResponse.exe", ("bin", "bin")), + ("bin/OCSPStaplingServer.exe", ("bin", "bin")), + ("bin/SanctionsTestServer.exe", ("bin", "bin")), + ("bin/certutil.exe", ("bin", "bin")), + ("bin/geckodriver.exe", ("bin", "bin")), + ("bin/minidumpwriter.exe", ("bin", "bin")), + ("bin/pk12util.exe", ("bin", "bin")), + ("bin/screenshot.exe", ("bin", "bin")), + ("bin/ssltunnel.exe", ("bin", "bin")), + ("bin/xpcshell.exe", ("bin", "bin")), + ("bin/http3server.exe", ("bin", "bin")), + ("bin/plugins/gmp-*/*/*", ("bin/plugins", "bin")), + ("bin/plugins/*", ("bin/plugins", "plugins")), + ("bin/components/*", ("bin/components", "bin/components")), + } + + def process_package_artifact(self, filename, processed_filename): + added_entry = False + with self.get_writer(file=processed_filename, compress_level=5) as writer: + for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))): + if not any( + mozpath.match(p, pat) for pat in self.package_artifact_patterns + ): + continue + + # strip off the relative "firefox/" bit from the path: + basename = mozpath.relpath(p, self.product) + basename = mozpath.join("bin", basename) + self.log( + logging.DEBUG, + "artifact", + {"basename": basename}, + "Adding {basename} to processed archive", + ) + writer.add(basename.encode("utf-8"), f.open(), mode=f.mode) + added_entry = True + + if not added_entry: + raise ValueError( + 'Archive format changed! 
No pattern from "{patterns}"' + "matched an archive path.".format(patterns=self.artifact_patterns) + ) + + +class ThunderbirdMixin(object): + trust_domain = "comm" + product = "thunderbird" + try_tree = "try-comm-central" + + nightly_candidate_trees = [ + "comm-central", + ] + beta_candidate_trees = [ + "releases/comm-beta", + ] + # The list below list should be updated when we have new ESRs. + esr_candidate_trees = [ + "releases/comm-esr102", + "releases/comm-esr115", + ] + + +class LinuxThunderbirdArtifactJob(ThunderbirdMixin, LinuxArtifactJob): + pass + + +class MacThunderbirdArtifactJob(ThunderbirdMixin, MacArtifactJob): + pass + + +class WinThunderbirdArtifactJob(ThunderbirdMixin, WinArtifactJob): + pass + + +def startswithwhich(s, prefixes): + for prefix in prefixes: + if s.startswith(prefix): + return prefix + + +MOZ_JOB_DETAILS = { + j: { + "android": AndroidArtifactJob, + "linux": LinuxArtifactJob, + "macosx": MacArtifactJob, + "win": WinArtifactJob, + }[startswithwhich(j, ("android", "linux", "macosx", "win"))] + for j in JOB_CHOICES +} +COMM_JOB_DETAILS = { + j: { + "android": None, + "linux": LinuxThunderbirdArtifactJob, + "macosx": MacThunderbirdArtifactJob, + "win": WinThunderbirdArtifactJob, + }[startswithwhich(j, ("android", "linux", "macosx", "win"))] + for j in JOB_CHOICES +} + + +def cachedmethod(cachefunc): + """Decorator to wrap a class or instance method with a memoizing callable that + saves results in a (possibly shared) cache. + """ + + def decorator(method): + def wrapper(self, *args, **kwargs): + mapping = cachefunc(self) + if mapping is None: + return method(self, *args, **kwargs) + key = (method.__name__, args, tuple(sorted(kwargs.items()))) + try: + value = mapping[key] + return value + except KeyError: + pass + result = method(self, *args, **kwargs) + mapping[key] = result + return result + + return functools.update_wrapper(wrapper, method) + + return decorator + + +class CacheManager(object): + """Maintain an LRU cache. Provide simple persistence, including support for + loading and saving the state using a "with" block. Allow clearing the cache + and printing the cache for debugging. + + Provide simple logging. + """ + + def __init__( + self, + cache_dir, + cache_name, + cache_size, + cache_callback=None, + log=None, + skip_cache=False, + ): + self._skip_cache = skip_cache + self._cache = pylru.lrucache(cache_size, callback=cache_callback) + self._cache_filename = mozpath.join(cache_dir, cache_name + "-cache.pickle") + self._log = log + mkdir(cache_dir, not_indexed=True) + + def log(self, *args, **kwargs): + if self._log: + self._log(*args, **kwargs) + + def load_cache(self): + if self._skip_cache: + self.log( + logging.INFO, "artifact", {}, "Skipping cache: ignoring load_cache!" + ) + return + + try: + items = pickle.load(open(self._cache_filename, "rb")) + for key, value in items: + self._cache[key] = value + except Exception as e: + # Corrupt cache, perhaps? Sadly, pickle raises many different + # exceptions, so it's not worth trying to be fine grained here. + # We ignore any exception, so the cache is effectively dropped. + self.log( + logging.INFO, + "artifact", + {"filename": self._cache_filename, "exception": repr(e)}, + "Ignoring exception unpickling cache file {filename}: {exception}", + ) + pass + + def dump_cache(self): + if self._skip_cache: + self.log( + logging.INFO, "artifact", {}, "Skipping cache: ignoring dump_cache!" 
+ ) + return + + ensureParentDir(self._cache_filename) + pickle.dump( + list(reversed(list(self._cache.items()))), + open(self._cache_filename, "wb"), + -1, + ) + + def clear_cache(self): + if self._skip_cache: + self.log( + logging.INFO, "artifact", {}, "Skipping cache: ignoring clear_cache!" + ) + return + + with self: + self._cache.clear() + + def __enter__(self): + self.load_cache() + return self + + def __exit__(self, type, value, traceback): + self.dump_cache() + + +class PushheadCache(CacheManager): + """Helps map tree/revision pairs to parent pushheads according to the pushlog.""" + + def __init__(self, cache_dir, log=None, skip_cache=False): + CacheManager.__init__( + self, + cache_dir, + "pushhead_cache", + MAX_CACHED_TASKS, + log=log, + skip_cache=skip_cache, + ) + + @cachedmethod(operator.attrgetter("_cache")) + def parent_pushhead_id(self, tree, revision): + cset_url_tmpl = ( + "https://hg.mozilla.org/{tree}/json-pushes?" + "changeset={changeset}&version=2&tipsonly=1" + ) + req = requests.get( + cset_url_tmpl.format(tree=tree, changeset=revision), + headers={"Accept": "application/json"}, + ) + if req.status_code not in range(200, 300): + raise ValueError + result = req.json() + [found_pushid] = result["pushes"].keys() + return int(found_pushid) + + @cachedmethod(operator.attrgetter("_cache")) + def pushid_range(self, tree, start, end): + pushid_url_tmpl = ( + "https://hg.mozilla.org/{tree}/json-pushes?" + "startID={start}&endID={end}&version=2&tipsonly=1" + ) + + req = requests.get( + pushid_url_tmpl.format(tree=tree, start=start, end=end), + headers={"Accept": "application/json"}, + ) + result = req.json() + return [p["changesets"][-1] for p in result["pushes"].values()] + + +class TaskCache(CacheManager): + """Map candidate pushheads to Task Cluster task IDs and artifact URLs.""" + + def __init__(self, cache_dir, log=None, skip_cache=False): + CacheManager.__init__( + self, + cache_dir, + "artifact_url", + MAX_CACHED_TASKS, + log=log, + skip_cache=skip_cache, + ) + + @cachedmethod(operator.attrgetter("_cache")) + def artifacts(self, tree, job, artifact_job_class, rev): + # Grab the second part of the repo name, which is generally how things + # are indexed. Eg: 'integration/autoland' is indexed as + # 'autoland' + tree = tree.split("/")[1] if "/" in tree else tree + + if job.endswith("-opt"): + tree += ".shippable" + + namespace = "{trust_domain}.v2.{tree}.revision.{rev}.{product}.{job}".format( + trust_domain=artifact_job_class.trust_domain, + rev=rev, + tree=tree, + product=artifact_job_class.product, + job=job, + ) + self.log( + logging.DEBUG, + "artifact", + {"namespace": namespace}, + "Searching Taskcluster index with namespace: {namespace}", + ) + try: + taskId = find_task_id(namespace) + except KeyError: + # Not all revisions correspond to pushes that produce the job we + # care about; and even those that do may not have completed yet. 
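+            # (For reference, the namespace queried above looks roughly like
+            # "gecko.v2.autoland.shippable.revision.<hg-rev>.firefox.linux64-opt".)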
+ raise ValueError( + "Task for {namespace} does not exist (yet)!".format(namespace=namespace) + ) + + return taskId, list_artifacts(taskId) + + +class Artifacts(object): + """Maintain state to efficiently fetch build artifacts from a Firefox tree.""" + + def __init__( + self, + tree, + substs, + defines, + job=None, + log=None, + cache_dir=".", + hg=None, + git=None, + skip_cache=False, + topsrcdir=None, + download_tests=True, + download_symbols=False, + download_maven_zip=False, + no_process=False, + mozbuild=None, + ): + if (hg and git) or (not hg and not git): + raise ValueError("Must provide path to exactly one of hg and git") + + self._substs = substs + self._defines = defines + self._tree = tree + self._job = job or self._guess_artifact_job() + self._log = log + self._hg = hg + self._git = git + self._cache_dir = cache_dir + self._skip_cache = skip_cache + self._topsrcdir = topsrcdir + self._no_process = no_process + + app = self._substs.get("MOZ_BUILD_APP") + job_details = COMM_JOB_DETAILS if app == "comm/mail" else MOZ_JOB_DETAILS + + try: + cls = job_details[self._job] + self._artifact_job = cls( + log=self._log, + download_tests=download_tests, + download_symbols=download_symbols, + download_maven_zip=download_maven_zip, + substs=self._substs, + mozbuild=mozbuild, + ) + except KeyError: + self.log(logging.INFO, "artifact", {"job": self._job}, "Unknown job {job}") + raise KeyError("Unknown job") + + self._task_cache = TaskCache( + self._cache_dir, log=self._log, skip_cache=self._skip_cache + ) + self._artifact_cache = ArtifactCache( + self._cache_dir, log=self._log, skip_cache=self._skip_cache + ) + self._pushhead_cache = PushheadCache( + self._cache_dir, log=self._log, skip_cache=self._skip_cache + ) + + def log(self, *args, **kwargs): + if self._log: + self._log(*args, **kwargs) + + def run_hg(self, *args, **kwargs): + env = kwargs.get("env", {}) + env["HGPLAIN"] = "1" + kwargs["universal_newlines"] = True + return subprocess.check_output([self._hg] + list(args), **kwargs) + + def _guess_artifact_job(self): + # Add the "-debug" suffix to the guessed artifact job name + # if MOZ_DEBUG is enabled. + if self._substs.get("MOZ_DEBUG"): + target_suffix = "-debug" + else: + target_suffix = "-opt" + + if self._substs.get("MOZ_BUILD_APP", "") == "mobile/android": + if self._substs["ANDROID_CPU_ARCH"] == "x86_64": + return "android-x86_64" + target_suffix + if self._substs["ANDROID_CPU_ARCH"] == "x86": + return "android-x86" + target_suffix + if self._substs["ANDROID_CPU_ARCH"] == "arm64-v8a": + return "android-aarch64" + target_suffix + return "android-arm" + target_suffix + + target_64bit = False + if self._substs["target_cpu"] == "x86_64": + target_64bit = True + + if self._defines.get("XP_LINUX", False): + return ("linux64" if target_64bit else "linux") + target_suffix + if self._defines.get("XP_WIN", False): + if self._substs["target_cpu"] == "aarch64": + return "win64-aarch64" + target_suffix + return ("win64" if target_64bit else "win32") + target_suffix + if self._defines.get("XP_MACOSX", False): + # We only produce unified builds in automation, so the target_cpu + # check is not relevant. + return "macosx64" + target_suffix + raise Exception("Cannot determine default job for |mach artifact|!") + + def _pushheads_from_rev(self, rev, count): + """Queries hg.mozilla.org's json-pushlog for pushheads that are nearby + ancestors or `rev`. Multiple trees are queried, as the `rev` may + already have been pushed to multiple repositories. 
For each repository + containing `rev`, the pushhead introducing `rev` and the previous + `count` pushheads from that point are included in the output. + """ + + with self._pushhead_cache as pushhead_cache: + found_pushids = {} + + search_trees = self._artifact_job.candidate_trees + for tree in search_trees: + self.log( + logging.DEBUG, + "artifact", + {"tree": tree, "rev": rev}, + "Attempting to find a pushhead containing {rev} on {tree}.", + ) + try: + pushid = pushhead_cache.parent_pushhead_id(tree, rev) + found_pushids[tree] = pushid + except ValueError: + continue + + candidate_pushheads = collections.defaultdict(list) + + for tree, pushid in six.iteritems(found_pushids): + end = pushid + start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT + + self.log( + logging.DEBUG, + "artifact", + { + "tree": tree, + "pushid": pushid, + "num": NUM_PUSHHEADS_TO_QUERY_PER_PARENT, + }, + "Retrieving the last {num} pushheads starting with id {pushid} on {tree}", + ) + for pushhead in pushhead_cache.pushid_range(tree, start, end): + candidate_pushheads[pushhead].append(tree) + + return candidate_pushheads + + def _get_hg_revisions_from_git(self): + rev_list = subprocess.check_output( + [ + self._git, + "rev-list", + "--topo-order", + "--max-count={num}".format(num=NUM_REVISIONS_TO_QUERY), + "HEAD", + ], + universal_newlines=True, + cwd=self._topsrcdir, + ) + + hg_hash_list = subprocess.check_output( + [self._git, "cinnabar", "git2hg"] + rev_list.splitlines(), + universal_newlines=True, + cwd=self._topsrcdir, + ) + + zeroes = "0" * 40 + + hashes = [] + for hg_hash in hg_hash_list.splitlines(): + hg_hash = hg_hash.strip() + if not hg_hash or hg_hash == zeroes: + continue + hashes.append(hg_hash) + if not hashes: + msg = ( + "Could not list any recent revisions in your clone. Does " + "your clone have git-cinnabar metadata? If not, consider " + "re-cloning using the directions at " + "https://github.com/glandium/git-cinnabar/wiki/Mozilla:-A-" + "git-workflow-for-Gecko-development" + ) + try: + subprocess.check_output( + [ + self._git, + "cat-file", + "-e", + "05e5d33a570d48aed58b2d38f5dfc0a7870ff8d3^{commit}", + ], + stderr=subprocess.STDOUT, + ) + # If the above commit exists, we're probably in a clone of + # `gecko-dev`, and this documentation applies. + msg += ( + "\n\nNOTE: Consider following the directions " + "at https://github.com/glandium/git-cinnabar/wiki/" + "Mozilla:-Using-a-git-clone-of-gecko%E2%80%90dev-" + "to-push-to-mercurial to resolve this issue." + ) + except subprocess.CalledProcessError: + pass + raise UserError(msg) + return hashes + + def _get_recent_public_revisions(self): + """Returns recent ancestors of the working parent that are likely to + to be known to Mozilla automation. + + If we're using git, retrieves hg revisions from git-cinnabar. + """ + if self._git: + return self._get_hg_revisions_from_git() + + # Mercurial updated the ordering of "last" in 4.3. We use revision + # numbers to order here to accommodate multiple versions of hg. + last_revs = self.run_hg( + "log", + "--template", + "{rev}:{node}\n", + "-r", + "last(public() and ::., {num})".format(num=NUM_REVISIONS_TO_QUERY), + cwd=self._topsrcdir, + ).splitlines() + + if len(last_revs) == 0: + raise UserError( + """\ +There are no public revisions. +This can happen if the repository is created from bundle file and never pulled +from remote. Please run `hg pull` and build again. 
+https://firefox-source-docs.mozilla.org/contributing/vcs/mercurial_bundles.html +""" + ) + + self.log( + logging.DEBUG, + "artifact", + {"len": len(last_revs)}, + "hg suggested {len} candidate revisions", + ) + + def to_pair(line): + rev, node = line.split(":", 1) + return (int(rev), node) + + pairs = [to_pair(r) for r in last_revs] + + # Python's tuple sort orders by first component: here, the (local) + # revision number. + nodes = [pair[1] for pair in sorted(pairs, reverse=True)] + + for node in nodes[:20]: + self.log( + logging.DEBUG, + "artifact", + {"node": node}, + "hg suggested candidate revision: {node}", + ) + self.log( + logging.DEBUG, + "artifact", + {"remaining": max(0, len(nodes) - 20)}, + "hg suggested candidate revision: and {remaining} more", + ) + + return nodes + + def _find_pushheads(self): + """Returns an iterator of recent pushhead revisions, starting with the + working parent. + """ + + last_revs = self._get_recent_public_revisions() + candidate_pushheads = self._pushheads_from_rev( + last_revs[0].rstrip(), NUM_PUSHHEADS_TO_QUERY_PER_PARENT + ) + count = 0 + for rev in last_revs: + rev = rev.rstrip() + if not rev: + continue + if rev not in candidate_pushheads: + continue + count += 1 + yield candidate_pushheads[rev], rev + + if not count: + raise Exception( + "Could not find any candidate pushheads in the last {num} revisions.\n" + "Search started with {rev}, which must be known to Mozilla automation.\n\n" + "see https://firefox-source-docs.mozilla.org/contributing/build/artifact_builds.html".format( # noqa E501 + rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT + ) + ) + + def find_pushhead_artifacts(self, task_cache, job, tree, pushhead): + try: + taskId, artifacts = task_cache.artifacts( + tree, job, self._artifact_job.__class__, pushhead + ) + except ValueError: + return None + + urls = [] + for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts): + url = get_artifact_url(taskId, artifact_name) + urls.append(url) + if urls: + self.log( + logging.DEBUG, + "artifact", + {"pushhead": pushhead, "tree": tree}, + "Installing from remote pushhead {pushhead} on {tree}", + ) + return urls + return None + + def install_from_file(self, filename, distdir): + self.log( + logging.DEBUG, + "artifact", + {"filename": filename}, + "Installing from {filename}", + ) + + # Copy all .so files, avoiding modification where possible. + ensureParentDir(mozpath.join(distdir, ".dummy")) + + if self._no_process: + orig_basename = os.path.basename(filename) + # Turn 'HASH-target...' into 'target...' if possible. It might not + # be possible if the file is given directly on the command line. + before, _sep, after = orig_basename.rpartition("-") + if re.match(r"[0-9a-fA-F]{16}$", before): + orig_basename = after + path = mozpath.join(distdir, orig_basename) + with FileAvoidWrite(path, readmode="rb") as fh: + shutil.copyfileobj(open(filename, mode="rb"), fh) + self.log( + logging.DEBUG, + "artifact", + {"path": path}, + "Copied unprocessed artifact: to {path}", + ) + return + + # Do we need to post-process? 
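+        # Processed artifacts are cached beside the download as
+        # "<filename><PROCESSED_SUFFIX>"; we only reprocess when that file
+        # is missing or deliberately discarded below.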
+ processed_filename = filename + PROCESSED_SUFFIX + + if self._skip_cache and os.path.exists(processed_filename): + self.log( + logging.INFO, + "artifact", + {"path": processed_filename}, + "Skipping cache: removing cached processed artifact {path}", + ) + os.remove(processed_filename) + + if not os.path.exists(processed_filename): + self.log( + logging.DEBUG, + "artifact", + {"filename": filename}, + "Processing contents of {filename}", + ) + self.log( + logging.DEBUG, + "artifact", + {"processed_filename": processed_filename}, + "Writing processed {processed_filename}", + ) + try: + self._artifact_job.process_artifact(filename, processed_filename) + except Exception as e: + # Delete the partial output of failed processing. + try: + os.remove(processed_filename) + except FileNotFoundError: + pass + raise e + + self._artifact_cache._persist_limit.register_file(processed_filename) + + self.log( + logging.DEBUG, + "artifact", + {"processed_filename": processed_filename}, + "Installing from processed {processed_filename}", + ) + + with zipfile.ZipFile(processed_filename) as zf: + for info in zf.infolist(): + n = mozpath.join(distdir, info.filename) + fh = FileAvoidWrite(n, readmode="rb") + shutil.copyfileobj(zf.open(info), fh) + file_existed, file_updated = fh.close() + self.log( + logging.DEBUG, + "artifact", + { + "updating": "Updating" if file_updated else "Not updating", + "filename": n, + }, + "{updating} {filename}", + ) + if not file_existed or file_updated: + # Libraries and binaries may need to be marked executable, + # depending on platform. + perms = ( + info.external_attr >> 16 + ) # See http://stackoverflow.com/a/434689. + perms |= ( + stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH + ) # u+w, a+r. + os.chmod(n, perms) + return 0 + + def install_from_url(self, url, distdir): + self.log(logging.DEBUG, "artifact", {"url": url}, "Installing from {url}") + filename = self._artifact_cache.fetch(url) + return self.install_from_file(filename, distdir) + + def _install_from_hg_pushheads(self, hg_pushheads, distdir): + """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes + and tree-sets they are known to be in, trying to download and + install from each. + """ + + urls = None + count = 0 + # with blocks handle handle persistence. 
+ with self._task_cache as task_cache: + for trees, hg_hash in hg_pushheads: + for tree in trees: + count += 1 + self.log( + logging.DEBUG, + "artifact", + {"hg_hash": hg_hash, "tree": tree}, + "Trying to find artifacts for hg revision {hg_hash} on tree {tree}.", + ) + urls = self.find_pushhead_artifacts( + task_cache, self._job, tree, hg_hash + ) + if urls: + for url in urls: + if self.install_from_url(url, distdir): + return 1 + return 0 + + self.log( + logging.ERROR, + "artifact", + {"count": count}, + "Tried {count} pushheads, no built artifacts found.", + ) + return 1 + + def install_from_recent(self, distdir): + hg_pushheads = self._find_pushheads() + return self._install_from_hg_pushheads(hg_pushheads, distdir) + + def install_from_revset(self, revset, distdir): + revision = None + try: + if self._hg: + revision = self.run_hg( + "log", "--template", "{node}\n", "-r", revset, cwd=self._topsrcdir + ).strip() + elif self._git: + revset = subprocess.check_output( + [self._git, "rev-parse", "%s^{commit}" % revset], + stderr=open(os.devnull, "w"), + universal_newlines=True, + cwd=self._topsrcdir, + ).strip() + else: + # Fallback to the exception handling case from both hg and git + raise subprocess.CalledProcessError() + except subprocess.CalledProcessError: + # If the mercurial of git commands above failed, it means the given + # revset is not known locally to the VCS. But if the revset looks + # like a complete sha1, assume it is a mercurial sha1 that hasn't + # been pulled, and use that. + if re.match(r"^[A-Fa-f0-9]{40}$", revset): + revision = revset + + if revision is None and self._git: + revision = subprocess.check_output( + [self._git, "cinnabar", "git2hg", revset], + universal_newlines=True, + cwd=self._topsrcdir, + ).strip() + + if revision == "0" * 40 or revision is None: + raise ValueError( + "revision specification must resolve to a commit known to hg" + ) + if len(revision.split("\n")) != 1: + raise ValueError( + "revision specification must resolve to exactly one commit" + ) + + self.log( + logging.INFO, + "artifact", + {"revset": revset, "revision": revision}, + "Will only accept artifacts from a pushhead at {revision} " + '(matched revset "{revset}").', + ) + # Include try in our search to allow pulling from a specific push. 
+ pushheads = [ + ( + self._artifact_job.candidate_trees + [self._artifact_job.try_tree], + revision, + ) + ] + return self._install_from_hg_pushheads(pushheads, distdir) + + def install_from_task(self, taskId, distdir): + artifacts = list_artifacts(taskId) + + urls = [] + for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts): + url = get_artifact_url(taskId, artifact_name) + urls.append(url) + if not urls: + raise ValueError( + "Task {taskId} existed, but no artifacts found!".format(taskId=taskId) + ) + for url in urls: + if self.install_from_url(url, distdir): + return 1 + return 0 + + def install_from(self, source, distdir): + """Install artifacts from a ``source`` into the given ``distdir``.""" + if (source and os.path.isfile(source)) or "MOZ_ARTIFACT_FILE" in os.environ: + source = source or os.environ["MOZ_ARTIFACT_FILE"] + for source in source.split(os.pathsep): + ret = self.install_from_file(source, distdir) + if ret: + return ret + return 0 + + if (source and urlparse(source).scheme) or "MOZ_ARTIFACT_URL" in os.environ: + source = source or os.environ["MOZ_ARTIFACT_URL"] + for source in source.split(): + ret = self.install_from_url(source, distdir) + if ret: + return ret + return 0 + + if source or "MOZ_ARTIFACT_REVISION" in os.environ: + source = source or os.environ["MOZ_ARTIFACT_REVISION"] + return self.install_from_revset(source, distdir) + + for var in ( + "MOZ_ARTIFACT_TASK_%s" % self._job.upper().replace("-", "_"), + "MOZ_ARTIFACT_TASK", + ): + if var in os.environ: + return self.install_from_task(os.environ[var], distdir) + + return self.install_from_recent(distdir) + + def clear_cache(self): + self.log(logging.INFO, "artifact", {}, "Deleting cached artifacts and caches.") + self._task_cache.clear_cache() + self._artifact_cache.clear_cache() + self._pushhead_cache.clear_cache() diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py new file mode 100644 index 0000000000..e7097eb614 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/__init__.py @@ -0,0 +1,27 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +backends = { + "Clangd": "mozbuild.backend.clangd", + "ChromeMap": "mozbuild.codecoverage.chrome_map", + "CompileDB": "mozbuild.compilation.database", + "CppEclipse": "mozbuild.backend.cpp_eclipse", + "FasterMake": "mozbuild.backend.fastermake", + "FasterMake+RecursiveMake": None, + "RecursiveMake": "mozbuild.backend.recursivemake", + "StaticAnalysis": "mozbuild.backend.static_analysis", + "TestManifest": "mozbuild.backend.test_manifest", + "VisualStudio": "mozbuild.backend.visualstudio", +} + + +def get_backend_class(name): + if "+" in name: + from mozbuild.backend.base import HybridBackend + + return HybridBackend(*(get_backend_class(name) for name in name.split("+"))) + + class_name = "%sBackend" % name + module = __import__(backends[name], globals(), locals(), [class_name]) + return getattr(module, class_name) diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py new file mode 100644 index 0000000000..0f95942f51 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/base.py @@ -0,0 +1,389 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import errno +import io +import itertools +import os +import time +from abc import ABCMeta, abstractmethod +from contextlib import contextmanager + +import mozpack.path as mozpath +import six +from mach.mixin.logging import LoggingMixin + +from mozbuild.base import ExecutionSummary + +from ..frontend.data import ContextDerived +from ..frontend.reader import EmptyConfig +from ..preprocessor import Preprocessor +from ..pythonutil import iter_modules_in_path +from ..util import FileAvoidWrite, simple_diff +from .configenvironment import ConfigEnvironment + + +class BuildBackend(LoggingMixin): + """Abstract base class for build backends. + + A build backend is merely a consumer of the build configuration (the output + of the frontend processing). It does something with said data. What exactly + is the discretion of the specific implementation. + """ + + __metaclass__ = ABCMeta + + def __init__(self, environment): + assert isinstance(environment, (ConfigEnvironment, EmptyConfig)) + self.populate_logger() + + self.environment = environment + + # Files whose modification should cause a new read and backend + # generation. + self.backend_input_files = set() + + # Files generated by the backend. + self._backend_output_files = set() + + self._environments = {} + self._environments[environment.topobjdir] = environment + + # The number of backend files created. + self._created_count = 0 + + # The number of backend files updated. + self._updated_count = 0 + + # The number of unchanged backend files. + self._unchanged_count = 0 + + # The number of deleted backend files. + self._deleted_count = 0 + + # The total wall time spent in the backend. This counts the time the + # backend writes out files, etc. + self._execution_time = 0.0 + + # Mapping of changed file paths to diffs of the changes. + self.file_diffs = {} + + self.dry_run = False + + self._init() + + def summary(self): + return ExecutionSummary( + self.__class__.__name__.replace("Backend", "") + + " backend executed in {execution_time:.2f}s\n " + "{total:d} total backend files; " + "{created:d} created; " + "{updated:d} updated; " + "{unchanged:d} unchanged; " + "{deleted:d} deleted", + execution_time=self._execution_time, + total=self._created_count + self._updated_count + self._unchanged_count, + created=self._created_count, + updated=self._updated_count, + unchanged=self._unchanged_count, + deleted=self._deleted_count, + ) + + def _init(self): + """Hook point for child classes to perform actions during __init__. + + This exists so child classes don't need to implement __init__. + """ + + def consume(self, objs): + """Consume a stream of TreeMetadata instances. + + This is the main method of the interface. This is what takes the + frontend output and does something with it. + + Child classes are not expected to implement this method. Instead, the + base class consumes objects and calls methods (possibly) implemented by + child classes. + """ + + # Previously generated files. 
+ list_file = mozpath.join( + self.environment.topobjdir, "backend.%s" % self.__class__.__name__ + ) + backend_output_list = set() + if os.path.exists(list_file): + with open(list_file) as fh: + backend_output_list.update( + mozpath.normsep(p) for p in fh.read().splitlines() + ) + + for obj in objs: + obj_start = time.monotonic() + if not self.consume_object(obj) and not isinstance(self, PartialBackend): + raise Exception("Unhandled object of type %s" % type(obj)) + self._execution_time += time.monotonic() - obj_start + + if isinstance(obj, ContextDerived) and not isinstance(self, PartialBackend): + self.backend_input_files |= obj.context_all_paths + + # Pull in all loaded Python as dependencies so any Python changes that + # could influence our output result in a rescan. + self.backend_input_files |= set( + iter_modules_in_path(self.environment.topsrcdir, self.environment.topobjdir) + ) + + finished_start = time.monotonic() + self.consume_finished() + self._execution_time += time.monotonic() - finished_start + + # Purge backend files created in previous run, but not created anymore + delete_files = backend_output_list - self._backend_output_files + for path in delete_files: + full_path = mozpath.join(self.environment.topobjdir, path) + try: + with io.open(full_path, mode="r", encoding="utf-8") as existing: + old_content = existing.read() + if old_content: + self.file_diffs[full_path] = simple_diff( + full_path, old_content.splitlines(), None + ) + except IOError: + pass + try: + if not self.dry_run: + os.unlink(full_path) + self._deleted_count += 1 + except OSError: + pass + # Remove now empty directories + for dir in set(mozpath.dirname(d) for d in delete_files): + try: + os.removedirs(dir) + except OSError: + pass + + # Write out the list of backend files generated, if it changed. + if backend_output_list != self._backend_output_files: + with self._write_file(list_file) as fh: + fh.write("\n".join(sorted(self._backend_output_files))) + else: + # Always update its mtime if we're not in dry-run mode. + if not self.dry_run: + with open(list_file, "a"): + os.utime(list_file, None) + + # Write out the list of input files for the backend + with self._write_file("%s.in" % list_file) as fh: + fh.write( + "\n".join(sorted(mozpath.normsep(f) for f in self.backend_input_files)) + ) + + @abstractmethod + def consume_object(self, obj): + """Consumes an individual TreeMetadata instance. + + This is the main method used by child classes to react to build + metadata. + """ + + def consume_finished(self): + """Called when consume() has completed handling all objects.""" + + def build(self, config, output, jobs, verbose, what=None): + """Called when 'mach build' is executed. + + This should return the status value of a subprocess, where 0 denotes + success and any other value is an error code. A return value of None + indicates that the default 'make -f client.mk' should run. + """ + return None + + def _write_purgecaches(self, config): + """Write .purgecaches sentinels. + + The purgecaches mechanism exists to allow the platform to + invalidate the XUL cache (which includes some JS) at application + startup-time. The application checks for .purgecaches in the + application directory, which varies according to + --enable-application/--enable-project. There's a further wrinkle on + macOS, where the real application directory is part of a Cocoa bundle + produced from the regular application directory by the build + system. 
In this case, we write to both locations, since the + build system recreates the Cocoa bundle from the contents of the + regular application directory and might remove a sentinel + created here. + """ + + app = config.substs["MOZ_BUILD_APP"] + if app == "mobile/android": + # In order to take effect, .purgecaches sentinels would need to be + # written to the Android device file system. + return + + root = mozpath.join(config.topobjdir, "dist", "bin") + + if app == "browser": + root = mozpath.join(config.topobjdir, "dist", "bin", "browser") + + purgecaches_dirs = [root] + if app == "browser" and "cocoa" == config.substs["MOZ_WIDGET_TOOLKIT"]: + bundledir = mozpath.join( + config.topobjdir, + "dist", + config.substs["MOZ_MACBUNDLE_NAME"], + "Contents", + "Resources", + "browser", + ) + purgecaches_dirs.append(bundledir) + + for dir in purgecaches_dirs: + with open(mozpath.join(dir, ".purgecaches"), "wt") as f: + f.write("\n") + + def post_build(self, config, output, jobs, verbose, status): + """Called late during 'mach build' execution, after `build(...)` has finished. + + `status` is the status value returned from `build(...)`. + + In the case where `build` returns `None`, this is called after + the default `make` command has completed, with the status of + that command. + + This should return the status value from `build(...)`, or the + status value of a subprocess, where 0 denotes success and any + other value is an error code. + + If an exception is raised, ``mach build`` will fail with a + non-zero exit code. + """ + self._write_purgecaches(config) + + return status + + @contextmanager + def _write_file(self, path=None, fh=None, readmode="r"): + """Context manager to write a file. + + This is a glorified wrapper around FileAvoidWrite with integration to + update the summary data on this instance. 
+ + Example usage: + + with self._write_file('foo.txt') as fh: + fh.write('hello world') + """ + + if path is not None: + assert fh is None + fh = FileAvoidWrite( + path, capture_diff=True, dry_run=self.dry_run, readmode=readmode + ) + else: + assert fh is not None + + dirname = mozpath.dirname(fh.name) + try: + os.makedirs(dirname) + except OSError as error: + if error.errno != errno.EEXIST: + raise + + yield fh + + self._backend_output_files.add( + mozpath.relpath(fh.name, self.environment.topobjdir) + ) + existed, updated = fh.close() + if fh.diff: + self.file_diffs[fh.name] = fh.diff + if not existed: + self._created_count += 1 + elif updated: + self._updated_count += 1 + else: + self._unchanged_count += 1 + + @contextmanager + def _get_preprocessor(self, obj): + """Returns a preprocessor with a few predefined values depending on + the given BaseConfigSubstitution(-like) object, and all the substs + in the current environment.""" + pp = Preprocessor() + srcdir = mozpath.dirname(obj.input_path) + pp.context.update( + { + k: " ".join(v) if isinstance(v, list) else v + for k, v in six.iteritems(obj.config.substs) + } + ) + pp.context.update( + top_srcdir=obj.topsrcdir, + topobjdir=obj.topobjdir, + srcdir=srcdir, + srcdir_rel=mozpath.relpath(srcdir, mozpath.dirname(obj.output_path)), + relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or ".", + DEPTH=mozpath.relpath(obj.topobjdir, mozpath.dirname(obj.output_path)) + or ".", + ) + pp.do_filter("attemptSubstitution") + pp.setMarker(None) + with self._write_file(obj.output_path) as fh: + pp.out = fh + yield pp + + +class PartialBackend(BuildBackend): + """A PartialBackend is a BuildBackend declaring that its consume_object + method may not handle all build configuration objects it's passed, and + that it's fine.""" + + +def HybridBackend(*backends): + """A HybridBackend is the combination of one or more PartialBackends + with a non-partial BuildBackend. + + Build configuration objects are passed to each backend, stopping at the + first of them that declares having handled them. 
+ """ + assert len(backends) >= 2 + assert all(issubclass(b, PartialBackend) for b in backends[:-1]) + assert not (issubclass(backends[-1], PartialBackend)) + assert all(issubclass(b, BuildBackend) for b in backends) + + class TheHybridBackend(BuildBackend): + def __init__(self, environment): + self._backends = [b(environment) for b in backends] + super(TheHybridBackend, self).__init__(environment) + + def consume_object(self, obj): + return any(b.consume_object(obj) for b in self._backends) + + def consume_finished(self): + for backend in self._backends: + backend.consume_finished() + + for attr in ( + "_execution_time", + "_created_count", + "_updated_count", + "_unchanged_count", + "_deleted_count", + ): + setattr(self, attr, sum(getattr(b, attr) for b in self._backends)) + + for b in self._backends: + self.file_diffs.update(b.file_diffs) + for attr in ("backend_input_files", "_backend_output_files"): + files = getattr(self, attr) + files |= getattr(b, attr) + + name = "+".join( + itertools.chain( + (b.__name__.replace("Backend", "") for b in backends[:-1]), + (b.__name__ for b in backends[-1:]), + ) + ) + + return type(str(name), (TheHybridBackend,), {}) diff --git a/python/mozbuild/mozbuild/backend/cargo_build_defs.py b/python/mozbuild/mozbuild/backend/cargo_build_defs.py new file mode 100644 index 0000000000..c60fd2abf6 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/cargo_build_defs.py @@ -0,0 +1,87 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +cargo_extra_outputs = { + "bindgen": ["tests.rs", "host-target.txt"], + "cssparser": ["tokenizer.rs"], + "gleam": ["gl_and_gles_bindings.rs", "gl_bindings.rs", "gles_bindings.rs"], + "khronos_api": ["webgl_exts.rs"], + "libloading": ["libglobal_static.a", "src/os/unix/global_static.o"], + "lmdb-sys": ["liblmdb.a", "midl.o", "mdb.o"], + "num-integer": ["rust_out.o"], + "num-traits": ["rust_out.o"], + "selectors": ["ascii_case_insensitive_html_attributes.rs"], + "style": [ + "gecko/atom_macro.rs", + "gecko/bindings.rs", + "gecko/pseudo_element_definition.rs", + "gecko/structs.rs", + "gecko_properties.rs", + "longhands/background.rs", + "longhands/border.rs", + "longhands/box.rs", + "longhands/color.rs", + "longhands/column.rs", + "longhands/counters.rs", + "longhands/effects.rs", + "longhands/font.rs", + "longhands/inherited_box.rs", + "longhands/inherited_svg.rs", + "longhands/inherited_table.rs", + "longhands/inherited_text.rs", + "longhands/inherited_ui.rs", + "longhands/list.rs", + "longhands/margin.rs", + "longhands/outline.rs", + "longhands/padding.rs", + "longhands/position.rs", + "longhands/svg.rs", + "longhands/table.rs", + "longhands/text.rs", + "longhands/ui.rs", + "longhands/xul.rs", + "properties.rs", + "shorthands/background.rs", + "shorthands/border.rs", + "shorthands/box.rs", + "shorthands/color.rs", + "shorthands/column.rs", + "shorthands/counters.rs", + "shorthands/effects.rs", + "shorthands/font.rs", + "shorthands/inherited_box.rs", + "shorthands/inherited_svg.rs", + "shorthands/inherited_table.rs", + "shorthands/inherited_text.rs", + "shorthands/inherited_ui.rs", + "shorthands/list.rs", + "shorthands/margin.rs", + "shorthands/outline.rs", + "shorthands/padding.rs", + "shorthands/position.rs", + "shorthands/svg.rs", + "shorthands/table.rs", + "shorthands/text.rs", + "shorthands/ui.rs", + "shorthands/xul.rs", + ], + "webrender": ["shaders.rs"], + "geckodriver": 
["build-info.rs"], + "gecko-profiler": ["gecko/bindings.rs"], + "crc": ["crc64_constants.rs", "crc32_constants.rs"], + "bzip2-sys": [ + "bzip2-1.0.6/blocksort.o", + "bzip2-1.0.6/bzlib.o", + "bzip2-1.0.6/compress.o", + "bzip2-1.0.6/crctable.o", + "bzip2-1.0.6/decompress.o", + "bzip2-1.0.6/huffman.o", + "bzip2-1.0.6/randtable.o", + "libbz2.a", + ], + "clang-sys": ["common.rs", "dynamic.rs"], + "target-lexicon": ["host.rs"], + "baldrdash": ["bindings.rs"], + "typenum": ["op.rs", "consts.rs"], +} diff --git a/python/mozbuild/mozbuild/backend/clangd.py b/python/mozbuild/mozbuild/backend/clangd.py new file mode 100644 index 0000000000..5db5610ae6 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/clangd.py @@ -0,0 +1,126 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +# This module provides a backend for `clangd` in order to have support for +# code completion, compile errors, go-to-definition and more. +# It is based on `database.py` with the difference that we don't generate +# an unified `compile_commands.json` but we generate a per file basis `command` in +# `objdir/clangd/compile_commands.json` + +import os + +import mozpack.path as mozpath + +from mozbuild.compilation.database import CompileDBBackend + + +def find_vscode_cmd(): + import shutil + import sys + + # Try to look up the `code` binary on $PATH, and use it if present. This + # should catch cases like being run from within a vscode-remote shell, + # even if vscode itself is also installed on the remote host. + path = shutil.which("code") + if path is not None: + return [path] + + cmd_and_path = [] + + # If the binary wasn't on $PATH, try to find it in a variety of other + # well-known install locations based on the current platform. + if sys.platform.startswith("darwin"): + cmd_and_path = [ + {"path": "/usr/local/bin/code", "cmd": ["/usr/local/bin/code"]}, + { + "path": "/Applications/Visual Studio Code.app", + "cmd": ["open", "/Applications/Visual Studio Code.app", "--args"], + }, + { + "path": "/Applications/Visual Studio Code - Insiders.app", + "cmd": [ + "open", + "/Applications/Visual Studio Code - Insiders.app", + "--args", + ], + }, + ] + elif sys.platform.startswith("win"): + from pathlib import Path + + vscode_path = mozpath.join( + str(Path.home()), + "AppData", + "Local", + "Programs", + "Microsoft VS Code", + "Code.exe", + ) + vscode_insiders_path = mozpath.join( + str(Path.home()), + "AppData", + "Local", + "Programs", + "Microsoft VS Code Insiders", + "Code - Insiders.exe", + ) + cmd_and_path = [ + {"path": vscode_path, "cmd": [vscode_path]}, + {"path": vscode_insiders_path, "cmd": [vscode_insiders_path]}, + ] + elif sys.platform.startswith("linux"): + cmd_and_path = [ + {"path": "/usr/local/bin/code", "cmd": ["/usr/local/bin/code"]}, + {"path": "/snap/bin/code", "cmd": ["/snap/bin/code"]}, + {"path": "/usr/bin/code", "cmd": ["/usr/bin/code"]}, + {"path": "/usr/bin/code-insiders", "cmd": ["/usr/bin/code-insiders"]}, + ] + + # Did we guess the path? 
+ for element in cmd_and_path: + if os.path.exists(element["path"]): + return element["cmd"] + + # Path cannot be found + return None + + +class ClangdBackend(CompileDBBackend): + """ + Configuration that generates the backend for clangd, it is used with `clangd` + extension for vscode + """ + + def _init(self): + CompileDBBackend._init(self) + + def _get_compiler_args(self, cenv, canonical_suffix): + compiler_args = super(ClangdBackend, self)._get_compiler_args( + cenv, canonical_suffix + ) + if compiler_args is None: + return None + + if len(compiler_args) and compiler_args[0].endswith("ccache"): + compiler_args.pop(0) + return compiler_args + + def _build_cmd(self, cmd, filename, unified): + cmd = list(cmd) + + cmd.append(filename) + + return cmd + + def _outputfile_path(self): + clangd_cc_path = os.path.join(self.environment.topobjdir, "clangd") + + if not os.path.exists(clangd_cc_path): + os.mkdir(clangd_cc_path) + + # Output the database (a JSON file) to objdir/clangd/compile_commands.json + return mozpath.join(clangd_cc_path, "compile_commands.json") + + def _process_unified_sources(self, obj): + self._process_unified_sources_without_mapping(obj) diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py new file mode 100644 index 0000000000..f0dc7d4e46 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/common.py @@ -0,0 +1,603 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import itertools +import json +import os +from collections import defaultdict +from operator import itemgetter + +import mozpack.path as mozpath +import six +from mozpack.chrome.manifest import parse_manifest_line + +from mozbuild.backend.base import BuildBackend +from mozbuild.frontend.context import ( + VARIABLES, + Context, + ObjDirPath, + Path, + RenamedSourcePath, +) +from mozbuild.frontend.data import ( + BaseProgram, + ChromeManifestEntry, + ConfigFileSubstitution, + Exports, + FinalTargetFiles, + FinalTargetPreprocessedFiles, + GeneratedFile, + HostLibrary, + HostSources, + IPDLCollection, + LocalizedFiles, + LocalizedPreprocessedFiles, + SandboxedWasmLibrary, + SharedLibrary, + Sources, + StaticLibrary, + UnifiedSources, + WebIDLCollection, + XPCOMComponentManifests, + XPIDLModule, +) +from mozbuild.jar import DeprecatedJarManifest, JarManifestParser +from mozbuild.preprocessor import Preprocessor +from mozbuild.util import mkdir + + +class XPIDLManager(object): + """Helps manage XPCOM IDLs in the context of the build system.""" + + class Module(object): + def __init__(self): + self.idl_files = set() + self.directories = set() + self._stems = set() + + def add_idls(self, idls): + self.idl_files.update(idl.full_path for idl in idls) + self.directories.update(mozpath.dirname(idl.full_path) for idl in idls) + self._stems.update( + mozpath.splitext(mozpath.basename(idl))[0] for idl in idls + ) + + def stems(self): + return iter(self._stems) + + def __init__(self, config): + self.config = config + self.topsrcdir = config.topsrcdir + self.topobjdir = config.topobjdir + + self._idls = set() + self.modules = defaultdict(self.Module) + + def link_module(self, module): + """Links an XPIDL module with with this instance.""" + for idl in module.idl_files: + basename = mozpath.basename(idl.full_path) + + if basename in self._idls: + raise Exception("IDL already registered: %s" % basename) + self._idls.add(basename) 
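+        # Module.add_idls records full paths, containing directories, and
+        # extension-stripped stems (e.g. "nsIFoo" for nsIFoo.idl).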
+
+        self.modules[module.name].add_idls(module.idl_files)
+
+    def idl_stems(self):
+        """Return an iterator of stems of the managed IDL files.
+
+        The stem of an IDL file is the basename of the file with no .idl extension.
+        """
+        return itertools.chain(*[m.stems() for m in six.itervalues(self.modules)])
+
+
+class BinariesCollection(object):
+    """Tracks state of binaries produced by the build."""
+
+    def __init__(self):
+        self.shared_libraries = []
+        self.programs = []
+
+
+class CommonBackend(BuildBackend):
+    """Holds logic common to all build backends."""
+
+    def _init(self):
+        self._idl_manager = XPIDLManager(self.environment)
+        self._binaries = BinariesCollection()
+        self._configs = set()
+        self._generated_sources = set()
+
+    def consume_object(self, obj):
+        self._configs.add(obj.config)
+
+        if isinstance(obj, XPIDLModule):
+            # TODO bug 1240134 tracks not processing XPIDL files during
+            # artifact builds.
+            self._idl_manager.link_module(obj)
+
+        elif isinstance(obj, ConfigFileSubstitution):
+            # Do not handle ConfigFileSubstitution for Makefiles. Leave that
+            # to other backends.
+            if mozpath.basename(obj.output_path) == "Makefile":
+                return False
+            with self._get_preprocessor(obj) as pp:
+                pp.do_include(obj.input_path)
+            self.backend_input_files.add(obj.input_path)
+
+        elif isinstance(obj, WebIDLCollection):
+            self._handle_webidl_collection(obj)
+
+        elif isinstance(obj, IPDLCollection):
+            self._handle_ipdl_sources(
+                obj.objdir,
+                list(sorted(obj.all_sources())),
+                list(sorted(obj.all_preprocessed_sources())),
+                list(sorted(obj.all_regular_sources())),
+            )
+
+        elif isinstance(obj, XPCOMComponentManifests):
+            self._handle_xpcom_collection(obj)
+
+        elif isinstance(obj, UnifiedSources):
+            if obj.generated_files:
+                self._handle_generated_sources(obj.generated_files)
+
+            # Unified sources aren't relevant to artifact builds.
+            if self.environment.is_artifact_build:
+                return True
+
+            if obj.have_unified_mapping:
+                self._write_unified_files(obj.unified_source_mapping, obj.objdir)
+            if hasattr(self, "_process_unified_sources"):
+                self._process_unified_sources(obj)
+
+        elif isinstance(obj, BaseProgram):
+            self._binaries.programs.append(obj)
+            return False
+
+        elif isinstance(obj, SharedLibrary):
+            self._binaries.shared_libraries.append(obj)
+            return False
+
+        elif isinstance(obj, SandboxedWasmLibrary):
+            self._handle_generated_sources(
+                [mozpath.join(obj.relobjdir, f"{obj.basename}.h")]
+            )
+            return False
+
+        elif isinstance(obj, (Sources, HostSources)):
+            if obj.generated_files:
+                self._handle_generated_sources(obj.generated_files)
+            return False
+
+        elif isinstance(obj, GeneratedFile):
+            if obj.required_during_compile or obj.required_before_compile:
+                for f in itertools.chain(
+                    obj.required_before_compile, obj.required_during_compile
+                ):
+                    fullpath = ObjDirPath(obj._context, "!" + f).full_path
+                    self._handle_generated_sources([fullpath])
+            return False
+
+        elif isinstance(obj, Exports):
+            objdir_files = [
+                f.full_path
+                for path, files in obj.files.walk()
+                for f in files
+                if isinstance(f, ObjDirPath)
+            ]
+            if objdir_files:
+                self._handle_generated_sources(objdir_files)
+            return False
+
+        else:
+            return False
+
+        return True
+
+    def consume_finished(self):
+        if len(self._idl_manager.modules):
+            self._write_rust_xpidl_summary(self._idl_manager)
+            self._handle_idl_manager(self._idl_manager)
+            self._handle_xpidl_sources()
+
+        for config in self._configs:
+            self.backend_input_files.add(config.source)
+
+        # Write out a machine-readable file describing binaries.
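As a rough sketch, the binaries.json written below has the following shape;
the entries here are invented for illustration, and the real to_dict() output
carries more fields per binary:

import json
from operator import itemgetter

binaries = {
    "programs": sorted(
        [{"program": "firefox"}, {"program": "xpcshell"}],
        key=itemgetter("program"),
    ),
    "shared_libraries": sorted(
        [{"basename": "liblgpllibs"}, {"basename": "libxul"}],
        key=itemgetter("basename"),
    ),
}
print(json.dumps(binaries, sort_keys=True, indent=4))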
+ topobjdir = self.environment.topobjdir + with self._write_file(mozpath.join(topobjdir, "binaries.json")) as fh: + d = { + "shared_libraries": sorted( + (s.to_dict() for s in self._binaries.shared_libraries), + key=itemgetter("basename"), + ), + "programs": sorted( + (p.to_dict() for p in self._binaries.programs), + key=itemgetter("program"), + ), + } + json.dump(d, fh, sort_keys=True, indent=4) + + # Write out a file listing generated sources. + with self._write_file(mozpath.join(topobjdir, "generated-sources.json")) as fh: + d = {"sources": sorted(self._generated_sources)} + json.dump(d, fh, sort_keys=True, indent=4) + + def _expand_libs(self, input_bin): + os_libs = [] + shared_libs = [] + static_libs = [] + objs = [] + + seen_objs = set() + seen_libs = set() + + def add_objs(lib): + for o in lib.objs: + if o in seen_objs: + continue + + seen_objs.add(o) + objs.append(o) + + def expand(lib, recurse_objs, system_libs): + if isinstance(lib, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)): + if lib.no_expand_lib: + static_libs.append(lib) + recurse_objs = False + elif recurse_objs: + add_objs(lib) + + for l in lib.linked_libraries: + expand(l, recurse_objs, system_libs) + + if system_libs: + for l in lib.linked_system_libs: + if l not in seen_libs: + seen_libs.add(l) + os_libs.append(l) + + elif isinstance(lib, SharedLibrary): + if lib not in seen_libs: + seen_libs.add(lib) + shared_libs.append(lib) + + add_objs(input_bin) + + system_libs = not isinstance( + input_bin, (HostLibrary, StaticLibrary, SandboxedWasmLibrary) + ) + for lib in input_bin.linked_libraries: + if isinstance(lib, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)): + expand(lib, True, system_libs) + elif isinstance(lib, SharedLibrary): + if lib not in seen_libs: + seen_libs.add(lib) + shared_libs.append(lib) + + for lib in input_bin.linked_system_libs: + if lib not in seen_libs: + seen_libs.add(lib) + os_libs.append(lib) + + return (objs, shared_libs, os_libs, static_libs) + + def _make_list_file(self, kind, objdir, objs, name): + if not objs: + return None + if kind == "target": + list_style = self.environment.substs.get("EXPAND_LIBS_LIST_STYLE") + else: + # The host compiler is not necessarily the same kind as the target + # compiler, so we can't be sure EXPAND_LIBS_LIST_STYLE is the right + # style to use ; however, all compilers support the `list` type, so + # use that. That doesn't cause any practical problem because where + # it really matters to use something else than `list` is when + # linking tons of objects (because of command line argument limits), + # which only really happens for libxul. 
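For context, each list style below yields both a file body and the argument
used to reference that file on the link command line; a condensed sketch
(object file names invented):

def render_list_file(list_style, objs, list_file_path):
    # Mirrors the style handling that follows: (linker argument, file content).
    if list_style == "linkerscript":
        return list_file_path, "\n".join('INPUT("%s")' % o for o in objs)
    if list_style == "filelist":
        return "-Wl,-filelist," + list_file_path, "\n".join(objs)
    if list_style == "list":
        return "@" + list_file_path, "\n".join(objs)
    return None, None

ref, content = render_list_file("list", ["a.o", "b.o"], "libxul.list")
assert ref == "@libxul.list"
assert content == "a.o\nb.o"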
+ list_style = "list" + list_file_path = mozpath.join(objdir, name) + objs = [os.path.relpath(o, objdir) for o in objs] + if list_style == "linkerscript": + ref = list_file_path + content = "\n".join('INPUT("%s")' % o for o in objs) + elif list_style == "filelist": + ref = "-Wl,-filelist," + list_file_path + content = "\n".join(objs) + elif list_style == "list": + ref = "@" + list_file_path + content = "\n".join(objs) + else: + return None + + mkdir(objdir) + with self._write_file(list_file_path) as fh: + fh.write(content) + + return ref + + def _handle_generated_sources(self, files): + self._generated_sources.update( + mozpath.relpath(f, self.environment.topobjdir) for f in files + ) + + def _handle_xpidl_sources(self): + bindings_rt_dir = mozpath.join( + self.environment.topobjdir, "dist", "xpcrs", "rt" + ) + bindings_bt_dir = mozpath.join( + self.environment.topobjdir, "dist", "xpcrs", "bt" + ) + include_dir = mozpath.join(self.environment.topobjdir, "dist", "include") + + self._handle_generated_sources( + itertools.chain.from_iterable( + ( + mozpath.join(include_dir, "%s.h" % stem), + mozpath.join(bindings_rt_dir, "%s.rs" % stem), + mozpath.join(bindings_bt_dir, "%s.rs" % stem), + ) + for stem in self._idl_manager.idl_stems() + ) + ) + + def _handle_webidl_collection(self, webidls): + + bindings_dir = mozpath.join(self.environment.topobjdir, "dom", "bindings") + + all_inputs = set(webidls.all_static_sources()) + for s in webidls.all_non_static_basenames(): + all_inputs.add(mozpath.join(bindings_dir, s)) + + generated_events_stems = webidls.generated_events_stems() + exported_stems = webidls.all_regular_stems() + + # The WebIDL manager reads configuration from a JSON file. So, we + # need to write this file early. + o = dict( + webidls=sorted(all_inputs), + generated_events_stems=sorted(generated_events_stems), + exported_stems=sorted(exported_stems), + example_interfaces=sorted(webidls.example_interfaces), + ) + + file_lists = mozpath.join(bindings_dir, "file-lists.json") + with self._write_file(file_lists) as fh: + json.dump(o, fh, sort_keys=True, indent=2) + + import mozwebidlcodegen + + manager = mozwebidlcodegen.create_build_system_manager( + self.environment.topsrcdir, + self.environment.topobjdir, + mozpath.join(self.environment.topobjdir, "dist"), + ) + self._handle_generated_sources(manager.expected_build_output_files()) + self._write_unified_files( + webidls.unified_source_mapping, bindings_dir, poison_windows_h=True + ) + self._handle_webidl_build( + bindings_dir, + webidls.unified_source_mapping, + webidls, + manager.expected_build_output_files(), + manager.GLOBAL_DEFINE_FILES, + ) + + def _handle_xpcom_collection(self, manifests): + components_dir = mozpath.join(manifests.topobjdir, "xpcom", "components") + + # The code generators read their configuration from this file, so it + # needs to be written early. 
+ o = dict(manifests=sorted(manifests.all_sources())) + + conf_file = mozpath.join(components_dir, "manifest-lists.json") + with self._write_file(conf_file) as fh: + json.dump(o, fh, sort_keys=True, indent=2) + + def _write_unified_file( + self, unified_file, source_filenames, output_directory, poison_windows_h=False + ): + with self._write_file(mozpath.join(output_directory, unified_file)) as f: + f.write("#define MOZ_UNIFIED_BUILD\n") + includeTemplate = '#include "%(cppfile)s"' + if poison_windows_h: + includeTemplate += ( + "\n" + "#if defined(_WINDOWS_) && !defined(MOZ_WRAPPED_WINDOWS_H)\n" + '#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n' # noqa + '#error "%(cppfile)s included unwrapped windows.h"\n' + "#endif" + ) + includeTemplate += ( + "\n" + "#ifdef PL_ARENA_CONST_ALIGN_MASK\n" + '#error "%(cppfile)s uses PL_ARENA_CONST_ALIGN_MASK, ' + 'so it cannot be built in unified mode."\n' + "#undef PL_ARENA_CONST_ALIGN_MASK\n" + "#endif\n" + "#ifdef INITGUID\n" + '#error "%(cppfile)s defines INITGUID, ' + 'so it cannot be built in unified mode."\n' + "#undef INITGUID\n" + "#endif" + ) + f.write( + "\n".join(includeTemplate % {"cppfile": s} for s in source_filenames) + ) + + def _write_unified_files( + self, unified_source_mapping, output_directory, poison_windows_h=False + ): + for unified_file, source_filenames in unified_source_mapping: + self._write_unified_file( + unified_file, source_filenames, output_directory, poison_windows_h + ) + + def localized_path(self, relativesrcdir, filename): + """Return the localized path for a file. + + Given ``relativesrcdir``, a path relative to the topsrcdir, return a path to ``filename`` + from the current locale as specified by ``MOZ_UI_LOCALE``, using ``L10NBASEDIR`` as the + parent directory for non-en-US locales. + """ + ab_cd = self.environment.substs["MOZ_UI_LOCALE"][0] + l10nbase = mozpath.join(self.environment.substs["L10NBASEDIR"], ab_cd) + # Filenames from LOCALIZED_FILES will start with en-US/. + if filename.startswith("en-US/"): + e, filename = filename.split("en-US/") + assert not e + if ab_cd == "en-US": + return mozpath.join( + self.environment.topsrcdir, relativesrcdir, "en-US", filename + ) + if mozpath.basename(relativesrcdir) == "locales": + l10nrelsrcdir = mozpath.dirname(relativesrcdir) + else: + l10nrelsrcdir = relativesrcdir + return mozpath.join(l10nbase, l10nrelsrcdir, filename) + + def _consume_jar_manifest(self, obj): + # Ideally, this would all be handled somehow in the emitter, but + # this would require all the magic surrounding l10n and addons in + # the recursive make backend to die, which is not going to happen + # any time soon enough. + # Notably missing: + # - DEFINES from config/config.mk + # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in + # moz.build, but it doesn't matter in dist/bin. 
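Regarding _write_unified_file above: stripped of the poison checks, a
generated unified file is just a define followed by a run of includes.
A minimal sketch (file names invented):

def unified_chunk(cppfiles):
    lines = ["#define MOZ_UNIFIED_BUILD"]
    lines += ['#include "%s"' % f for f in cppfiles]
    return "\n".join(lines)

assert unified_chunk(["nsFoo.cpp", "nsBar.cpp"]) == (
    '#define MOZ_UNIFIED_BUILD\n'
    '#include "nsFoo.cpp"\n'
    '#include "nsBar.cpp"'
)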
+ pp = Preprocessor() + if obj.defines: + pp.context.update(obj.defines.defines) + pp.context.update(self.environment.defines) + ab_cd = obj.config.substs["MOZ_UI_LOCALE"][0] + pp.context.update(AB_CD=ab_cd) + pp.out = JarManifestParser() + try: + pp.do_include(obj.path.full_path) + except DeprecatedJarManifest as e: + raise DeprecatedJarManifest( + "Parsing error while processing %s: %s" % (obj.path.full_path, e) + ) + self.backend_input_files |= pp.includes + + for jarinfo in pp.out: + jar_context = Context( + allowed_variables=VARIABLES, config=obj._context.config + ) + jar_context.push_source(obj._context.main_path) + jar_context.push_source(obj.path.full_path) + + install_target = obj.install_target + if jarinfo.base: + install_target = mozpath.normpath( + mozpath.join(install_target, jarinfo.base) + ) + jar_context["FINAL_TARGET"] = install_target + if obj.defines: + jar_context["DEFINES"] = obj.defines.defines + files = jar_context["FINAL_TARGET_FILES"] + files_pp = jar_context["FINAL_TARGET_PP_FILES"] + localized_files = jar_context["LOCALIZED_FILES"] + localized_files_pp = jar_context["LOCALIZED_PP_FILES"] + + for e in jarinfo.entries: + if e.is_locale: + if jarinfo.relativesrcdir: + src = "/%s" % jarinfo.relativesrcdir + else: + src = "" + src = mozpath.join(src, "en-US", e.source) + else: + src = e.source + + src = Path(jar_context, src) + + if "*" not in e.source and not os.path.exists(src.full_path): + if e.is_locale: + raise Exception( + "%s: Cannot find %s (tried %s)" + % (obj.path, e.source, src.full_path) + ) + if e.source.startswith("/"): + src = Path(jar_context, "!" + e.source) + else: + # This actually gets awkward if the jar.mn is not + # in the same directory as the moz.build declaring + # it, but it's how it works in the recursive make, + # not that anything relies on that, but it's simpler. + src = Path(obj._context, "!" + e.source) + + output_basename = mozpath.basename(e.output) + if output_basename != src.target_basename: + src = RenamedSourcePath(jar_context, (src, output_basename)) + path = mozpath.dirname(mozpath.join(jarinfo.name, e.output)) + + if e.preprocess: + if "*" in e.source: + raise Exception( + "%s: Wildcards are not supported with " + "preprocessing" % obj.path + ) + if e.is_locale: + localized_files_pp[path] += [src] + else: + files_pp[path] += [src] + else: + if e.is_locale: + localized_files[path] += [src] + else: + files[path] += [src] + + if files: + self.consume_object(FinalTargetFiles(jar_context, files)) + if files_pp: + self.consume_object(FinalTargetPreprocessedFiles(jar_context, files_pp)) + if localized_files: + self.consume_object(LocalizedFiles(jar_context, localized_files)) + if localized_files_pp: + self.consume_object( + LocalizedPreprocessedFiles(jar_context, localized_files_pp) + ) + + for m in jarinfo.chrome_manifests: + entry = parse_manifest_line( + mozpath.dirname(jarinfo.name), + m.replace("%", mozpath.basename(jarinfo.name) + "/"), + ) + self.consume_object( + ChromeManifestEntry( + jar_context, "%s.manifest" % jarinfo.name, entry + ) + ) + + def _write_rust_xpidl_summary(self, manager): + """Write out a rust file which includes the generated xpcom rust modules""" + topobjdir = self.environment.topobjdir + + include_tmpl = 'include!(mozbuild::objdir_path!("dist/xpcrs/%s/%s.rs"))' + + # Ensure deterministic output files. 
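The writer below emits one include!() line per IDL stem into rt/all.rs and
bt/all.rs; a tiny sketch of the rt flavor, using the same template (the
stems are invented):

include_tmpl = 'include!(mozbuild::objdir_path!("dist/xpcrs/%s/%s.rs"))'
stems = sorted(["nsIBar", "nsIFoo"])
body = "".join(include_tmpl % ("rt", stem) + ";\n" for stem in stems)
print(body)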
+ stems = sorted(manager.idl_stems()) + + with self._write_file( + mozpath.join(topobjdir, "dist", "xpcrs", "rt", "all.rs") + ) as fh: + fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n") + for stem in stems: + fh.write(include_tmpl % ("rt", stem)) + fh.write(";\n") + + with self._write_file( + mozpath.join(topobjdir, "dist", "xpcrs", "bt", "all.rs") + ) as fh: + fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n") + fh.write("&[\n") + for stem in stems: + fh.write(include_tmpl % ("bt", stem)) + fh.write(",\n") + fh.write("]\n") diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py new file mode 100644 index 0000000000..eef1b62ee6 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/configenvironment.py @@ -0,0 +1,357 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import sys +from collections import OrderedDict +from collections.abc import Iterable +from pathlib import Path +from types import ModuleType + +import mozpack.path as mozpath +import six + +from mozbuild.shellutil import quote as shell_quote +from mozbuild.util import ( + FileAvoidWrite, + ReadOnlyDict, + memoized_property, + system_encoding, +) + + +class ConfigStatusFailure(Exception): + """Error loading config.status""" + + +class BuildConfig(object): + """Represents the output of configure.""" + + _CODE_CACHE = {} + + def __init__(self): + self.topsrcdir = None + self.topobjdir = None + self.defines = {} + self.substs = {} + self.files = [] + self.mozconfig = None + + @classmethod + def from_config_status(cls, path): + """Create an instance from a config.status file.""" + code_cache = cls._CODE_CACHE + mtime = os.path.getmtime(path) + + # cache the compiled code as it can be reused + # we cache it the first time, or if the file changed + if path not in code_cache or code_cache[path][0] != mtime: + # Add config.status manually to sys.modules so it gets picked up by + # iter_modules_in_path() for automatic dependencies. + mod = ModuleType("config.status") + mod.__file__ = path + sys.modules["config.status"] = mod + + with open(path, "rt") as fh: + source = fh.read() + code_cache[path] = ( + mtime, + compile(source, path, "exec", dont_inherit=1), + ) + + g = {"__builtins__": __builtins__, "__file__": path} + l = {} + try: + exec(code_cache[path][1], g, l) + except Exception: + raise ConfigStatusFailure() + + config = BuildConfig() + + for name in l["__all__"]: + setattr(config, name, l[name]) + + return config + + +class ConfigEnvironment(object): + """Perform actions associated with a configured but bare objdir. + + The purpose of this class is to preprocess files from the source directory + and output results in the object directory. + + There are two types of files: config files and config headers, + each treated through a different member function. + + Creating a ConfigEnvironment requires a few arguments: + - topsrcdir and topobjdir are, respectively, the top source and + the top object directory. + - defines is a dict filled from AC_DEFINE and AC_DEFINE_UNQUOTED in autoconf. + - substs is a dict filled from AC_SUBST in autoconf. + + ConfigEnvironment automatically defines one additional substs variable + from all the defines: + - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on + preprocessor command lines. 
The order in which defines were given
+      when creating the ConfigEnvironment is preserved.
+
+    and two other additional subst variables from all the other substs:
+      - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
+        order, for use in autoconf.mk. It includes ACDEFINES.
+        Only substs with a VALUE are included, such that the resulting file
+        doesn't change when new empty substs are added.
+        This results in less invalidation of build dependencies in the case
+        of autoconf.mk.
+      - ALLEMPTYSUBSTS contains the substs with an empty value, in the form
+        NAME =.
+
+    ConfigEnvironment expects a "top_srcdir" subst to be set with the top
+    source directory, in MSYS format on Windows. It is used to derive a
+    "srcdir" subst when treating config files. It can either be an absolute
+    path or a path relative to the topobjdir.
+    """
+
+    def __init__(
+        self,
+        topsrcdir,
+        topobjdir,
+        defines=None,
+        substs=None,
+        source=None,
+        mozconfig=None,
+    ):
+
+        if not source:
+            source = mozpath.join(topobjdir, "config.status")
+        self.source = source
+        self.defines = ReadOnlyDict(defines or {})
+        self.substs = dict(substs or {})
+        self.topsrcdir = mozpath.abspath(topsrcdir)
+        self.topobjdir = mozpath.abspath(topobjdir)
+        self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None
+        self.lib_prefix = self.substs.get("LIB_PREFIX", "")
+        if "LIB_SUFFIX" in self.substs:
+            self.lib_suffix = ".%s" % self.substs["LIB_SUFFIX"]
+        self.dll_prefix = self.substs.get("DLL_PREFIX", "")
+        self.dll_suffix = self.substs.get("DLL_SUFFIX", "")
+        self.host_dll_prefix = self.substs.get("HOST_DLL_PREFIX", "")
+        self.host_dll_suffix = self.substs.get("HOST_DLL_SUFFIX", "")
+        if self.substs.get("IMPORT_LIB_SUFFIX"):
+            self.import_prefix = self.lib_prefix
+            self.import_suffix = ".%s" % self.substs["IMPORT_LIB_SUFFIX"]
+        else:
+            self.import_prefix = self.dll_prefix
+            self.import_suffix = self.dll_suffix
+        if self.substs.get("HOST_IMPORT_LIB_SUFFIX"):
+            self.host_import_prefix = self.substs.get("HOST_LIB_PREFIX", "")
+            self.host_import_suffix = ".%s" % self.substs["HOST_IMPORT_LIB_SUFFIX"]
+        else:
+            self.host_import_prefix = self.host_dll_prefix
+            self.host_import_suffix = self.host_dll_suffix
+        self.bin_suffix = self.substs.get("BIN_SUFFIX", "")
+
+        global_defines = [name for name in self.defines]
+        self.substs["ACDEFINES"] = " ".join(
+            [
+                "-D%s=%s" % (name, shell_quote(self.defines[name]).replace("$", "$$"))
+                for name in sorted(global_defines)
+            ]
+        )
+
+        def serialize(name, obj):
+            if isinstance(obj, six.string_types):
+                return obj
+            if isinstance(obj, Iterable):
+                return " ".join(obj)
+            raise Exception("Unhandled type %s for %s" % (type(obj), str(name)))
+
+        self.substs["ALLSUBSTS"] = "\n".join(
+            sorted(
+                [
+                    "%s = %s" % (name, serialize(name, self.substs[name]))
+                    for name in self.substs
+                    if self.substs[name]
+                ]
+            )
+        )
+        self.substs["ALLEMPTYSUBSTS"] = "\n".join(
+            sorted(["%s =" % name for name in self.substs if not self.substs[name]])
+        )
+
+        self.substs = ReadOnlyDict(self.substs)
+
+    @property
+    def is_artifact_build(self):
+        return self.substs.get("MOZ_ARTIFACT_BUILDS", False)
+
+    @memoized_property
+    def acdefines(self):
+        acdefines = dict((name, self.defines[name]) for name in self.defines)
+        return ReadOnlyDict(acdefines)
+
+    @staticmethod
+    def from_config_status(path):
+        config = BuildConfig.from_config_status(path)
+
+        return ConfigEnvironment(
+            config.topsrcdir, config.topobjdir, config.defines, config.substs, path
+        )
+
+
+class PartialConfigDict(object):
+    """Facilitates mapping the
config.statusd defines & substs with dict-like access. + + This allows a buildconfig client to use buildconfig.defines['FOO'] (and + similar for substs), where the value of FOO is delay-loaded until it is + needed. + """ + + def __init__(self, config_statusd, typ, environ_override=False): + self._dict = {} + self._datadir = mozpath.join(config_statusd, typ) + self._config_track = mozpath.join(self._datadir, "config.track") + self._files = set() + self._environ_override = environ_override + + def _load_config_track(self): + existing_files = set() + try: + with open(self._config_track) as fh: + existing_files.update(fh.read().splitlines()) + except IOError: + pass + return existing_files + + def _write_file(self, key, value): + filename = mozpath.join(self._datadir, key) + with FileAvoidWrite(filename) as fh: + to_write = json.dumps(value, indent=4) + fh.write(to_write.encode(system_encoding)) + return filename + + def _fill_group(self, values): + # Clear out any cached values. This is mostly for tests that will check + # the environment, write out a new set of variables, and then check the + # environment again. Normally only configure ends up calling this + # function, and other consumers create their own + # PartialConfigEnvironments in new python processes. + self._dict = {} + + existing_files = self._load_config_track() + existing_files = {Path(f) for f in existing_files} + + new_files = set() + for k, v in six.iteritems(values): + new_files.add(Path(self._write_file(k, v))) + + for filename in existing_files - new_files: + # We can't actually os.remove() here, since make would not see that the + # file has been removed and that the target needs to be updated. Instead + # we just overwrite the file with a value of None, which is equivalent + # to a non-existing file. + with FileAvoidWrite(filename) as fh: + json.dump(None, fh) + + with FileAvoidWrite(self._config_track) as fh: + for f in sorted(new_files): + fh.write("%s\n" % f) + + def __getitem__(self, key): + if self._environ_override: + if (key not in ("CPP", "CXXCPP", "SHELL")) and (key in os.environ): + return os.environ[key] + + if key not in self._dict: + data = None + try: + filename = mozpath.join(self._datadir, key) + self._files.add(filename) + with open(filename) as f: + data = json.load(f) + except IOError: + pass + self._dict[key] = data + + if self._dict[key] is None: + raise KeyError("'%s'" % key) + return self._dict[key] + + def __setitem__(self, key, value): + self._dict[key] = value + + def get(self, key, default=None): + return self[key] if key in self else default + + def __contains__(self, key): + try: + return self[key] is not None + except KeyError: + return False + + def iteritems(self): + existing_files = self._load_config_track() + for f in existing_files: + # The track file contains filenames, and the basename is the + # variable name. + var = mozpath.basename(f) + yield var, self[var] + + +class PartialConfigEnvironment(object): + """Allows access to individual config.status items via config.statusd/* files. + + This class is similar to the full ConfigEnvironment, which uses + config.status, except this allows access and tracks dependencies to + individual configure values. It is intended to be used during the build + process to handle things like GENERATED_FILES, CONFIGURE_DEFINE_FILES, and + anything else that may need to access specific substs or defines. 
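A minimal sketch of the delay-loading this enables, assuming the
config.statusd layout described above (paths and values invented):

import json
import os

def load_partial(config_statusd, typ, key):
    # Each variable lives in its own JSON file, e.g.
    # <topobjdir>/config.statusd/substs/MOZ_APP_NAME; a JSON null means
    # the variable was removed.
    with open(os.path.join(config_statusd, typ, key)) as f:
        value = json.load(f)
    if value is None:
        raise KeyError(key)
    return value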
+ + Creating a PartialConfigEnvironment requires only the topobjdir, which is + needed to distinguish between the top-level environment and the js/src + environment. + + The PartialConfigEnvironment automatically defines one additional subst variable + from all the defines: + + - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on + preprocessor command lines. The order in which defines were given + when creating the ConfigEnvironment is preserved. + + and one additional define from all the defines as a dictionary: + + - ALLDEFINES contains all of the global defines as a dictionary. This is + intended to be used instead of the defines structure from config.status so + that scripts can depend directly on its value. + """ + + def __init__(self, topobjdir): + config_statusd = mozpath.join(topobjdir, "config.statusd") + self.substs = PartialConfigDict(config_statusd, "substs", environ_override=True) + self.defines = PartialConfigDict(config_statusd, "defines") + self.topobjdir = topobjdir + + def write_vars(self, config): + substs = config["substs"].copy() + defines = config["defines"].copy() + + global_defines = [name for name in config["defines"]] + acdefines = " ".join( + [ + "-D%s=%s" + % (name, shell_quote(config["defines"][name]).replace("$", "$$")) + for name in sorted(global_defines) + ] + ) + substs["ACDEFINES"] = acdefines + + all_defines = OrderedDict() + for k in global_defines: + all_defines[k] = config["defines"][k] + defines["ALLDEFINES"] = all_defines + + self.substs._fill_group(substs) + self.defines._fill_group(defines) + + def get_dependencies(self): + return ["$(wildcard %s)" % f for f in self.substs._files | self.defines._files] diff --git a/python/mozbuild/mozbuild/backend/cpp_eclipse.py b/python/mozbuild/mozbuild/backend/cpp_eclipse.py new file mode 100644 index 0000000000..413cca3f75 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/cpp_eclipse.py @@ -0,0 +1,876 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import errno +import glob +import os +import shutil +import subprocess +from xml.sax.saxutils import quoteattr + +from mozbuild.base import ExecutionSummary + +from ..frontend.data import ComputedFlags +from .common import CommonBackend + +# TODO Have ./mach eclipse generate the workspace and index it: +# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse +# Open eclipse: +# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace + + +class CppEclipseBackend(CommonBackend): + """Backend that generates Cpp Eclipse project files.""" + + def __init__(self, environment): + if os.name == "nt": + raise Exception( + "Eclipse is not supported on Windows. " + "Consider using Visual Studio instead." 
+            )
+        super(CppEclipseBackend, self).__init__(environment)
+
+    def _init(self):
+        CommonBackend._init(self)
+
+        self._args_for_dirs = {}
+        self._project_name = "Gecko"
+        self._workspace_dir = self._get_workspace_path()
+        self._workspace_lang_dir = os.path.join(
+            self._workspace_dir, ".metadata/.plugins/org.eclipse.cdt.core"
+        )
+        self._project_dir = os.path.join(self._workspace_dir, self._project_name)
+        self._overwriting_workspace = os.path.isdir(self._workspace_dir)
+
+        self._macbundle = self.environment.substs["MOZ_MACBUNDLE_NAME"]
+        self._appname = self.environment.substs["MOZ_APP_NAME"]
+        self._bin_suffix = self.environment.substs["BIN_SUFFIX"]
+        self._cxx = self.environment.substs["CXX"]
+        # Note: We need the C preprocessor (CPP) flags, not the CXX flags
+        self._cppflags = self.environment.substs.get("CPPFLAGS", "")
+
+    def summary(self):
+        return ExecutionSummary(
+            "CppEclipse backend executed in {execution_time:.2f}s\n"
+            'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
+            "If missing, import the project using File > Import > General > Existing Project into workspace\n"
+            "\n"
+            "Run with: eclipse -data {workspace:s}\n",
+            execution_time=self._execution_time,
+            workspace=self._workspace_dir,
+        )
+
+    def _get_workspace_path(self):
+        return CppEclipseBackend.get_workspace_path(
+            self.environment.topsrcdir, self.environment.topobjdir
+        )
+
+    @staticmethod
+    def get_workspace_path(topsrcdir, topobjdir):
+        # Eclipse doesn't support having the workspace inside the srcdir.
+        # Since most people have their objdir inside their srcdir, it's easier
+        # and more consistent to just put the workspace alongside the srcdir.
+        srcdir_parent = os.path.dirname(topsrcdir)
+        workspace_dirname = "eclipse_" + os.path.basename(topobjdir)
+        return os.path.join(srcdir_parent, workspace_dirname)
+
+    def consume_object(self, obj):
+        reldir = getattr(obj, "relsrcdir", None)
+
+        # Note that unlike VS, Eclipse's indexer seems to crawl the headers and
+        # isn't picky about the local includes.
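A condensed sketch of how the ComputedFlags handler below folds the flag
categories into per-directory include and define lists (flag values
invented):

args = {"includes": [], "defines": []}
flags = {
    "BASE_INCLUDES": ["-I/obj/dist/include"],
    "LOCAL_INCLUDES": ["-I/src/dom/base"],
    "DEFINES": ["-DMOZILLA_INTERNAL_API"],
}
for key, dest in (
    ("BASE_INCLUDES", "includes"),
    ("LOCAL_INCLUDES", "includes"),
    ("DEFINES", "defines"),
    ("LIBRARY_DEFINES", "defines"),
):
    args[dest] += flags.get(key) or []
assert args["includes"] == ["-I/obj/dist/include", "-I/src/dom/base"]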
+ if isinstance(obj, ComputedFlags): + args = self._args_for_dirs.setdefault( + "tree/" + reldir, {"includes": [], "defines": []} + ) + # use the same args for any objdirs we include: + if reldir == "dom/bindings": + self._args_for_dirs.setdefault("generated-webidl", args) + if reldir == "ipc/ipdl": + self._args_for_dirs.setdefault("generated-ipdl", args) + + includes = args["includes"] + if "BASE_INCLUDES" in obj.flags and obj.flags["BASE_INCLUDES"]: + includes += obj.flags["BASE_INCLUDES"] + if "LOCAL_INCLUDES" in obj.flags and obj.flags["LOCAL_INCLUDES"]: + includes += obj.flags["LOCAL_INCLUDES"] + + defs = args["defines"] + if "DEFINES" in obj.flags and obj.flags["DEFINES"]: + defs += obj.flags["DEFINES"] + if "LIBRARY_DEFINES" in obj.flags and obj.flags["LIBRARY_DEFINES"]: + defs += obj.flags["LIBRARY_DEFINES"] + + return True + + def consume_finished(self): + settings_dir = os.path.join(self._project_dir, ".settings") + launch_dir = os.path.join(self._project_dir, "RunConfigurations") + workspace_settings_dir = os.path.join( + self._workspace_dir, ".metadata/.plugins/org.eclipse.core.runtime/.settings" + ) + + for dir_name in [ + self._project_dir, + settings_dir, + launch_dir, + workspace_settings_dir, + self._workspace_lang_dir, + ]: + try: + os.makedirs(dir_name) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + project_path = os.path.join(self._project_dir, ".project") + with open(project_path, "w") as fh: + self._write_project(fh) + + cproject_path = os.path.join(self._project_dir, ".cproject") + with open(cproject_path, "w") as fh: + self._write_cproject(fh) + + language_path = os.path.join(settings_dir, "language.settings.xml") + with open(language_path, "w") as fh: + self._write_language_settings(fh) + + workspace_language_path = os.path.join( + self._workspace_lang_dir, "language.settings.xml" + ) + with open(workspace_language_path, "w") as fh: + workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE + workspace_lang_settings = workspace_lang_settings.replace( + "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags + ) + fh.write(workspace_lang_settings) + + self._write_launch_files(launch_dir) + + core_resources_prefs_path = os.path.join( + workspace_settings_dir, "org.eclipse.core.resources.prefs" + ) + with open(core_resources_prefs_path, "w") as fh: + fh.write(STATIC_CORE_RESOURCES_PREFS) + + core_runtime_prefs_path = os.path.join( + workspace_settings_dir, "org.eclipse.core.runtime.prefs" + ) + with open(core_runtime_prefs_path, "w") as fh: + fh.write(STATIC_CORE_RUNTIME_PREFS) + + ui_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.prefs") + with open(ui_prefs_path, "w") as fh: + fh.write(STATIC_UI_PREFS) + + cdt_ui_prefs_path = os.path.join( + workspace_settings_dir, "org.eclipse.cdt.ui.prefs" + ) + cdt_ui_prefs = STATIC_CDT_UI_PREFS + # Here we generate the code formatter that will show up in the UI with + # the name "Mozilla". The formatter is stored as a single line of XML + # in the org.eclipse.cdt.ui.formatterprofiles pref. 
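Each FORMATTER_SETTINGS entry below is a plain name=value line that gets
re-encoded into that single-line XML profile; a sketch of the parse (two
sample prefs shown):

sample = [
    "org.eclipse.cdt.core.formatter.lineSplit=80",
    "org.eclipse.cdt.core.formatter.tabulation.char=space",
]
profile = {}
for line in sample:
    pref, val = line.split("=", 1)  # maxsplit guards against "=" in values
    profile[pref] = val
assert profile["org.eclipse.cdt.core.formatter.lineSplit"] == "80"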
+ cdt_ui_prefs += """org.eclipse.cdt.ui.formatterprofiles=\\n\\n\\n""" + XML_PREF_TEMPLATE = """\\n""" + for line in FORMATTER_SETTINGS.splitlines(): + [pref, val] = line.split("=") + cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace( + "@PREF_VAL@", val + ) + cdt_ui_prefs += "\\n\\n" + with open(cdt_ui_prefs_path, "w") as fh: + fh.write(cdt_ui_prefs) + + cdt_core_prefs_path = os.path.join( + workspace_settings_dir, "org.eclipse.cdt.core.prefs" + ) + with open(cdt_core_prefs_path, "w") as fh: + cdt_core_prefs = STATIC_CDT_CORE_PREFS + # When we generated the code formatter called "Mozilla" above, we + # also set it to be the active formatter. When a formatter is set + # as the active formatter all its prefs are set in this prefs file, + # so we need add those now: + cdt_core_prefs += FORMATTER_SETTINGS + fh.write(cdt_core_prefs) + + editor_prefs_path = os.path.join( + workspace_settings_dir, "org.eclipse.ui.editors.prefs" + ) + with open(editor_prefs_path, "w") as fh: + fh.write(EDITOR_SETTINGS) + + # Now import the project into the workspace + self._import_project() + + def _import_project(self): + # If the workspace already exists then don't import the project again because + # eclipse doesn't handle this properly + if self._overwriting_workspace: + return + + # We disable the indexer otherwise we're forced to index + # the whole codebase when importing the project. Indexing the project can take 20 minutes. + self._write_noindex() + + try: + subprocess.check_call( + [ + "eclipse", + "-application", + "-nosplash", + "org.eclipse.cdt.managedbuilder.core.headlessbuild", + "-data", + self._workspace_dir, + "-importAll", + self._project_dir, + ] + ) + except OSError as e: + # Remove the workspace directory so we re-generate it and + # try to import again when the backend is invoked again. + shutil.rmtree(self._workspace_dir) + + if e.errno == errno.ENOENT: + raise Exception( + "Failed to launch eclipse to import project. " + "Ensure 'eclipse' is in your PATH and try again" + ) + else: + raise + finally: + self._remove_noindex() + + def _write_noindex(self): + noindex_path = os.path.join( + self._project_dir, ".settings/org.eclipse.cdt.core.prefs" + ) + with open(noindex_path, "w") as fh: + fh.write(NOINDEX_TEMPLATE) + + def _remove_noindex(self): + # Below we remove the config file that temporarily disabled the indexer + # while we were importing the project. Unfortunately, CDT doesn't + # notice indexer settings changes in config files when it restarts. To + # work around that we remove the index database here to force it to: + for f in glob.glob(os.path.join(self._workspace_lang_dir, "Gecko.*.pdom")): + os.remove(f) + + noindex_path = os.path.join( + self._project_dir, ".settings/org.eclipse.cdt.core.prefs" + ) + # This may fail if the entire tree has been removed; that's fine. 
+ try: + os.remove(noindex_path) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + def _write_language_settings(self, fh): + def add_abs_include_path(absinclude): + assert absinclude[:3] == "-I/" + return LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE.replace( + "@INCLUDE_PATH@", absinclude[2:] + ) + + def add_objdir_include_path(relpath): + p = os.path.join(self.environment.topobjdir, relpath) + return LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE.replace("@INCLUDE_PATH@", p) + + def add_define(name, value): + define = LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE + define = define.replace("@NAME@", name) + # We use quoteattr here because some defines contain characters + # such as "<" and '"' which need proper XML escaping. + define = define.replace("@VALUE@", quoteattr(value)) + return define + + fh.write(LANGUAGE_SETTINGS_TEMPLATE_HEADER) + + # Unfortunately, whenever we set a user defined include path or define + # on a directory, Eclipse ignores user defined include paths and defines + # on ancestor directories. That means that we need to add all the + # common include paths and defines to every single directory entry that + # we add settings for. (Fortunately that doesn't appear to have a + # noticeable impact on the time it takes to open the generated Eclipse + # project.) We do that by generating a template here that we can then + # use for each individual directory in the loop below. + # + dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER + + # Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS): + dirsettings_template = dirsettings_template.replace( + "@PREINCLUDE_FILE_PATH@", + os.path.join(self.environment.topobjdir, "dist/include/mozilla-config.h"), + ) + dirsettings_template += add_define("MOZILLA_CLIENT", "1") + + # Add EXTRA_INCLUDES args: + dirsettings_template += add_objdir_include_path("dist/include") + + # Add OS_INCLUDES args: + # XXX media/webrtc/trunk/webrtc's moz.builds reset this. + dirsettings_template += add_objdir_include_path("dist/include/nspr") + dirsettings_template += add_objdir_include_path("dist/include/nss") + + # Finally, add anything else that makes things work better. + # + # Because of https://developer.mozilla.org/en-US/docs/Eclipse_CDT#Headers_are_only_parsed_once + # we set MOZILLA_INTERNAL_API for all directories to make sure + # headers are indexed with MOZILLA_INTERNAL_API set. Unfortunately + # this means that MOZILLA_EXTERNAL_API code will suffer. + # + # TODO: If we're doing this for MOZILLA_EXTERNAL_API then we may want + # to do it for other LIBRARY_DEFINES's defines too. Well, at least for + # STATIC_EXPORTABLE_JS_API which may be important to JS people. + # (The other two LIBRARY_DEFINES defines -- MOZ_HAS_MOZGLUE and + # IMPL_LIBXUL -- don't affect much and probably don't matter to anyone). + # + # TODO: Should we also always set DEBUG so that DEBUG code is always + # indexed? Or is there significant amounts of non-DEBUG code that + # would be adversely affected? + # + # TODO: Investigate whether the ordering of directories in the project + # file can be used to our advantage so that the first indexing of + # important headers has the defines we want. 
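The add_define helper above leans on quoteattr for the XML escaping it
mentions; for instance (an invented define value):

from xml.sax.saxutils import quoteattr

# Angle brackets and ampersands must not leak raw into the .settings XML.
assert quoteattr("std::map<int,int>") == '"std::map&lt;int,int&gt;"'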
+ # + dirsettings_template += add_objdir_include_path("ipc/ipdl/_ipdlheaders") + dirsettings_template += add_define("MOZILLA_INTERNAL_API", "1") + + for path, args in self._args_for_dirs.items(): + dirsettings = dirsettings_template + dirsettings = dirsettings.replace("@RELATIVE_PATH@", path) + for i in args["includes"]: + dirsettings += add_abs_include_path(i) + for d in args["defines"]: + assert d[:2] == u"-D" or d[:2] == u"-U" + if d[:2] == u"-U": + # gfx/harfbuzz/src uses -UDEBUG, at least on Mac + # netwerk/sctp/src uses -U__APPLE__ on Mac + # XXX We should make this code smart enough to remove existing defines. + continue + d = d[2:] # get rid of leading "-D" + name_value = d.split("=", 1) + name = name_value[0] + value = "" + if len(name_value) == 2: + value = name_value[1] + dirsettings += add_define(name, str(value)) + dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER + fh.write(dirsettings) + + fh.write( + LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace( + "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags + ) + ) + + def _write_launch_files(self, launch_dir): + bin_dir = os.path.join(self.environment.topobjdir, "dist") + + # TODO Improve binary detection + if self._macbundle: + exe_path = os.path.join(bin_dir, self._macbundle, "Contents/MacOS") + else: + exe_path = os.path.join(bin_dir, "bin") + + exe_path = os.path.join(exe_path, self._appname + self._bin_suffix) + + main_gecko_launch = os.path.join(launch_dir, "gecko.launch") + with open(main_gecko_launch, "w") as fh: + launch = GECKO_LAUNCH_CONFIG_TEMPLATE + launch = launch.replace("@LAUNCH_PROGRAM@", exe_path) + launch = launch.replace("@LAUNCH_ARGS@", "-P -no-remote") + fh.write(launch) + + # TODO Add more launch configs (and delegate calls to mach) + + def _write_project(self, fh): + project = PROJECT_TEMPLATE + + project = project.replace("@PROJECT_NAME@", self._project_name) + project = project.replace("@PROJECT_TOPSRCDIR@", self.environment.topsrcdir) + project = project.replace( + "@GENERATED_IPDL_FILES@", + os.path.join(self.environment.topobjdir, "ipc", "ipdl"), + ) + project = project.replace( + "@GENERATED_WEBIDL_FILES@", + os.path.join(self.environment.topobjdir, "dom", "bindings"), + ) + fh.write(project) + + def _write_cproject(self, fh): + cproject_header = CPROJECT_TEMPLATE_HEADER + cproject_header = cproject_header.replace( + "@PROJECT_TOPSRCDIR@", self.environment.topobjdir + ) + cproject_header = cproject_header.replace( + "@MACH_COMMAND@", os.path.join(self.environment.topsrcdir, "mach") + ) + fh.write(cproject_header) + fh.write(CPROJECT_TEMPLATE_FOOTER) + + +PROJECT_TEMPLATE = """ + + @PROJECT_NAME@ + + + + + + org.eclipse.cdt.managedbuilder.core.genmakebuilder + clean,full,incremental, + + + + + org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder + + + + + + + org.eclipse.cdt.core.cnature + org.eclipse.cdt.core.ccnature + org.eclipse.cdt.managedbuilder.core.managedBuildNature + org.eclipse.cdt.managedbuilder.core.ScannerConfigNature + + + + tree + 2 + @PROJECT_TOPSRCDIR@ + + + generated-ipdl + 2 + @GENERATED_IPDL_FILES@ + + + generated-webidl + 2 + @GENERATED_WEBIDL_FILES@ + + + + + 17111971 + tree + 30 + + org.eclipse.ui.ide.multiFilter + 1.0-name-matches-false-false-obj-* + + + + 14081994 + tree + 22 + + org.eclipse.ui.ide.multiFilter + 1.0-name-matches-false-false-*.rej + + + + 25121970 + tree + 22 + + org.eclipse.ui.ide.multiFilter + 1.0-name-matches-false-false-*.orig + + + + 10102004 + tree + 10 + + org.eclipse.ui.ide.multiFilter + 1.0-name-matches-false-false-.hg + + + + 23122002 + tree + 22 
+ + org.eclipse.ui.ide.multiFilter + 1.0-name-matches-false-false-*.pyc + + + + +""" + +CPROJECT_TEMPLATE_HEADER = """ + + + + + + + + + + + + + + + + + + + + + + + + +""" +CPROJECT_TEMPLATE_FILEINFO = """ + + + + + +""" +CPROJECT_TEMPLATE_FOOTER = """ + + + + + + + + + + + + + + + + + + + + + + +""" + +WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """ + + + + + + + + +""" + + +# The settings set via this template can be found in the UI by opening +# the Properties for a directory in the Project Explorer tab, then going to +# C/C++ General > Preprocessor Include Paths, Macros, etc., selecting the +# C++ item from the Languages column, and then expanding the +# CDT User Settings Entries item to the right. + +LANGUAGE_SETTINGS_TEMPLATE_HEADER = """ + + + + + +""" + +LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER = """ + + + +""" + +LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE = """ + + +""" + +LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE = """ +""" + +LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER = """ +""" + +LANGUAGE_SETTINGS_TEMPLATE_FOOTER = """ + + + + + + + + + +""" + + +GECKO_LAUNCH_CONFIG_TEMPLATE = """ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + + +EDITOR_SETTINGS = """eclipse.preferences.version=1 +lineNumberRuler=true +overviewRuler_migration=migrated_3.1 +printMargin=true +printMarginColumn=80 +showCarriageReturn=false +showEnclosedSpaces=false +showLeadingSpaces=false +showLineFeed=false +showWhitespaceCharacters=true +spacesForTabs=true +tabWidth=2 +undoHistorySize=200 +""" + + +STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1 +refresh.enabled=true +""" + +STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1 +content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm +content-types/org.eclipse.core.runtime.xml/file-extensions=xul +content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm +""" + +STATIC_UI_PREFS = """eclipse.preferences.version=1 +showIntro=false +""" + +STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1 +indexer.updatePolicy=0 +""" + +FORMATTER_SETTINGS = """org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16 +org.eclipse.cdt.core.formatter.alignment_for_assignment=16 +org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80 +org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16 +org.eclipse.cdt.core.formatter.alignment_for_compact_if=16 +org.eclipse.cdt.core.formatter.alignment_for_conditional_expression=34 +org.eclipse.cdt.core.formatter.alignment_for_conditional_expression_chain=18 +org.eclipse.cdt.core.formatter.alignment_for_constructor_initializer_list=48 +org.eclipse.cdt.core.formatter.alignment_for_declarator_list=16 +org.eclipse.cdt.core.formatter.alignment_for_enumerator_list=48 +org.eclipse.cdt.core.formatter.alignment_for_expression_list=0 +org.eclipse.cdt.core.formatter.alignment_for_expressions_in_array_initializer=16 +org.eclipse.cdt.core.formatter.alignment_for_member_access=0 +org.eclipse.cdt.core.formatter.alignment_for_overloaded_left_shift_chain=16 +org.eclipse.cdt.core.formatter.alignment_for_parameters_in_method_declaration=16 +org.eclipse.cdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16 +org.eclipse.cdt.core.formatter.brace_position_for_array_initializer=end_of_line +org.eclipse.cdt.core.formatter.brace_position_for_block=end_of_line +org.eclipse.cdt.core.formatter.brace_position_for_block_in_case=next_line_shifted +org.eclipse.cdt.core.formatter.brace_position_for_method_declaration=next_line 
+org.eclipse.cdt.core.formatter.brace_position_for_namespace_declaration=end_of_line +org.eclipse.cdt.core.formatter.brace_position_for_switch=end_of_line +org.eclipse.cdt.core.formatter.brace_position_for_type_declaration=next_line +org.eclipse.cdt.core.formatter.comment.min_distance_between_code_and_line_comment=1 +org.eclipse.cdt.core.formatter.comment.never_indent_line_comments_on_first_column=true +org.eclipse.cdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true +org.eclipse.cdt.core.formatter.compact_else_if=true +org.eclipse.cdt.core.formatter.continuation_indentation=2 +org.eclipse.cdt.core.formatter.continuation_indentation_for_array_initializer=2 +org.eclipse.cdt.core.formatter.format_guardian_clause_on_one_line=false +org.eclipse.cdt.core.formatter.indent_access_specifier_compare_to_type_header=false +org.eclipse.cdt.core.formatter.indent_access_specifier_extra_spaces=0 +org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_access_specifier=true +org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_namespace_header=false +org.eclipse.cdt.core.formatter.indent_breaks_compare_to_cases=true +org.eclipse.cdt.core.formatter.indent_declaration_compare_to_template_header=true +org.eclipse.cdt.core.formatter.indent_empty_lines=false +org.eclipse.cdt.core.formatter.indent_statements_compare_to_block=true +org.eclipse.cdt.core.formatter.indent_statements_compare_to_body=true +org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_cases=true +org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_switch=false +org.eclipse.cdt.core.formatter.indentation.size=2 +org.eclipse.cdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_after_template_declaration=insert +org.eclipse.cdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_before_colon_in_constructor_initializer_list=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_before_identifier_in_function_declaration=insert +org.eclipse.cdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert +org.eclipse.cdt.core.formatter.insert_new_line_in_empty_block=insert +org.eclipse.cdt.core.formatter.insert_space_after_assignment_operator=insert +org.eclipse.cdt.core.formatter.insert_space_after_binary_operator=insert +org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_arguments=insert +org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_parameters=insert +org.eclipse.cdt.core.formatter.insert_space_after_closing_brace_in_block=insert +org.eclipse.cdt.core.formatter.insert_space_after_closing_paren_in_cast=insert +org.eclipse.cdt.core.formatter.insert_space_after_colon_in_base_clause=insert +org.eclipse.cdt.core.formatter.insert_space_after_colon_in_case=insert +org.eclipse.cdt.core.formatter.insert_space_after_colon_in_conditional=insert +org.eclipse.cdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_array_initializer=insert 
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_base_types=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_declarator_list=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_expression_list=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_arguments=insert +org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_parameters=insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_arguments=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_parameters=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_bracket=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_exception_specification=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_postfix_operator=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_prefix_operator=do not insert +org.eclipse.cdt.core.formatter.insert_space_after_question_in_conditional=insert +org.eclipse.cdt.core.formatter.insert_space_after_semicolon_in_for=insert +org.eclipse.cdt.core.formatter.insert_space_after_unary_operator=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_assignment_operator=insert +org.eclipse.cdt.core.formatter.insert_space_before_binary_operator=insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_arguments=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_parameters=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_bracket=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_exception_specification=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert 
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_colon_in_base_clause=insert +org.eclipse.cdt.core.formatter.insert_space_before_colon_in_case=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_colon_in_conditional=insert +org.eclipse.cdt.core.formatter.insert_space_before_colon_in_default=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_base_types=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_declarator_list=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_expression_list=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_arguments=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_parameters=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_arguments=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_parameters=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_block=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_namespace_declaration=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_switch=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_bracket=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_catch=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_exception_specification=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_for=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_if=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_switch=insert +org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_while=insert 
+org.eclipse.cdt.core.formatter.insert_space_before_postfix_operator=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_prefix_operator=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_question_in_conditional=insert +org.eclipse.cdt.core.formatter.insert_space_before_semicolon=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_semicolon_in_for=do not insert +org.eclipse.cdt.core.formatter.insert_space_before_unary_operator=do not insert +org.eclipse.cdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert +org.eclipse.cdt.core.formatter.insert_space_between_empty_brackets=do not insert +org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_exception_specification=do not insert +org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert +org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert +org.eclipse.cdt.core.formatter.join_wrapped_lines=false +org.eclipse.cdt.core.formatter.keep_else_statement_on_same_line=false +org.eclipse.cdt.core.formatter.keep_empty_array_initializer_on_one_line=false +org.eclipse.cdt.core.formatter.keep_imple_if_on_one_line=false +org.eclipse.cdt.core.formatter.keep_then_statement_on_same_line=false +org.eclipse.cdt.core.formatter.lineSplit=80 +org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1 +org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true +org.eclipse.cdt.core.formatter.tabulation.char=space +org.eclipse.cdt.core.formatter.tabulation.size=2 +org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false +""" + +STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1 +buildConsoleLines=10000 +Console.limitConsoleOutput=false +ensureNewlineAtEOF=false +formatter_profile=_Mozilla +formatter_settings_version=1 +org.eclipse.cdt.ui.formatterprofiles.version=1 +removeTrailingWhitespace=true +removeTrailingWhitespaceEditedLines=true +scalability.numberOfLines=15000 +markOccurrences=true +markOverloadedOperatorsOccurrences=true +stickyOccurrences=false +""" + +NOINDEX_TEMPLATE = """eclipse.preferences.version=1 +indexer/indexerId=org.eclipse.cdt.core.nullIndexer +""" diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py new file mode 100644 index 0000000000..324db29866 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/fastermake.py @@ -0,0 +1,300 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
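+#
+# Orientation note (summarizing the code below): this is a PartialBackend
+# layered on MakeBackend. It consumes the install-related moz.build objects
+# (JAR manifests, final target files, chrome manifest entries, generated
+# files) and writes install manifests plus a single objdir "faster/Makefile"
+# that processes them.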
+ +from operator import itemgetter + +import mozpack.path as mozpath +import six +from mozpack.manifests import InstallManifest + +from mozbuild.backend.base import PartialBackend +from mozbuild.backend.make import MakeBackend +from mozbuild.frontend.context import ObjDirPath, Path +from mozbuild.frontend.data import ( + ChromeManifestEntry, + FinalTargetFiles, + FinalTargetPreprocessedFiles, + GeneratedFile, + JARManifest, + LocalizedFiles, + LocalizedPreprocessedFiles, + XPIDLModule, +) +from mozbuild.makeutil import Makefile +from mozbuild.util import OrderedDefaultDict + + +class FasterMakeBackend(MakeBackend, PartialBackend): + def _init(self): + super(FasterMakeBackend, self)._init() + + self._manifest_entries = OrderedDefaultDict(set) + + self._install_manifests = OrderedDefaultDict(InstallManifest) + + self._dependencies = OrderedDefaultDict(list) + self._l10n_dependencies = OrderedDefaultDict(list) + + self._has_xpidl = False + + self._generated_files_map = {} + self._generated_files = [] + + def _add_preprocess(self, obj, path, dest, target=None, **kwargs): + if target is None: + target = mozpath.basename(path) + # This matches what PP_TARGETS do in config/rules. + if target.endswith(".in"): + target = target[:-3] + if target.endswith(".css"): + kwargs["marker"] = "%" + depfile = mozpath.join( + self.environment.topobjdir, + "faster", + ".deps", + mozpath.join(obj.install_target, dest, target).replace("/", "_"), + ) + self._install_manifests[obj.install_target].add_preprocess( + mozpath.join(obj.srcdir, path), + mozpath.join(dest, target), + depfile, + **kwargs + ) + + def consume_object(self, obj): + if isinstance(obj, JARManifest) and obj.install_target.startswith("dist/bin"): + self._consume_jar_manifest(obj) + + elif isinstance( + obj, (FinalTargetFiles, FinalTargetPreprocessedFiles) + ) and obj.install_target.startswith("dist/bin"): + ab_cd = self.environment.substs["MOZ_UI_LOCALE"][0] + localized = isinstance(obj, (LocalizedFiles, LocalizedPreprocessedFiles)) + defines = obj.defines or {} + if defines: + defines = defines.defines + for path, files in obj.files.walk(): + for f in files: + # For localized files we need to find the file from the locale directory. + if localized and not isinstance(f, ObjDirPath) and ab_cd != "en-US": + src = self.localized_path(obj.relsrcdir, f) + + dep_target = "install-%s" % obj.install_target + + if "*" not in src: + merge = mozpath.abspath( + mozpath.join( + self.environment.topobjdir, + "l10n_merge", + obj.relsrcdir, + f, + ) + ) + self._l10n_dependencies[dep_target].append( + (merge, f.full_path, src) + ) + src = merge + else: + src = f.full_path + + if isinstance(obj, FinalTargetPreprocessedFiles): + self._add_preprocess( + obj, src, path, target=f.target_basename, defines=defines + ) + elif "*" in f: + + def _prefix(s): + for p in mozpath.split(s): + if "*" not in p: + yield p + "/" + + prefix = "".join(_prefix(src)) + + if "*" in f.target_basename: + target = path + else: + target = mozpath.join(path, f.target_basename) + self._install_manifests[obj.install_target].add_pattern_link( + prefix, src[len(prefix) :], target + ) + else: + self._install_manifests[obj.install_target].add_link( + src, mozpath.join(path, f.target_basename) + ) + if isinstance(f, ObjDirPath): + dep_target = "install-%s" % obj.install_target + dep = mozpath.relpath(f.full_path, self.environment.topobjdir) + if dep in self._generated_files_map: + # Only the first output file is specified as a + # dependency. 
If there are multiple output files + # from a single GENERATED_FILES invocation that are + # installed, we only want to run the command once. + dep = self._generated_files_map[dep] + self._dependencies[dep_target].append(dep) + + elif isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith( + "dist/bin" + ): + top_level = mozpath.join(obj.install_target, "chrome.manifest") + if obj.path != top_level: + entry = "manifest %s" % mozpath.relpath(obj.path, obj.install_target) + self._manifest_entries[top_level].add(entry) + self._manifest_entries[obj.path].add(str(obj.entry)) + + elif isinstance(obj, GeneratedFile): + if obj.outputs: + first_output = mozpath.relpath( + mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir + ) + for o in obj.outputs[1:]: + fullpath = mozpath.join(obj.objdir, o) + self._generated_files_map[ + mozpath.relpath(fullpath, self.environment.topobjdir) + ] = first_output + self._generated_files.append(obj) + return False + + elif isinstance(obj, XPIDLModule): + self._has_xpidl = True + # We're not actually handling XPIDL files. + return False + + else: + return False + + return True + + def consume_finished(self): + mk = Makefile() + # Add the default rule at the very beginning. + mk.create_rule(["default"]) + mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir) + mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir) + mk.add_statement("MDDEPDIR = .deps") + mk.add_statement("TOUCH ?= touch") + mk.add_statement("include $(TOPSRCDIR)/config/makefiles/functions.mk") + mk.add_statement("include $(TOPSRCDIR)/config/AB_rCD.mk") + mk.add_statement("AB_CD = en-US") + if not self._has_xpidl: + mk.add_statement("NO_XPIDL = 1") + + # Add a few necessary variables inherited from configure + for var in ( + "PYTHON3", + "ACDEFINES", + "MOZ_BUILD_APP", + "MOZ_WIDGET_TOOLKIT", + ): + value = self.environment.substs.get(var) + if value is not None: + mk.add_statement("%s = %s" % (var, value)) + + install_manifests_bases = self._install_manifests.keys() + + # Add information for chrome manifest generation + manifest_targets = [] + + for target, entries in six.iteritems(self._manifest_entries): + manifest_targets.append(target) + install_target = mozpath.basedir(target, install_manifests_bases) + self._install_manifests[install_target].add_content( + "".join("%s\n" % e for e in sorted(entries)), + mozpath.relpath(target, install_target), + ) + + # Add information for install manifests. + mk.add_statement( + "INSTALL_MANIFESTS = %s" % " ".join(sorted(self._install_manifests.keys())) + ) + + # Add dependencies we inferred: + for target, deps in sorted(six.iteritems(self._dependencies)): + mk.create_rule([target]).add_dependencies( + "$(TOPOBJDIR)/%s" % d for d in sorted(deps) + ) + + # This is not great, but it's better to have some dependencies on these Python files. 
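+        # The merge rules emitted below take roughly this shape (an
+        # illustrative sketch of the generated Makefile, not literal output):
+        #   <topobjdir>/l10n_merge/<relsrcdir>/<file>: <ref-file> <l10n-file> <python deps>
+        #       $(PYTHON3) -m mozbuild.action.l10n_merge --output <merge> \
+        #           --ref-file <ref-file> --l10n-file <l10n-file>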
+ python_deps = [ + "$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py", + "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py", + "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py", + ] + # Add l10n dependencies we inferred: + for target, deps in sorted(six.iteritems(self._l10n_dependencies)): + mk.create_rule([target]).add_dependencies( + "%s" % d[0] for d in sorted(deps, key=itemgetter(0)) + ) + for (merge, ref_file, l10n_file) in deps: + rule = mk.create_rule([merge]).add_dependencies( + [ref_file, l10n_file] + python_deps + ) + rule.add_commands( + [ + "$(PYTHON3) -m mozbuild.action.l10n_merge " + "--output {} --ref-file {} --l10n-file {}".format( + merge, ref_file, l10n_file + ) + ] + ) + # Add a dummy rule for the l10n file since it might not exist. + mk.create_rule([l10n_file]) + + mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk") + + for base, install_manifest in six.iteritems(self._install_manifests): + with self._write_file( + mozpath.join( + self.environment.topobjdir, + "faster", + "install_%s" % base.replace("/", "_"), + ) + ) as fh: + install_manifest.write(fileobj=fh) + + # Write a single unified manifest for consumption by |mach watch|. + # Since this doesn't start 'install_', it's not processed by the build. + unified_manifest = InstallManifest() + for base, install_manifest in six.iteritems(self._install_manifests): + # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash. + assert base.startswith("dist/bin") + base = base[len("dist/bin") :] + if base and base[0] == "/": + base = base[1:] + unified_manifest.add_entries_from(install_manifest, base=base) + + with self._write_file( + mozpath.join( + self.environment.topobjdir, "faster", "unified_install_dist_bin" + ) + ) as fh: + unified_manifest.write(fileobj=fh) + + for obj in self._generated_files: + for stmt in self._format_statements_for_generated_file(obj, "default"): + mk.add_statement(stmt) + + with self._write_file( + mozpath.join(self.environment.topobjdir, "faster", "Makefile") + ) as fh: + mk.dump(fh, removal_guard=False) + + def _pretty_path(self, path, obj): + if path.startswith(self.environment.topobjdir): + return mozpath.join( + "$(TOPOBJDIR)", mozpath.relpath(path, self.environment.topobjdir) + ) + elif path.startswith(self.environment.topsrcdir): + return mozpath.join( + "$(TOPSRCDIR)", mozpath.relpath(path, self.environment.topsrcdir) + ) + else: + return path + + def _format_generated_file_input_name(self, path, obj): + return self._pretty_path(path.full_path, obj) + + def _format_generated_file_output_name(self, path, obj): + if not isinstance(path, Path): + path = ObjDirPath(obj._context, "!" + path) + return self._pretty_path(path.full_path, obj) diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py new file mode 100644 index 0000000000..1b83ebc826 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/mach_commands.py @@ -0,0 +1,420 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
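+#
+# Orientation note (summarizing the code below): this module implements
+# `mach ide`, which builds enough of the tree, generates project
+# configuration for Eclipse, Visual Studio, or VS Code (via the Clangd
+# backend), and opens the corresponding project.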
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument
+from mozfile import which
+
+from mozbuild import build_commands
+
+
+@Command(
+    "ide",
+    category="devenv",
+    description="Generate a project and launch an IDE.",
+    virtualenv_name="build",
+)
+@CommandArgument("ide", choices=["eclipse", "visualstudio", "vscode"])
+@CommandArgument(
+    "--no-interactive",
+    default=False,
+    action="store_true",
+    help="Just generate the configuration",
+)
+@CommandArgument("args", nargs=argparse.REMAINDER)
+def run(command_context, ide, no_interactive, args):
+    interactive = not no_interactive
+
+    if ide == "eclipse":
+        backend = "CppEclipse"
+    elif ide == "visualstudio":
+        backend = "VisualStudio"
+    elif ide == "vscode":
+        backend = "Clangd"
+
+    if ide == "eclipse" and not which("eclipse"):
+        command_context.log(
+            logging.ERROR,
+            "ide",
+            {},
+            "Eclipse CDT 8.4 or later must be installed in your PATH.",
+        )
+        command_context.log(
+            logging.ERROR,
+            "ide",
+            {},
+            "Download: http://www.eclipse.org/cdt/downloads.php",
+        )
+        return 1
+
+    if ide == "vscode":
+        rc = build_commands.configure(command_context)
+
+        if rc != 0:
+            return rc
+
+        # First install what we can through install manifests.
+        rc = command_context._run_make(
+            directory=command_context.topobjdir,
+            target="pre-export",
+            line_handler=None,
+        )
+        if rc != 0:
+            return rc
+
+        # Then build the rest of the build dependencies by running the full
+        # export target, because we can't do anything better.
+        for target in ("export", "pre-compile"):
+            rc = command_context._run_make(
+                directory=command_context.topobjdir,
+                target=target,
+                line_handler=None,
+            )
+            if rc != 0:
+                return rc
+    else:
+        # Here we refresh the whole build. 'build export' is sufficient here and is
+        # probably more correct, but it's also nice having a single target to get a
+        # fully built and indexed project (gives an easy target to use before going
+        # out to lunch).
+        res = command_context._mach_context.commands.dispatch(
+            "build", command_context._mach_context
+        )
+        if res != 0:
+            return 1
+
+    # Generate or refresh the IDE backend.
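+    # This amounts to running, from the objdir (sketch):
+    #   ./config.status --backend=<CppEclipse|VisualStudio|Clangd>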
+    python = command_context.virtualenv_manager.python_path
+    config_status = os.path.join(command_context.topobjdir, "config.status")
+    args = [python, config_status, "--backend=%s" % backend]
+    res = command_context._run_command_in_objdir(
+        args=args, pass_thru=True, ensure_exit_code=False
+    )
+    if res != 0:
+        return 1
+
+    if ide == "eclipse":
+        eclipse_workspace_dir = get_eclipse_workspace_path(command_context)
+        subprocess.check_call(["eclipse", "-data", eclipse_workspace_dir])
+    elif ide == "visualstudio":
+        visual_studio_workspace_dir = get_visualstudio_workspace_path(command_context)
+        subprocess.call(["explorer.exe", visual_studio_workspace_dir])
+    elif ide == "vscode":
+        return setup_vscode(command_context, interactive)
+
+
+def get_eclipse_workspace_path(command_context):
+    from mozbuild.backend.cpp_eclipse import CppEclipseBackend
+
+    return CppEclipseBackend.get_workspace_path(
+        command_context.topsrcdir, command_context.topobjdir
+    )
+
+
+def get_visualstudio_workspace_path(command_context):
+    return os.path.normpath(
+        os.path.join(command_context.topobjdir, "msvc", "mozilla.sln")
+    )
+
+
+def setup_vscode(command_context, interactive):
+    from mozbuild.backend.clangd import find_vscode_cmd
+
+    # Check if platform has VSCode installed
+    if interactive:
+        vscode_cmd = find_vscode_cmd()
+        if vscode_cmd is None:
+            choice = prompt_bool(
+                "VSCode cannot be found, and may not be installed. Proceed?"
+            )
+            if not choice:
+                return 1
+
+    vscode_settings = mozpath.join(
+        command_context.topsrcdir, ".vscode", "settings.json"
+    )
+
+    new_settings = {}
+    artifact_prefix = ""
+    if command_context.config_environment.is_artifact_build:
+        artifact_prefix = (
+            "\nArtifact build configured: Skipping clang and rust setup. "
+            "If you later switch to a full build, please re-run this command."
+        )
+    else:
+        new_settings = setup_clangd_rust_in_vscode(command_context)
+
+    # Add file associations.
+    new_settings = {
+        **new_settings,
+        "files.associations": {
+            "*.jsm": "javascript",
+            "*.sjs": "javascript",
+        },
+        # Note, the top-level editor settings are left as default to allow the
+        # user's defaults (if any) to take effect.
+        "[javascript][javascriptreact][typescript][typescriptreact][json][html]": {
+            "editor.defaultFormatter": "esbenp.prettier-vscode",
+            "editor.formatOnSave": True,
+        },
+    }
+
+    import difflib
+    import json
+
+    # Load the existing .vscode/settings.json file, to check if it needs to
+    # be created or updated.
+    try:
+        with open(vscode_settings) as fh:
+            old_settings_str = fh.read()
+    except FileNotFoundError:
+        print(
+            "Configuration for {} will be created.{}".format(
+                vscode_settings, artifact_prefix
+            )
+        )
+        old_settings_str = None
+
+    if old_settings_str is None:
+        # No old settings exist
+        with open(vscode_settings, "w") as fh:
+            json.dump(new_settings, fh, indent=4)
+    else:
+        # Merge our new settings with the existing settings, and check if we
+        # need to make changes. Only prompt & write out the updated config
+        # file if settings actually changed.
+        try:
+            old_settings = json.loads(old_settings_str)
+            prompt_prefix = ""
+        except ValueError:
+            old_settings = {}
+            prompt_prefix = (
+                "\n**WARNING**: Parsing of existing settings file failed. "
+                "Existing settings will be lost!"
+            )
+
+        # If we've got an old section with the formatting configuration, remove it
+        # so that we effectively "upgrade" the user to include json from the new
+        # settings. The user is presented with the diffs, so they should spot any
+        # issues.
+ if "[javascript][javascriptreact][typescript][typescriptreact]" in old_settings: + old_settings.pop( + "[javascript][javascriptreact][typescript][typescriptreact]" + ) + if ( + "[javascript][javascriptreact][typescript][typescriptreact][json]" + in old_settings + ): + old_settings.pop( + "[javascript][javascriptreact][typescript][typescriptreact][json]" + ) + + settings = {**old_settings, **new_settings} + + if old_settings != settings: + # Prompt the user with a diff of the changes we're going to make + new_settings_str = json.dumps(settings, indent=4) + if interactive: + print( + "\nThe following modifications to {settings} will occur:\n{diff}".format( + settings=vscode_settings, + diff="".join( + difflib.unified_diff( + old_settings_str.splitlines(keepends=True), + new_settings_str.splitlines(keepends=True), + "a/.vscode/settings.json", + "b/.vscode/settings.json", + n=30, + ) + ), + ) + ) + choice = prompt_bool( + "{}{}\nProceed with modifications to {}?".format( + artifact_prefix, prompt_prefix, vscode_settings + ) + ) + if not choice: + return 1 + + with open(vscode_settings, "w") as fh: + fh.write(new_settings_str) + + if not interactive: + return 0 + + # Open vscode with new configuration, or ask the user to do so if the + # binary was not found. + if vscode_cmd is None: + print( + "Please open VS Code manually and load directory: {}".format( + command_context.topsrcdir + ) + ) + return 0 + + rc = subprocess.call(vscode_cmd + [command_context.topsrcdir]) + + if rc != 0: + command_context.log( + logging.ERROR, + "ide", + {}, + "Unable to open VS Code. Please open VS Code manually and load " + "directory: {}".format(command_context.topsrcdir), + ) + return rc + + return 0 + + +def setup_clangd_rust_in_vscode(command_context): + clangd_cc_path = mozpath.join(command_context.topobjdir, "clangd") + + # Verify if the required files are present + clang_tools_path = mozpath.join( + command_context._mach_context.state_dir, "clang-tools" + ) + clang_tidy_bin = mozpath.join(clang_tools_path, "clang-tidy", "bin") + + clangd_path = mozpath.join( + clang_tidy_bin, + "clangd" + command_context.config_environment.substs.get("BIN_SUFFIX", ""), + ) + + if not os.path.exists(clangd_path): + command_context.log( + logging.ERROR, + "ide", + {}, + "Unable to locate clangd in {}.".format(clang_tidy_bin), + ) + rc = get_clang_tools(command_context, clang_tools_path) + + if rc != 0: + return rc + + import multiprocessing + + from mozbuild.code_analysis.utils import ClangTidyConfig + + clang_tidy_cfg = ClangTidyConfig(command_context.topsrcdir) + + if sys.platform == "win32": + cargo_check_command = [sys.executable, "mach"] + else: + cargo_check_command = ["./mach"] + + cargo_check_command += [ + "--log-no-times", + "cargo", + "check", + "-j", + str(multiprocessing.cpu_count() // 2), + "--all-crates", + "--message-format-json", + ] + + clang_tidy = {} + clang_tidy["Checks"] = ",".join(clang_tidy_cfg.checks) + clang_tidy.update(clang_tidy_cfg.checks_config) + + # Write .clang-tidy yml + import yaml + + with open(".clang-tidy", "w") as file: + yaml.dump(clang_tidy, file) + + clangd_cfg = { + "CompileFlags": { + "CompilationDatabase": clangd_cc_path, + } + } + + with open(".clangd", "w") as file: + yaml.dump(clangd_cfg, file) + + return { + "clangd.path": clangd_path, + "clangd.arguments": [ + "-j", + str(multiprocessing.cpu_count() // 2), + "--limit-results", + "0", + "--completion-style", + "detailed", + "--background-index", + "--all-scopes-completion", + "--log", + "info", + "--pch-storage", + "disk", + 
"--clang-tidy", + ], + "rust-analyzer.server.extraEnv": { + # Point rust-analyzer at the real target directory used by our + # build, so it can discover the files created when we run `./mach + # cargo check`. + "CARGO_TARGET_DIR": command_context.topobjdir, + }, + "rust-analyzer.cargo.buildScripts.overrideCommand": cargo_check_command, + "rust-analyzer.check.overrideCommand": cargo_check_command, + } + + +def get_clang_tools(command_context, clang_tools_path): + import shutil + + if os.path.isdir(clang_tools_path): + shutil.rmtree(clang_tools_path) + + # Create base directory where we store clang binary + os.mkdir(clang_tools_path) + + from mozbuild.artifact_commands import artifact_toolchain + + job, _ = command_context.platform + + if job is None: + command_context.log( + logging.ERROR, + "ide", + {}, + "The current platform isn't supported. " + "Currently only the following platforms are " + "supported: win32/win64, linux64 and macosx64.", + ) + return 1 + + job += "-clang-tidy" + + # We want to unpack data in the clang-tidy mozbuild folder + currentWorkingDir = os.getcwd() + os.chdir(clang_tools_path) + rc = artifact_toolchain( + command_context, verbose=False, from_build=[job], no_unpack=False, retry=0 + ) + # Change back the cwd + os.chdir(currentWorkingDir) + + return rc + + +def prompt_bool(prompt, limit=5): + """Prompts the user with prompt and requires a boolean value.""" + from distutils.util import strtobool + + for _ in range(limit): + try: + return strtobool(input(prompt + " [Y/N]\n")) + except ValueError: + print( + "ERROR! Please enter a valid option! Please use any of the following:" + " Y, N, True, False, 1, 0" + ) + return False diff --git a/python/mozbuild/mozbuild/backend/make.py b/python/mozbuild/mozbuild/backend/make.py new file mode 100644 index 0000000000..90b37e6758 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/make.py @@ -0,0 +1,139 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import mozpack.path as mozpath + +from mozbuild.frontend.data import GeneratedFile +from mozbuild.shellutil import quote as shell_quote + +from .common import CommonBackend + + +class MakeBackend(CommonBackend): + """Class encapsulating logic for backends that use Make.""" + + def _init(self): + CommonBackend._init(self) + + def _format_statements_for_generated_file(self, obj, tier, extra_dependencies=""): + """Return the list of statements to write to the Makefile for this + GeneratedFile. + + This function will invoke _format_generated_file_input_name and + _format_generated_file_output_name to munge the input/output filenames + before sending them to the output. + """ + assert isinstance(obj, GeneratedFile) + + # Localized generated files can use {AB_CD} and {AB_rCD} in their + # output paths. 
+        if obj.localized:
+            substs = {"AB_CD": "$(AB_CD)", "AB_rCD": "$(AB_rCD)"}
+        else:
+            substs = {}
+
+        outputs = []
+        needs_AB_rCD = False
+        for o in obj.outputs:
+            needs_AB_rCD = needs_AB_rCD or ("AB_rCD" in o)
+            try:
+                outputs.append(
+                    self._format_generated_file_output_name(o.format(**substs), obj)
+                )
+            except KeyError as e:
+                raise ValueError(
+                    "%s not in %s is not a valid substitution in %s"
+                    % (e.args[0], ", ".join(sorted(substs.keys())), o)
+                )
+
+        first_output = outputs[0]
+        dep_file = mozpath.join(
+            mozpath.dirname(first_output),
+            "$(MDDEPDIR)",
+            "%s.pp" % mozpath.basename(first_output),
+        )
+        # The stub target file needs to go in MDDEPDIR so that it doesn't
+        # get written into generated Android resource directories, breaking
+        # Gradle tooling and/or polluting the Android packages.
+        stub_file = mozpath.join(
+            mozpath.dirname(first_output),
+            "$(MDDEPDIR)",
+            "%s.stub" % mozpath.basename(first_output),
+        )
+
+        if obj.inputs:
+            inputs = [
+                self._format_generated_file_input_name(f, obj) for f in obj.inputs
+            ]
+        else:
+            inputs = []
+
+        force = ""
+        if obj.force:
+            force = " FORCE"
+        elif obj.localized:
+            force = " $(if $(IS_LANGUAGE_REPACK),FORCE)"
+
+        ret = []
+
+        if obj.script:
+            # If we are doing an artifact build, we don't run the compiler, so
+            # we can skip generated files that are needed during compile,
+            # or let the rule run as the result of something depending on
+            # it.
+            if (
+                not (obj.required_before_compile or obj.required_during_compile)
+                or not self.environment.is_artifact_build
+            ):
+                if tier and not needs_AB_rCD:
+                    # Android localized resources have special Makefile
+                    # handling.
+
+                    # Double-colon tiers via a variable that the backend adds as a dependency
+                    # later. See https://bugzilla.mozilla.org/show_bug.cgi?id=1645986#c0 as
+                    # to why.
+                    if tier in ("export", "pre-compile", "libs", "misc"):
+                        dep = "%s_TARGETS" % tier.replace("-", "_").upper()
+                        ret.append("%s += %s" % (dep, stub_file))
+                    else:
+                        ret.append("%s: %s" % (tier, stub_file))
+                for output in outputs:
+                    ret.append("%s: %s ;" % (output, stub_file))
+                ret.append("EXTRA_MDDEPEND_FILES += %s" % dep_file)
+
+                ret.append(
+                    (
+                        """{stub}: {script}{inputs}{backend}{force}
+\t$(REPORT_BUILD)
+\t$(call py_action,file_generate,{locale}{script} """  # wrap for E501
+                        """{method} {output} {dep_file} {stub}{inputs}{flags})
+\t@$(TOUCH) $@
+"""
+                    ).format(
+                        stub=stub_file,
+                        output=first_output,
+                        dep_file=dep_file,
+                        inputs=" " + " ".join(inputs) if inputs else "",
+                        flags=" " + " ".join(shell_quote(f) for f in obj.flags)
+                        if obj.flags
+                        else "",
+                        backend=" " + extra_dependencies if extra_dependencies else "",
+                        # Locale repacks repack multiple locales from a single configured objdir,
+                        # so standard mtime dependencies won't work properly when the build is re-run
+                        # with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
+                        # in this situation, so simply force the generation to run in that case.
+ force=force, + locale="--locale=$(AB_CD) " if obj.localized else "", + script=obj.script, + method=obj.method, + ) + ) + + return ret + + def _format_generated_file_input_name(self, path, obj): + raise NotImplementedError("Subclass must implement") + + def _format_generated_file_output_name(self, path, obj): + raise NotImplementedError("Subclass must implement") diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py new file mode 100644 index 0000000000..d92864d081 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/recursivemake.py @@ -0,0 +1,1904 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import io +import logging +import os +import re +from collections import defaultdict, namedtuple +from itertools import chain +from operator import itemgetter + +import mozpack.path as mozpath +import six +from mozpack.manifests import InstallManifest +from six import StringIO + +from mozbuild import frontend +from mozbuild.frontend.context import ( + AbsolutePath, + ObjDirPath, + Path, + RenamedSourcePath, + SourcePath, +) +from mozbuild.shellutil import quote as shell_quote + +from ..frontend.data import ( + BaseLibrary, + BaseProgram, + BaseRustLibrary, + ChromeManifestEntry, + ComputedFlags, + ConfigFileSubstitution, + ContextDerived, + Defines, + DirectoryTraversal, + ExternalLibrary, + FinalTargetFiles, + FinalTargetPreprocessedFiles, + GeneratedFile, + HostDefines, + HostLibrary, + HostProgram, + HostRustProgram, + HostSharedLibrary, + HostSimpleProgram, + HostSources, + InstallationTarget, + JARManifest, + Linkable, + LocalInclude, + LocalizedFiles, + LocalizedPreprocessedFiles, + ObjdirFiles, + ObjdirPreprocessedFiles, + PerSourceFlag, + Program, + RustProgram, + RustTests, + SandboxedWasmLibrary, + SharedLibrary, + SimpleProgram, + Sources, + StaticLibrary, + TestManifest, + VariablePassthru, + WasmSources, + XPIDLModule, +) +from ..makeutil import Makefile +from ..util import FileAvoidWrite, OrderedDefaultDict, ensureParentDir, pairwise +from .common import CommonBackend +from .make import MakeBackend + +# To protect against accidentally adding logic to Makefiles that belong in moz.build, +# we check if moz.build-like variables are defined in Makefiles. If they are, we throw +# an error to encourage the usage of moz.build instead. +_MOZBUILD_ONLY_VARIABLES = set(frontend.context.VARIABLES.keys()) - { + # The migration to moz.build from Makefiles still isn't complete, and there's still + # some straggling Makefile logic that uses variables that only moz.build should + # use. + # These remaining variables are excluded from our blacklist. As the variables here + # are migrated from Makefiles in the future, they should be removed from this + # "override" list. 
+ "XPI_NAME", + "USE_EXTENSION_MANIFEST", + "CFLAGS", + "CXXFLAGS", +} + +DEPRECATED_VARIABLES = [ + "ALLOW_COMPILER_WARNINGS", + "EXPORT_LIBRARY", + "EXTRA_LIBS", + "FAIL_ON_WARNINGS", + "HOST_LIBS", + "LIBXUL_LIBRARY", + "MOCHITEST_A11Y_FILES", + "MOCHITEST_BROWSER_FILES", + "MOCHITEST_BROWSER_FILES_PARTS", + "MOCHITEST_CHROME_FILES", + "MOCHITEST_FILES", + "MOCHITEST_FILES_PARTS", + "MOCHITEST_METRO_FILES", + "MOCHITEST_ROBOCOP_FILES", + "MODULE_OPTIMIZE_FLAGS", + "MOZ_CHROME_FILE_FORMAT", + "SHORT_LIBNAME", + "TESTING_JS_MODULES", + "TESTING_JS_MODULE_DIR", +] + +MOZBUILD_VARIABLES_MESSAGE = "It should only be defined in moz.build files." + +DEPRECATED_VARIABLES_MESSAGE = ( + "This variable has been deprecated. It does nothing. It must be removed " + "in order to build." +) + + +def make_quote(s): + return s.replace("#", "\#").replace("$", "$$") + + +class BackendMakeFile(object): + """Represents a generated backend.mk file. + + This is both a wrapper around a file handle as well as a container that + holds accumulated state. + + It's worth taking a moment to explain the make dependencies. The + generated backend.mk as well as the Makefile.in (if it exists) are in the + GLOBAL_DEPS list. This means that if one of them changes, all targets + in that Makefile are invalidated. backend.mk also depends on all of its + input files. + + It's worth considering the effect of file mtimes on build behavior. + + Since we perform an "all or none" traversal of moz.build files (the whole + tree is scanned as opposed to individual files), if we were to blindly + write backend.mk files, the net effect of updating a single mozbuild file + in the tree is all backend.mk files have new mtimes. This would in turn + invalidate all make targets across the whole tree! This would effectively + undermine incremental builds as any mozbuild change would cause the entire + tree to rebuild! + + The solution is to not update the mtimes of backend.mk files unless they + actually change. We use FileAvoidWrite to accomplish this. + """ + + def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir, dry_run): + self.topsrcdir = topsrcdir + self.srcdir = srcdir + self.objdir = objdir + self.relobjdir = mozpath.relpath(objdir, topobjdir) + self.environment = environment + self.name = mozpath.join(objdir, "backend.mk") + + self.xpt_name = None + + self.fh = FileAvoidWrite(self.name, capture_diff=True, dry_run=dry_run) + self.fh.write("# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n") + self.fh.write("\n") + + def write(self, buf): + self.fh.write(buf) + + def write_once(self, buf): + buf = six.ensure_text(buf) + if "\n" + buf not in six.ensure_text(self.fh.getvalue()): + self.write(buf) + + # For compatibility with makeutil.Makefile + def add_statement(self, stmt): + self.write("%s\n" % stmt) + + def close(self): + if self.xpt_name: + # We just recompile all xpidls because it's easier and less error + # prone. + self.fh.write("NONRECURSIVE_TARGETS += export\n") + self.fh.write("NONRECURSIVE_TARGETS_export += xpidl\n") + self.fh.write( + "NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = " + "$(DEPTH)/xpcom/xpidl\n" + ) + self.fh.write("NONRECURSIVE_TARGETS_export_xpidl_TARGETS += " "export\n") + + return self.fh.close() + + @property + def diff(self): + return self.fh.diff + + +class RecursiveMakeTraversal(object): + """ + Helper class to keep track of how the "traditional" recursive make backend + recurses subdirectories. This is useful until all adhoc rules are removed + from Makefiles. 
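+
+    A usage sketch (illustrative only, not taken from the build):
+
+        t = RecursiveMakeTraversal()
+        t.add("", dirs=["a", "b"])
+        t.add("a", dirs=["a/x"])
+        start, deps = t.compute_dependencies()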
+ + Each directory may have one or more types of subdirectories: + - (normal) dirs + - tests + """ + + SubDirectoryCategories = ["dirs", "tests"] + SubDirectoriesTuple = namedtuple("SubDirectories", SubDirectoryCategories) + + class SubDirectories(SubDirectoriesTuple): + def __new__(self): + return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(self, [], []) + + def __init__(self): + self._traversal = {} + self._attached = set() + + def add(self, dir, dirs=[], tests=[]): + """ + Adds a directory to traversal, registering its subdirectories, + sorted by categories. If the directory was already added to + traversal, adds the new subdirectories to the already known lists. + """ + subdirs = self._traversal.setdefault(dir, self.SubDirectories()) + for key, value in (("dirs", dirs), ("tests", tests)): + assert key in self.SubDirectoryCategories + # Callers give us generators + value = list(value) + getattr(subdirs, key).extend(value) + self._attached |= set(value) + + @staticmethod + def default_filter(current, subdirs): + """ + Default filter for use with compute_dependencies and traverse. + """ + return current, [], subdirs.dirs + subdirs.tests + + def call_filter(self, current, filter): + """ + Helper function to call a filter from compute_dependencies and + traverse. + """ + return filter(current, self.get_subdirs(current)) + + def compute_dependencies(self, filter=None): + """ + Compute make dependencies corresponding to the registered directory + traversal. + + filter is a function with the following signature: + def filter(current, subdirs) + + where current is the directory being traversed, and subdirs the + SubDirectories instance corresponding to it. + The filter function returns a tuple (filtered_current, filtered_parallel, + filtered_dirs) where filtered_current is either current or None if + the current directory is to be skipped, and filtered_parallel and + filtered_dirs are lists of parallel directories and sequential + directories, which can be rearranged from whatever is given in the + SubDirectories members. + + The default filter corresponds to a default recursive traversal. + + """ + filter = filter or self.default_filter + + deps = {} + + def recurse(start_node, prev_nodes=None): + current, parallel, sequential = self.call_filter(start_node, filter) + if current is not None: + if start_node != "": + deps[start_node] = prev_nodes + prev_nodes = (start_node,) + if start_node not in self._traversal: + return prev_nodes + parallel_nodes = [] + for node in parallel: + nodes = recurse(node, prev_nodes) + if nodes and nodes != ("",): + parallel_nodes.extend(nodes) + if parallel_nodes: + prev_nodes = tuple(parallel_nodes) + for dir in sequential: + prev_nodes = recurse(dir, prev_nodes) + return prev_nodes + + return recurse(""), deps + + def traverse(self, start, filter=None): + """ + Iterate over the filtered subdirectories, following the traditional + make traversal order. + """ + if filter is None: + filter = self.default_filter + + current, parallel, sequential = self.call_filter(start, filter) + if current is not None: + yield start + if start not in self._traversal: + return + for node in parallel: + for n in self.traverse(node, filter): + yield n + for dir in sequential: + for d in self.traverse(dir, filter): + yield d + + def get_subdirs(self, dir): + """ + Returns all direct subdirectories under the given directory. 
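+        At the top level (""), directories that were registered but never
+        attached as a subdirectory of anything else are included as well.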
+ """ + result = self._traversal.get(dir, self.SubDirectories()) + if dir == "": + unattached = set(self._traversal) - self._attached - set([""]) + if unattached: + new_result = self.SubDirectories() + new_result.dirs.extend(result.dirs) + new_result.dirs.extend(sorted(unattached)) + new_result.tests.extend(result.tests) + result = new_result + return result + + +class RecursiveMakeBackend(MakeBackend): + """Backend that integrates with the existing recursive make build system. + + This backend facilitates the transition from Makefile.in to moz.build + files. + + This backend performs Makefile.in -> Makefile conversion. It also writes + out .mk files containing content derived from moz.build files. Both are + consumed by the recursive make builder. + + This backend may eventually evolve to write out non-recursive make files. + However, as long as there are Makefile.in files in the tree, we are tied to + recursive make and thus will need this backend. + """ + + def _init(self): + MakeBackend._init(self) + + self._backend_files = {} + self._idl_dirs = set() + + self._makefile_in_count = 0 + self._makefile_out_count = 0 + + self._test_manifests = {} + + self.backend_input_files.add( + mozpath.join(self.environment.topobjdir, "config", "autoconf.mk") + ) + + self._install_manifests = defaultdict(InstallManifest) + # The build system relies on some install manifests always existing + # even if they are empty, because the directories are still filled + # by the build system itself, and the install manifests are only + # used for a "magic" rm -rf. + self._install_manifests["dist_public"] + self._install_manifests["dist_private"] + + self._traversal = RecursiveMakeTraversal() + self._compile_graph = OrderedDefaultDict(set) + self._rust_targets = set() + self._gkrust_target = None + self._pre_compile = set() + + self._no_skip = { + "pre-export": set(), + "export": set(), + "libs": set(), + "misc": set(), + "tools": set(), + "check": set(), + "syms": set(), + } + + def summary(self): + summary = super(RecursiveMakeBackend, self).summary() + summary.extend( + "; {makefile_in:d} -> {makefile_out:d} Makefile", + makefile_in=self._makefile_in_count, + makefile_out=self._makefile_out_count, + ) + return summary + + def _get_backend_file_for(self, obj): + # For generated files that we put in the export or misc tiers, we use the + # top-level backend file, except for localized files, which we need to keep + # in each directory for dependencies from jar manifests for l10n repacks. + if ( + isinstance(obj, GeneratedFile) + and not obj.required_during_compile + and not obj.localized + ): + objdir = self.environment.topobjdir + else: + objdir = obj.objdir + + if objdir not in self._backend_files: + self._backend_files[objdir] = BackendMakeFile( + obj.srcdir, + objdir, + obj.config, + obj.topsrcdir, + self.environment.topobjdir, + self.dry_run, + ) + return self._backend_files[objdir] + + def consume_object(self, obj): + """Write out build files necessary to build with recursive make.""" + + if not isinstance(obj, ContextDerived): + return False + + backend_file = self._get_backend_file_for(obj) + + consumed = CommonBackend.consume_object(self, obj) + + # CommonBackend handles XPIDLModule, but we want to do + # some extra things for them. + if isinstance(obj, XPIDLModule): + backend_file.xpt_name = "%s.xpt" % obj.name + self._idl_dirs.add(obj.relobjdir) + + # If CommonBackend acknowledged the object, we're done with it. 
+ if consumed: + return True + + if not isinstance(obj, Defines): + self.consume_object(obj.defines) + + if isinstance(obj, Linkable): + self._process_test_support_file(obj) + + if isinstance(obj, DirectoryTraversal): + self._process_directory_traversal(obj, backend_file) + elif isinstance(obj, ConfigFileSubstitution): + # Other ConfigFileSubstitution should have been acked by + # CommonBackend. + assert os.path.basename(obj.output_path) == "Makefile" + self._create_makefile(obj) + elif isinstance(obj, Sources): + suffix_map = { + ".s": "ASFILES", + ".c": "CSRCS", + ".m": "CMSRCS", + ".mm": "CMMSRCS", + ".cpp": "CPPSRCS", + ".S": "SSRCS", + } + variables = [suffix_map[obj.canonical_suffix]] + for files, base, cls, prefix in ( + (obj.static_files, backend_file.srcdir, SourcePath, ""), + (obj.generated_files, backend_file.objdir, ObjDirPath, "!"), + ): + for f in sorted(files): + p = self._pretty_path( + cls(obj._context, prefix + mozpath.relpath(f, base)), + backend_file, + ) + for var in variables: + backend_file.write("%s += %s\n" % (var, p)) + self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")] + elif isinstance(obj, HostSources): + suffix_map = { + ".c": "HOST_CSRCS", + ".mm": "HOST_CMMSRCS", + ".cpp": "HOST_CPPSRCS", + } + variables = [suffix_map[obj.canonical_suffix]] + for files, base, cls, prefix in ( + (obj.static_files, backend_file.srcdir, SourcePath, ""), + (obj.generated_files, backend_file.objdir, ObjDirPath, "!"), + ): + for f in sorted(files): + p = self._pretty_path( + cls(obj._context, prefix + mozpath.relpath(f, base)), + backend_file, + ) + for var in variables: + backend_file.write("%s += %s\n" % (var, p)) + self._compile_graph[mozpath.join(backend_file.relobjdir, "host-objects")] + elif isinstance(obj, WasmSources): + suffix_map = {".c": "WASM_CSRCS", ".cpp": "WASM_CPPSRCS"} + variables = [suffix_map[obj.canonical_suffix]] + for files, base, cls, prefix in ( + (obj.static_files, backend_file.srcdir, SourcePath, ""), + (obj.generated_files, backend_file.objdir, ObjDirPath, "!"), + ): + for f in sorted(files): + p = self._pretty_path( + cls(obj._context, prefix + mozpath.relpath(f, base)), + backend_file, + ) + for var in variables: + backend_file.write("%s += %s\n" % (var, p)) + self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")] + elif isinstance(obj, VariablePassthru): + # Sorted so output is consistent and we don't bump mtimes. 
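+            # Each entry is written as "VAR += item" per list item, "VAR := 1"
+            # for true booleans, and "VAR := value" for paths and other scalars.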
+ for k, v in sorted(obj.variables.items()): + if isinstance(v, list): + for item in v: + backend_file.write( + "%s += %s\n" % (k, make_quote(shell_quote(item))) + ) + elif isinstance(v, bool): + if v: + backend_file.write("%s := 1\n" % k) + elif isinstance(v, Path): + path = self._pretty_path(Path(obj._context, v), backend_file) + backend_file.write("%s := %s\n" % (k, path)) + else: + backend_file.write("%s := %s\n" % (k, v)) + elif isinstance(obj, HostDefines): + self._process_defines(obj, backend_file, which="HOST_DEFINES") + elif isinstance(obj, Defines): + self._process_defines(obj, backend_file) + + elif isinstance(obj, GeneratedFile): + if obj.required_before_export: + tier = "pre-export" + elif obj.required_before_compile: + tier = "export" + elif obj.required_during_compile: + tier = "pre-compile" + else: + tier = "misc" + relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir) + if tier == "pre-compile": + self._pre_compile.add(relobjdir) + else: + self._no_skip[tier].add(relobjdir) + backend_file.write_once("include $(topsrcdir)/config/AB_rCD.mk\n") + relobjdir = mozpath.relpath(obj.objdir, backend_file.objdir) + # For generated files that we handle in the top-level backend file, + # we want to have a `directory/tier` target depending on the file. + # For the others, we want a `tier` target. + if tier != "pre-compile" and relobjdir: + tier = "%s/%s" % (relobjdir, tier) + for stmt in self._format_statements_for_generated_file( + obj, tier, extra_dependencies="backend.mk" if obj.flags else "" + ): + backend_file.write(stmt + "\n") + + elif isinstance(obj, JARManifest): + self._no_skip["misc"].add(backend_file.relobjdir) + backend_file.write("JAR_MANIFEST := %s\n" % obj.path.full_path) + + elif isinstance(obj, RustProgram): + self._process_rust_program(obj, backend_file) + # Hook the program into the compile graph. + build_target = self._build_target_for_obj(obj) + self._compile_graph[build_target] + self._rust_targets.add(build_target) + + elif isinstance(obj, HostRustProgram): + self._process_host_rust_program(obj, backend_file) + # Hook the program into the compile graph. 
+ build_target = self._build_target_for_obj(obj) + self._compile_graph[build_target] + self._rust_targets.add(build_target) + + elif isinstance(obj, RustTests): + self._process_rust_tests(obj, backend_file) + + elif isinstance(obj, Program): + self._process_program(obj, backend_file) + self._process_linked_libraries(obj, backend_file) + self._no_skip["syms"].add(backend_file.relobjdir) + + elif isinstance(obj, HostProgram): + self._process_host_program(obj, backend_file) + self._process_linked_libraries(obj, backend_file) + + elif isinstance(obj, SimpleProgram): + self._process_simple_program(obj, backend_file) + self._process_linked_libraries(obj, backend_file) + self._no_skip["syms"].add(backend_file.relobjdir) + + elif isinstance(obj, HostSimpleProgram): + self._process_host_simple_program(obj.program, backend_file) + self._process_linked_libraries(obj, backend_file) + + elif isinstance(obj, LocalInclude): + self._process_local_include(obj.path, backend_file) + + elif isinstance(obj, PerSourceFlag): + self._process_per_source_flag(obj, backend_file) + + elif isinstance(obj, ComputedFlags): + self._process_computed_flags(obj, backend_file) + + elif isinstance(obj, InstallationTarget): + self._process_installation_target(obj, backend_file) + + elif isinstance(obj, BaseRustLibrary): + self.backend_input_files.add(obj.cargo_file) + self._process_rust_library(obj, backend_file) + # No need to call _process_linked_libraries, because Rust + # libraries are self-contained objects at this point. + + # Hook the library into the compile graph. + build_target = self._build_target_for_obj(obj) + self._compile_graph[build_target] + self._rust_targets.add(build_target) + if obj.is_gkrust: + self._gkrust_target = build_target + + elif isinstance(obj, SharedLibrary): + self._process_shared_library(obj, backend_file) + self._process_linked_libraries(obj, backend_file) + self._no_skip["syms"].add(backend_file.relobjdir) + + elif isinstance(obj, StaticLibrary): + self._process_static_library(obj, backend_file) + self._process_linked_libraries(obj, backend_file) + + elif isinstance(obj, SandboxedWasmLibrary): + self._process_sandboxed_wasm_library(obj, backend_file) + + elif isinstance(obj, HostLibrary): + self._process_linked_libraries(obj, backend_file) + + elif isinstance(obj, HostSharedLibrary): + self._process_host_shared_library(obj, backend_file) + self._process_linked_libraries(obj, backend_file) + + elif isinstance(obj, ObjdirFiles): + self._process_objdir_files(obj, obj.files, backend_file) + + elif isinstance(obj, ObjdirPreprocessedFiles): + self._process_final_target_pp_files( + obj, obj.files, backend_file, "OBJDIR_PP_FILES" + ) + + elif isinstance(obj, LocalizedFiles): + self._process_localized_files(obj, obj.files, backend_file) + + elif isinstance(obj, LocalizedPreprocessedFiles): + self._process_localized_pp_files(obj, obj.files, backend_file) + + elif isinstance(obj, FinalTargetFiles): + self._process_final_target_files(obj, obj.files, backend_file) + + elif isinstance(obj, FinalTargetPreprocessedFiles): + self._process_final_target_pp_files( + obj, obj.files, backend_file, "DIST_FILES" + ) + + elif isinstance(obj, ChromeManifestEntry): + self._process_chrome_manifest_entry(obj, backend_file) + + elif isinstance(obj, TestManifest): + self._process_test_manifest(obj, backend_file) + + else: + return False + + return True + + def _fill_root_mk(self): + """ + Create two files, root.mk and root-deps.mk, the first containing + convenience variables, and the other dependency definitions for a + 
hopefully proper directory traversal.
+        """
+        for tier, no_skip in self._no_skip.items():
+            self.log(
+                logging.DEBUG,
+                "fill_root_mk",
+                {"number": len(no_skip), "tier": tier},
+                "Using {number} directories during {tier}",
+            )
+
+        def should_skip(tier, dir):
+            if tier in self._no_skip:
+                return dir not in self._no_skip[tier]
+            return False
+
+        # Traverse directories in parallel, and skip static dirs
+        def parallel_filter(current, subdirs):
+            all_subdirs = subdirs.dirs + subdirs.tests
+            if should_skip(tier, current) or current.startswith("subtiers/"):
+                current = None
+            return current, all_subdirs, []
+
+        # build everything in parallel, including static dirs
+        # Because of bug 925236 and possible other unknown race conditions,
+        # don't parallelize the libs tier.
+        def libs_filter(current, subdirs):
+            if should_skip("libs", current) or current.startswith("subtiers/"):
+                current = None
+            return current, [], subdirs.dirs + subdirs.tests
+
+        # Because of bug 925236 and possible other unknown race conditions,
+        # don't parallelize the tools tier. There aren't many directories for
+        # this tier anyways.
+        def tools_filter(current, subdirs):
+            if should_skip("tools", current) or current.startswith("subtiers/"):
+                current = None
+            return current, [], subdirs.dirs + subdirs.tests
+
+        filters = [
+            ("export", parallel_filter),
+            ("libs", libs_filter),
+            ("misc", parallel_filter),
+            ("tools", tools_filter),
+            ("check", parallel_filter),
+        ]
+
+        root_deps_mk = Makefile()
+
+        # Fill the dependencies for traversal of each tier.
+        for tier, filter in sorted(filters, key=itemgetter(0)):
+            main, all_deps = self._traversal.compute_dependencies(filter)
+            for dir, deps in sorted(all_deps.items()):
+                if deps is not None or (dir in self._idl_dirs and tier == "export"):
+                    rule = root_deps_mk.create_rule(["%s/%s" % (dir, tier)])
+                    if deps:
+                        rule.add_dependencies(
+                            "%s/%s" % (d, tier) for d in sorted(deps) if d
+                        )
+            rule = root_deps_mk.create_rule(["recurse_%s" % tier])
+            if main:
+                rule.add_dependencies("%s/%s" % (d, tier) for d in sorted(main))
+
+        rule = root_deps_mk.create_rule(["recurse_pre-compile"])
+        rule.add_dependencies("%s/pre-compile" % d for d in sorted(self._pre_compile))
+
+        targets_with_pre_compile = sorted(
+            t for t in self._compile_graph if mozpath.dirname(t) in self._pre_compile
+        )
+        for t in targets_with_pre_compile:
+            relobjdir = mozpath.dirname(t)
+            rule = root_deps_mk.create_rule([t])
+            rule.add_dependencies(["%s/pre-compile" % relobjdir])
+
+        all_compile_deps = (
+            six.moves.reduce(lambda x, y: x | y, self._compile_graph.values())
+            if self._compile_graph
+            else set()
+        )
+        # Include the following as dependencies of the top recursion target for
+        # compilation:
+        # - nodes that are not depended upon by anything. Typically, this
+        #   would include programs that need to be recursed, but that nothing
+        #   depends on.
+        # - nodes that have no dependencies of their own. Technically, this is
+        #   not necessary, because other things have dependencies on them, and
+        #   they all end up rooting to nodes from the above category. But the
+        #   way make works[1] is such that there can be benefits to listing them
+        #   as direct dependencies of the top recursion target, to somehow
+        #   prioritize them.
+        # 1. See bug 1262241 comment 5.
+ compile_roots = [ + t + for t, deps in six.iteritems(self._compile_graph) + if not deps or t not in all_compile_deps + ] + + def add_category_rules(category, roots, graph): + rule = root_deps_mk.create_rule(["recurse_%s" % category]) + # Directories containing rust compilations don't generally depend + # on other directories in the tree, so putting them first here will + # start them earlier in the build. + rust_roots = sorted(r for r in roots if r in self._rust_targets) + if category == "compile" and rust_roots: + rust_rule = root_deps_mk.create_rule(["recurse_rust"]) + rust_rule.add_dependencies(rust_roots) + # Ensure our cargo invocations are serialized, and gecko comes + # first. Cargo will lock on the build output directory anyway, + # so trying to run things in parallel is not useful. Dependencies + # for gecko are especially expensive to build and parallelize + # poorly, so prioritizing these will save some idle time in full + # builds. + for prior_target, target in pairwise( + sorted( + [t for t in rust_roots], key=lambda t: t != self._gkrust_target + ) + ): + r = root_deps_mk.create_rule([target]) + r.add_dependencies([prior_target]) + + rule.add_dependencies(chain(rust_roots, sorted(roots))) + for target, deps in sorted(graph.items()): + if deps: + rule = root_deps_mk.create_rule([target]) + rule.add_dependencies(sorted(deps)) + + non_default_roots = defaultdict(list) + non_default_graphs = defaultdict(lambda: OrderedDefaultDict(set)) + + for root in compile_roots: + # If this is a non-default target, separate the root from the + # rest of the compile graph. + target_name = mozpath.basename(root) + + if target_name not in ("target", "target-objects", "host", "host-objects"): + non_default_roots[target_name].append(root) + non_default_graphs[target_name][root] = self._compile_graph[root] + del self._compile_graph[root] + + for root in chain(*non_default_roots.values()): + compile_roots.remove(root) + dirname = mozpath.dirname(root) + # If a directory only contains non-default compile targets, we don't + # attempt to dump symbols there. + if ( + dirname in self._no_skip["syms"] + and "%s/target" % dirname not in self._compile_graph + ): + self._no_skip["syms"].remove(dirname) + + add_category_rules("compile", compile_roots, self._compile_graph) + for category, graph in sorted(six.iteritems(non_default_graphs)): + add_category_rules(category, non_default_roots[category], graph) + + root_mk = Makefile() + + # Fill root.mk with the convenience variables. 
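+        # The emitted statements are flat lists along these lines (an
+        # illustrative sketch, not real directory names):
+        #   export_dirs := <dir1> <dir2> ...
+        #   compile_targets := <dir1>/target <dir2>/host ...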
+ for tier, filter in filters: + all_dirs = self._traversal.traverse("", filter) + root_mk.add_statement("%s_dirs := %s" % (tier, " ".join(all_dirs))) + + # Need a list of compile targets because we can't use pattern rules: + # https://savannah.gnu.org/bugs/index.php?42833 + root_mk.add_statement( + "pre_compile_targets := %s" + % " ".join(sorted("%s/pre-compile" % p for p in self._pre_compile)) + ) + root_mk.add_statement( + "compile_targets := %s" + % " ".join(sorted(set(self._compile_graph.keys()) | all_compile_deps)) + ) + root_mk.add_statement( + "syms_targets := %s" + % " ".join(sorted(set("%s/syms" % d for d in self._no_skip["syms"]))) + ) + root_mk.add_statement( + "rust_targets := %s" % " ".join(sorted(self._rust_targets)) + ) + + root_mk.add_statement( + "non_default_tiers := %s" % " ".join(sorted(non_default_roots.keys())) + ) + + for category, graphs in sorted(six.iteritems(non_default_graphs)): + category_dirs = [mozpath.dirname(target) for target in graphs.keys()] + root_mk.add_statement("%s_dirs := %s" % (category, " ".join(category_dirs))) + + root_mk.add_statement("include root-deps.mk") + + with self._write_file( + mozpath.join(self.environment.topobjdir, "root.mk") + ) as root: + root_mk.dump(root, removal_guard=False) + + with self._write_file( + mozpath.join(self.environment.topobjdir, "root-deps.mk") + ) as root_deps: + root_deps_mk.dump(root_deps, removal_guard=False) + + def _add_unified_build_rules( + self, + makefile, + unified_source_mapping, + unified_files_makefile_variable="unified_files", + include_curdir_build_rules=True, + ): + + # In case it's a generator. + unified_source_mapping = sorted(unified_source_mapping) + + explanation = ( + "\n" + "# We build files in 'unified' mode by including several files\n" + "# together into a single source file. This cuts down on\n" + "# compilation times and debug information size." + ) + makefile.add_statement(explanation) + + all_sources = " ".join(source for source, _ in unified_source_mapping) + makefile.add_statement( + "%s := %s" % (unified_files_makefile_variable, all_sources) + ) + + if include_curdir_build_rules: + makefile.add_statement( + "\n" + '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n' + "# Help it out by explicitly specifiying dependencies." + ) + makefile.add_statement( + "all_absolute_unified_files := \\\n" + " $(addprefix $(CURDIR)/,$(%s))" % unified_files_makefile_variable + ) + rule = makefile.create_rule(["$(all_absolute_unified_files)"]) + rule.add_dependencies(["$(CURDIR)/%: %"]) + + def _check_blacklisted_variables(self, makefile_in, makefile_content): + if "EXTERNALLY_MANAGED_MAKE_FILE" in makefile_content: + # Bypass the variable restrictions for externally managed makefiles. + return + + for l in makefile_content.splitlines(): + l = l.strip() + # Don't check comments + if l.startswith("#"): + continue + for x in chain(_MOZBUILD_ONLY_VARIABLES, DEPRECATED_VARIABLES): + if x not in l: + continue + + # Finding the variable name in the Makefile is not enough: it + # may just appear as part of something else, like DIRS appears + # in GENERATED_DIRS. + if re.search(r"\b%s\s*[:?+]?=" % x, l): + if x in _MOZBUILD_ONLY_VARIABLES: + message = MOZBUILD_VARIABLES_MESSAGE + else: + message = DEPRECATED_VARIABLES_MESSAGE + raise Exception( + "Variable %s is defined in %s. 
%s" % (x, makefile_in, message) + ) + + def consume_finished(self): + CommonBackend.consume_finished(self) + + for objdir, backend_file in sorted(self._backend_files.items()): + srcdir = backend_file.srcdir + with self._write_file(fh=backend_file) as bf: + makefile_in = mozpath.join(srcdir, "Makefile.in") + makefile = mozpath.join(objdir, "Makefile") + + # If Makefile.in exists, use it as a template. Otherwise, + # create a stub. + stub = not os.path.exists(makefile_in) + if not stub: + self.log( + logging.DEBUG, + "substitute_makefile", + {"path": makefile}, + "Substituting makefile: {path}", + ) + self._makefile_in_count += 1 + + # In the export and libs tiers, we don't skip directories + # containing a Makefile.in. + # topobjdir is handled separatedly, don't do anything for + # it. + if bf.relobjdir: + for tier in ("export", "libs"): + self._no_skip[tier].add(bf.relobjdir) + else: + self.log( + logging.DEBUG, + "stub_makefile", + {"path": makefile}, + "Creating stub Makefile: {path}", + ) + + obj = self.Substitution() + obj.output_path = makefile + obj.input_path = makefile_in + obj.topsrcdir = backend_file.topsrcdir + obj.topobjdir = bf.environment.topobjdir + obj.config = bf.environment + self._create_makefile(obj, stub=stub) + with io.open(obj.output_path, encoding="utf-8") as fh: + content = fh.read() + # Directories with a Makefile containing a tools target, or + # XPI_PKGNAME can't be skipped and must run during the + # 'tools' tier. + for t in ("XPI_PKGNAME", "tools"): + if t not in content: + continue + if t == "tools" and not re.search( + "(?:^|\s)tools.*::", content, re.M + ): + continue + if objdir == self.environment.topobjdir: + continue + self._no_skip["tools"].add( + mozpath.relpath(objdir, self.environment.topobjdir) + ) + + # Directories with a Makefile containing a check target + # can't be skipped and must run during the 'check' tier. + if re.search("(?:^|\s)check.*::", content, re.M): + self._no_skip["check"].add( + mozpath.relpath(objdir, self.environment.topobjdir) + ) + + # Detect any Makefile.ins that contain variables on the + # moz.build-only list + self._check_blacklisted_variables(makefile_in, content) + + self._fill_root_mk() + + # Make the master test manifest files. + for flavor, t in self._test_manifests.items(): + install_prefix, manifests = t + manifest_stem = mozpath.join(install_prefix, "%s.ini" % flavor) + self._write_master_test_manifest( + mozpath.join(self.environment.topobjdir, "_tests", manifest_stem), + manifests, + ) + + # Catch duplicate inserts. 
+ try: + self._install_manifests["_tests"].add_optional_exists(manifest_stem) + except ValueError: + pass + + self._write_manifests("install", self._install_manifests) + + ensureParentDir(mozpath.join(self.environment.topobjdir, "dist", "foo")) + + def _pretty_path_parts(self, path, backend_file): + assert isinstance(path, Path) + if isinstance(path, SourcePath): + if path.full_path.startswith(backend_file.srcdir): + return "$(srcdir)", path.full_path[len(backend_file.srcdir) :] + if path.full_path.startswith(backend_file.topsrcdir): + return "$(topsrcdir)", path.full_path[len(backend_file.topsrcdir) :] + elif isinstance(path, ObjDirPath): + if path.full_path.startswith(backend_file.objdir): + return "", path.full_path[len(backend_file.objdir) + 1 :] + if path.full_path.startswith(self.environment.topobjdir): + return "$(DEPTH)", path.full_path[len(self.environment.topobjdir) :] + + return "", path.full_path + + def _pretty_path(self, path, backend_file): + return "".join(self._pretty_path_parts(path, backend_file)) + + def _process_unified_sources(self, obj): + backend_file = self._get_backend_file_for(obj) + + suffix_map = { + ".c": "UNIFIED_CSRCS", + ".m": "UNIFIED_CMSRCS", + ".mm": "UNIFIED_CMMSRCS", + ".cpp": "UNIFIED_CPPSRCS", + } + + var = suffix_map[obj.canonical_suffix] + non_unified_var = var[len("UNIFIED_") :] + + if obj.have_unified_mapping: + self._add_unified_build_rules( + backend_file, + obj.unified_source_mapping, + unified_files_makefile_variable=var, + include_curdir_build_rules=False, + ) + backend_file.write("%s += $(%s)\n" % (non_unified_var, var)) + else: + # Sorted so output is consistent and we don't bump mtimes. + source_files = list(sorted(obj.files)) + + backend_file.write("%s += %s\n" % (non_unified_var, " ".join(source_files))) + + self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")] + + def _process_directory_traversal(self, obj, backend_file): + """Process a data.DirectoryTraversal instance.""" + fh = backend_file.fh + + def relativize(base, dirs): + return (mozpath.relpath(d.translated, base) for d in dirs) + + if obj.dirs: + fh.write( + "DIRS := %s\n" % " ".join(relativize(backend_file.objdir, obj.dirs)) + ) + self._traversal.add( + backend_file.relobjdir, + dirs=relativize(self.environment.topobjdir, obj.dirs), + ) + + # The directory needs to be registered whether subdirectories have been + # registered or not. + self._traversal.add(backend_file.relobjdir) + + def _process_defines(self, obj, backend_file, which="DEFINES"): + """Output the DEFINES rules to the given backend file.""" + defines = list(obj.get_defines()) + if defines: + defines = " ".join(shell_quote(d) for d in defines) + backend_file.write_once("%s += %s\n" % (which, defines)) + + def _process_installation_target(self, obj, backend_file): + # A few makefiles need to be able to override the following rules via + # make XPI_NAME=blah commands, so we default to the lazy evaluation as + # much as possible here to avoid breaking things. 
+ if obj.xpiname: + backend_file.write("XPI_NAME = %s\n" % (obj.xpiname)) + if obj.subdir: + backend_file.write("DIST_SUBDIR = %s\n" % (obj.subdir)) + if obj.target and not obj.is_custom(): + backend_file.write("FINAL_TARGET = $(DEPTH)/%s\n" % (obj.target)) + else: + backend_file.write( + "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME)," + "$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n" + ) + + if not obj.enabled: + backend_file.write("NO_DIST_INSTALL := 1\n") + + def _handle_idl_manager(self, manager): + build_files = self._install_manifests["xpidl"] + + for p in ("Makefile", "backend.mk", ".deps/.mkdir.done"): + build_files.add_optional_exists(p) + + for stem in manager.idl_stems(): + self._install_manifests["dist_include"].add_optional_exists("%s.h" % stem) + + for module in manager.modules: + build_files.add_optional_exists(mozpath.join(".deps", "%s.pp" % module)) + + modules = manager.modules + xpt_modules = sorted(modules.keys()) + + mk = Makefile() + all_directories = set() + + for module_name in xpt_modules: + module = manager.modules[module_name] + all_directories |= module.directories + deps = sorted(module.idl_files) + + # It may seem strange to have the .idl files listed as + # prerequisites both here and in the auto-generated .pp files. + # It is necessary to list them here to handle the case where a + # new .idl is added to an xpt. If we add a new .idl and nothing + # else has changed, the new .idl won't be referenced anywhere + # except in the command invocation. Therefore, the .xpt won't + # be rebuilt because the dependencies say it is up to date. By + # listing the .idls here, we ensure the make file has a + # reference to the new .idl. Since the new .idl presumably has + # an mtime newer than the .xpt, it will trigger xpt generation. + + mk.add_statement("%s_deps := %s" % (module_name, " ".join(deps))) + + build_files.add_optional_exists("%s.xpt" % module_name) + + mk.add_statement("all_idl_dirs := %s" % " ".join(sorted(all_directories))) + + rules = StringIO() + mk.dump(rules, removal_guard=False) + + # Create dependency for output header so we force regeneration if the + # header was deleted. This ideally should not be necessary. However, + # some processes (such as PGO at the time this was implemented) wipe + # out dist/include without regard to our install manifests. 
+ + obj = self.Substitution() + obj.output_path = mozpath.join( + self.environment.topobjdir, "config", "makefiles", "xpidl", "Makefile" + ) + obj.input_path = mozpath.join( + self.environment.topsrcdir, "config", "makefiles", "xpidl", "Makefile.in" + ) + obj.topsrcdir = self.environment.topsrcdir + obj.topobjdir = self.environment.topobjdir + obj.config = self.environment + self._create_makefile( + obj, + extra=dict( + xpidl_rules=rules.getvalue(), xpidl_modules=" ".join(xpt_modules) + ), + ) + + def _process_program(self, obj, backend_file): + backend_file.write( + "PROGRAM = %s\n" % self._pretty_path(obj.output_path, backend_file) + ) + if not obj.cxx_link and not self.environment.bin_suffix: + backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program) + + def _process_host_program(self, program, backend_file): + backend_file.write( + "HOST_PROGRAM = %s\n" % self._pretty_path(program.output_path, backend_file) + ) + + def _process_rust_program_base( + self, obj, backend_file, target_variable, target_cargo_variable + ): + backend_file.write_once("CARGO_FILE := %s\n" % obj.cargo_file) + target_dir = mozpath.normpath(backend_file.environment.topobjdir) + backend_file.write_once("CARGO_TARGET_DIR := %s\n" % target_dir) + backend_file.write("%s += $(DEPTH)/%s\n" % (target_variable, obj.location)) + backend_file.write("%s += %s\n" % (target_cargo_variable, obj.name)) + + def _process_rust_program(self, obj, backend_file): + self._process_rust_program_base( + obj, backend_file, "RUST_PROGRAMS", "RUST_CARGO_PROGRAMS" + ) + + def _process_host_rust_program(self, obj, backend_file): + self._process_rust_program_base( + obj, backend_file, "HOST_RUST_PROGRAMS", "HOST_RUST_CARGO_PROGRAMS" + ) + + def _process_rust_tests(self, obj, backend_file): + if obj.config.substs.get("MOZ_RUST_TESTS"): + # If --enable-rust-tests has been set, run these as a part of + # make check. + self._no_skip["check"].add(backend_file.relobjdir) + backend_file.write("check:: force-cargo-test-run\n") + build_target = self._build_target_for_obj(obj) + self._compile_graph[build_target] + self._process_non_default_target(obj, "force-cargo-test-run", backend_file) + backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n") + backend_file.write_once("RUST_TESTS := %s\n" % " ".join(obj.names)) + backend_file.write_once("RUST_TEST_FEATURES := %s\n" % " ".join(obj.features)) + + def _process_simple_program(self, obj, backend_file): + if obj.is_unit_test: + backend_file.write("CPP_UNIT_TESTS += %s\n" % obj.program) + assert obj.cxx_link + else: + backend_file.write("SIMPLE_PROGRAMS += %s\n" % obj.program) + if not obj.cxx_link and not self.environment.bin_suffix: + backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program) + + def _process_host_simple_program(self, program, backend_file): + backend_file.write("HOST_SIMPLE_PROGRAMS += %s\n" % program) + + def _process_test_support_file(self, obj): + # Ensure test support programs and libraries are tracked by an + # install manifest for the benefit of the test packager. 
+ if not obj.install_target.startswith("_tests"):
+ return
+
+ dest_basename = None
+ if isinstance(obj, BaseLibrary):
+ dest_basename = obj.lib_name
+ elif isinstance(obj, BaseProgram):
+ dest_basename = obj.program
+ if dest_basename is None:
+ return
+
+ self._install_manifests["_tests"].add_optional_exists(
+ mozpath.join(obj.install_target[len("_tests") + 1 :], dest_basename)
+ )
+
+ def _process_test_manifest(self, obj, backend_file):
+ # Much of the logic in this function could be moved to CommonBackend.
+ for source in obj.source_relpaths:
+ self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))
+
+ # Don't allow a file to be installed multiple times unless duplication
+ # is explicitly permitted: non-test files may be duplicated, and test
+ # files may be duplicated when the manifest is marked as a duplicate.
+ for source, (dest, is_test) in obj.installs.items():
+ try:
+ self._install_manifests["_test_files"].add_link(source, dest)
+ except ValueError:
+ if not obj.dupe_manifest and is_test:
+ raise
+
+ for base, pattern, dest in obj.pattern_installs:
+ try:
+ self._install_manifests["_test_files"].add_pattern_link(
+ base, pattern, dest
+ )
+ except ValueError:
+ if not obj.dupe_manifest:
+ raise
+
+ for dest in obj.external_installs:
+ try:
+ self._install_manifests["_test_files"].add_optional_exists(dest)
+ except ValueError:
+ if not obj.dupe_manifest:
+ raise
+
+ m = self._test_manifests.setdefault(obj.flavor, (obj.install_prefix, set()))
+ m[1].add(obj.manifest_obj_relpath)
+
+ try:
+ from reftest import ReftestManifest
+
+ if isinstance(obj.manifest, ReftestManifest):
+ # Mark included files as part of the build backend so changes
+ # result in re-config.
+ self.backend_input_files |= obj.manifest.manifests
+ except ImportError:
+ # Ignore errors caused by the reftest module not being present.
+ # This can happen when building SpiderMonkey standalone, for example.
+ pass + + def _process_local_include(self, local_include, backend_file): + d, path = self._pretty_path_parts(local_include, backend_file) + if isinstance(local_include, ObjDirPath) and not d: + # path doesn't start with a slash in this case + d = "$(CURDIR)/" + elif d == "$(DEPTH)": + d = "$(topobjdir)" + quoted_path = shell_quote(path) if path else path + if quoted_path != path: + path = quoted_path[0] + d + quoted_path[1:] + else: + path = d + path + backend_file.write("LOCAL_INCLUDES += -I%s\n" % path) + + def _process_per_source_flag(self, per_source_flag, backend_file): + for flag in per_source_flag.flags: + backend_file.write( + "%s_FLAGS += %s\n" % (mozpath.basename(per_source_flag.file_name), flag) + ) + + def _process_computed_flags(self, computed_flags, backend_file): + for var, flags in computed_flags.get_flags(): + backend_file.write( + "COMPUTED_%s += %s\n" + % (var, " ".join(make_quote(shell_quote(f)) for f in flags)) + ) + + def _process_non_default_target(self, libdef, target_name, backend_file): + backend_file.write("%s:: %s\n" % (libdef.output_category, target_name)) + backend_file.write("MOZBUILD_NON_DEFAULT_TARGETS += %s\n" % target_name) + + def _process_shared_library(self, libdef, backend_file): + backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename) + backend_file.write("FORCE_SHARED_LIB := 1\n") + backend_file.write("IMPORT_LIBRARY := %s\n" % libdef.import_name) + backend_file.write("SHARED_LIBRARY := %s\n" % libdef.lib_name) + if libdef.soname: + backend_file.write("DSO_SONAME := %s\n" % libdef.soname) + if libdef.symbols_file: + if libdef.symbols_link_arg: + backend_file.write("EXTRA_DSO_LDOPTS += %s\n" % libdef.symbols_link_arg) + if not libdef.cxx_link: + backend_file.write("LIB_IS_C_ONLY := 1\n") + if libdef.output_category: + self._process_non_default_target(libdef, libdef.lib_name, backend_file) + # Override the install rule target for this library. This is hacky, + # but can go away as soon as we start building libraries in their + # final location (bug 1459764). + backend_file.write("SHARED_LIBRARY_TARGET := %s\n" % libdef.output_category) + + def _process_static_library(self, libdef, backend_file): + backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename) + backend_file.write("FORCE_STATIC_LIB := 1\n") + backend_file.write("REAL_LIBRARY := %s\n" % libdef.lib_name) + if libdef.no_expand_lib: + backend_file.write("NO_EXPAND_LIBS := 1\n") + + def _process_sandboxed_wasm_library(self, libdef, backend_file): + backend_file.write("WASM_ARCHIVE := %s\n" % libdef.basename) + + def _process_rust_library(self, libdef, backend_file): + backend_file.write_once( + "%s := %s\n" % (libdef.LIB_FILE_VAR, libdef.import_name) + ) + backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n") + # Need to normalize the path so Cargo sees the same paths from all + # possible invocations of Cargo with this CARGO_TARGET_DIR. Otherwise, + # Cargo's dependency calculations don't work as we expect and we wind + # up recompiling lots of things. 
+ target_dir = mozpath.normpath(backend_file.environment.topobjdir) + backend_file.write("CARGO_TARGET_DIR := %s\n" % target_dir) + if libdef.features: + backend_file.write( + "%s := %s\n" % (libdef.FEATURES_VAR, " ".join(libdef.features)) + ) + if libdef.output_category: + self._process_non_default_target(libdef, libdef.import_name, backend_file) + + def _process_host_shared_library(self, libdef, backend_file): + backend_file.write("HOST_SHARED_LIBRARY = %s\n" % libdef.lib_name) + + def _build_target_for_obj(self, obj): + if hasattr(obj, "output_category") and obj.output_category: + target_name = obj.output_category + else: + target_name = obj.KIND + if target_name == "wasm": + target_name = "target" + return "%s/%s" % ( + mozpath.relpath(obj.objdir, self.environment.topobjdir), + target_name, + ) + + def _process_linked_libraries(self, obj, backend_file): + def pretty_relpath(lib, name): + return os.path.normpath( + mozpath.join(mozpath.relpath(lib.objdir, obj.objdir), name) + ) + + objs, shared_libs, os_libs, static_libs = self._expand_libs(obj) + + obj_target = obj.name + if isinstance(obj, Program): + obj_target = self._pretty_path(obj.output_path, backend_file) + + objs_ref = " \\\n ".join(os.path.relpath(o, obj.objdir) for o in objs) + # Don't bother with a list file if we're only linking objects built + # in this directory or building a real static library. This + # accommodates clang-plugin, where we would otherwise pass an + # incorrect list file format to the host compiler as well as when + # creating an archive with AR, which doesn't understand list files. + if ( + objs == obj.objs + and not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)) + or isinstance(obj, (StaticLibrary, SandboxedWasmLibrary)) + and obj.no_expand_lib + ): + backend_file.write_once("%s_OBJS := %s\n" % (obj.name, objs_ref)) + backend_file.write("%s: %s\n" % (obj_target, objs_ref)) + elif not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)): + list_file_path = "%s.list" % obj.name.replace(".", "_") + list_file_ref = self._make_list_file( + obj.KIND, obj.objdir, objs, list_file_path + ) + backend_file.write_once("%s_OBJS := %s\n" % (obj.name, list_file_ref)) + backend_file.write_once("%s: %s\n" % (obj_target, list_file_path)) + backend_file.write("%s: %s\n" % (obj_target, objs_ref)) + + if getattr(obj, "symbols_file", None): + backend_file.write_once("%s: %s\n" % (obj_target, obj.symbols_file)) + + for lib in shared_libs: + assert obj.KIND != "host" and obj.KIND != "wasm" + backend_file.write_once( + "SHARED_LIBS += %s\n" % pretty_relpath(lib, lib.import_name) + ) + + # We have to link any Rust libraries after all intermediate static + # libraries have been listed to ensure that the Rust libraries are + # searched after the C/C++ objects that might reference Rust symbols. + var = "HOST_LIBS" if obj.KIND == "host" else "STATIC_LIBS" + for lib in chain( + (l for l in static_libs if not isinstance(l, BaseRustLibrary)), + (l for l in static_libs if isinstance(l, BaseRustLibrary)), + ): + backend_file.write_once( + "%s += %s\n" % (var, pretty_relpath(lib, lib.import_name)) + ) + + for lib in os_libs: + if obj.KIND == "target": + backend_file.write_once("OS_LIBS += %s\n" % lib) + elif obj.KIND == "host": + backend_file.write_once("HOST_EXTRA_LIBS += %s\n" % lib) + + if not isinstance(obj, (StaticLibrary, HostLibrary)) or obj.no_expand_lib: + # This will create the node even if there aren't any linked libraries. 
+ build_target = self._build_target_for_obj(obj) + self._compile_graph[build_target] + + # Make the build target depend on all the target/host-objects that + # recursively are linked into it. + def recurse_libraries(obj): + for lib in obj.linked_libraries: + if ( + isinstance(lib, (StaticLibrary, HostLibrary)) + and not lib.no_expand_lib + ): + recurse_libraries(lib) + elif not isinstance(lib, ExternalLibrary): + self._compile_graph[build_target].add( + self._build_target_for_obj(lib) + ) + relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir) + objects_target = mozpath.join(relobjdir, "%s-objects" % obj.KIND) + if objects_target in self._compile_graph: + self._compile_graph[build_target].add(objects_target) + + recurse_libraries(obj) + + # Process library-based defines + self._process_defines(obj.lib_defines, backend_file) + + def _add_install_target(self, backend_file, install_target, tier, dest, files): + self._no_skip[tier].add(backend_file.relobjdir) + for f in files: + backend_file.write("%s_FILES += %s\n" % (install_target, f)) + backend_file.write("%s_DEST := %s\n" % (install_target, dest)) + backend_file.write("%s_TARGET := %s\n" % (install_target, tier)) + backend_file.write("INSTALL_TARGETS += %s\n" % install_target) + + def _process_final_target_files(self, obj, files, backend_file): + target = obj.install_target + path = mozpath.basedir( + target, ("dist/bin", "dist/xpi-stage", "_tests", "dist/include") + ) + if not path: + raise Exception("Cannot install to " + target) + + # Exports are not interesting to artifact builds. + if path == "dist/include" and self.environment.is_artifact_build: + return + + manifest = path.replace("/", "_") + install_manifest = self._install_manifests[manifest] + reltarget = mozpath.relpath(target, path) + + for path, files in files.walk(): + target_var = (mozpath.join(target, path) if path else target).replace( + "/", "_" + ) + # We don't necessarily want to combine these, because non-wildcard + # absolute files tend to be libraries, and we don't want to mix + # those in with objdir headers that will be installed during export. + # (See bug 1642882 for details.) + objdir_files = [] + absolute_files = [] + + for f in files: + assert not isinstance(f, RenamedSourcePath) + dest_dir = mozpath.join(reltarget, path) + dest_file = mozpath.join(dest_dir, f.target_basename) + if not isinstance(f, ObjDirPath): + if "*" in f: + if f.startswith("/") or isinstance(f, AbsolutePath): + basepath, wild = os.path.split(f.full_path) + if "*" in basepath: + raise Exception( + "Wildcards are only supported in the filename part" + " of srcdir-relative or absolute paths." + ) + + install_manifest.add_pattern_link(basepath, wild, dest_dir) + else: + install_manifest.add_pattern_link(f.srcdir, f, dest_dir) + elif isinstance(f, AbsolutePath): + if not f.full_path.lower().endswith((".dll", ".pdb", ".so")): + raise Exception( + "Absolute paths installed to FINAL_TARGET_FILES must" + " only be shared libraries or associated debug" + " information." + ) + install_manifest.add_optional_exists(dest_file) + absolute_files.append(f.full_path) + else: + install_manifest.add_link(f.full_path, dest_file) + else: + install_manifest.add_optional_exists(dest_file) + objdir_files.append(self._pretty_path(f, backend_file)) + install_location = "$(DEPTH)/%s" % mozpath.join(target, path) + if objdir_files: + tier = "export" if obj.install_target == "dist/include" else "misc" + # We cannot generate multilocale.txt during misc at the moment. 
+ if objdir_files[0] == "multilocale.txt": + tier = "libs" + self._add_install_target( + backend_file, target_var, tier, install_location, objdir_files + ) + if absolute_files: + # Unfortunately, we can't use _add_install_target because on + # Windows, the absolute file paths that we want to install + # from often have spaces. So we write our own rule. + self._no_skip["misc"].add(backend_file.relobjdir) + backend_file.write( + "misc::\n%s\n" + % "\n".join( + "\t$(INSTALL) %s %s" + % (make_quote(shell_quote(f)), install_location) + for f in absolute_files + ) + ) + + def _process_final_target_pp_files(self, obj, files, backend_file, name): + # Bug 1177710 - We'd like to install these via manifests as + # preprocessed files. But they currently depend on non-standard flags + # being added via some Makefiles, so for now we just pass them through + # to the underlying Makefile.in. + # + # Note that if this becomes a manifest, OBJDIR_PP_FILES will likely + # still need to use PP_TARGETS internally because we can't have an + # install manifest for the root of the objdir. + for i, (path, files) in enumerate(files.walk()): + self._no_skip["misc"].add(backend_file.relobjdir) + var = "%s_%d" % (name, i) + for f in files: + backend_file.write( + "%s += %s\n" % (var, self._pretty_path(f, backend_file)) + ) + backend_file.write( + "%s_PATH := $(DEPTH)/%s\n" + % (var, mozpath.join(obj.install_target, path)) + ) + backend_file.write("%s_TARGET := misc\n" % var) + backend_file.write("PP_TARGETS += %s\n" % var) + + def _write_localized_files_files(self, files, name, backend_file): + for f in files: + if not isinstance(f, ObjDirPath): + # The emitter asserts that all srcdir files start with `en-US/` + e, f = f.split("en-US/") + assert not e + if "*" in f: + # We can't use MERGE_FILE for wildcards because it takes + # only the first match internally. This is only used + # in one place in the tree currently so we'll hardcode + # that specific behavior for now. + backend_file.write( + "%s += $(wildcard $(LOCALE_SRCDIR)/%s)\n" % (name, f) + ) + else: + backend_file.write("%s += $(call MERGE_FILE,%s)\n" % (name, f)) + else: + # Objdir files are allowed from LOCALIZED_GENERATED_FILES + backend_file.write( + "%s += %s\n" % (name, self._pretty_path(f, backend_file)) + ) + + def _process_localized_files(self, obj, files, backend_file): + target = obj.install_target + path = mozpath.basedir(target, ("dist/bin",)) + if not path: + raise Exception("Cannot install localized files to " + target) + for i, (path, files) in enumerate(files.walk()): + name = "LOCALIZED_FILES_%d" % i + self._no_skip["misc"].add(backend_file.relobjdir) + self._write_localized_files_files(files, name + "_FILES", backend_file) + # Use FINAL_TARGET here because some l10n repack rules set + # XPI_NAME to generate langpacks. + backend_file.write("%s_DEST = $(FINAL_TARGET)/%s\n" % (name, path)) + backend_file.write("%s_TARGET := misc\n" % name) + backend_file.write("INSTALL_TARGETS += %s\n" % name) + + def _process_localized_pp_files(self, obj, files, backend_file): + target = obj.install_target + path = mozpath.basedir(target, ("dist/bin",)) + if not path: + raise Exception("Cannot install localized files to " + target) + for i, (path, files) in enumerate(files.walk()): + name = "LOCALIZED_PP_FILES_%d" % i + self._no_skip["misc"].add(backend_file.relobjdir) + self._write_localized_files_files(files, name, backend_file) + # Use FINAL_TARGET here because some l10n repack rules set + # XPI_NAME to generate langpacks. 
+ backend_file.write("%s_PATH = $(FINAL_TARGET)/%s\n" % (name, path)) + backend_file.write("%s_TARGET := misc\n" % name) + # Localized files will have different content in different + # localizations, and some preprocessed files may not have + # any preprocessor directives. + backend_file.write( + "%s_FLAGS := --silence-missing-directive-warnings\n" % name + ) + backend_file.write("PP_TARGETS += %s\n" % name) + + def _process_objdir_files(self, obj, files, backend_file): + # We can't use an install manifest for the root of the objdir, since it + # would delete all the other files that get put there by the build + # system. + for i, (path, files) in enumerate(files.walk()): + self._no_skip["misc"].add(backend_file.relobjdir) + for f in files: + backend_file.write( + "OBJDIR_%d_FILES += %s\n" % (i, self._pretty_path(f, backend_file)) + ) + backend_file.write("OBJDIR_%d_DEST := $(topobjdir)/%s\n" % (i, path)) + backend_file.write("OBJDIR_%d_TARGET := misc\n" % i) + backend_file.write("INSTALL_TARGETS += OBJDIR_%d\n" % i) + + def _process_chrome_manifest_entry(self, obj, backend_file): + fragment = Makefile() + rule = fragment.create_rule(targets=["misc:"]) + + top_level = mozpath.join(obj.install_target, "chrome.manifest") + if obj.path != top_level: + args = [ + mozpath.join("$(DEPTH)", top_level), + make_quote( + shell_quote( + "manifest %s" % mozpath.relpath(obj.path, obj.install_target) + ) + ), + ] + rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)]) + args = [ + mozpath.join("$(DEPTH)", obj.path), + make_quote(shell_quote(str(obj.entry))), + ] + rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)]) + fragment.dump(backend_file.fh, removal_guard=False) + + self._no_skip["misc"].add(obj.relsrcdir) + + def _write_manifests(self, dest, manifests): + man_dir = mozpath.join(self.environment.topobjdir, "_build_manifests", dest) + + for k, manifest in manifests.items(): + with self._write_file(mozpath.join(man_dir, k)) as fh: + manifest.write(fileobj=fh) + + def _write_master_test_manifest(self, path, manifests): + with self._write_file(path) as master: + master.write( + "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n\n" + ) + + for manifest in sorted(manifests): + master.write("[include:%s]\n" % manifest) + + class Substitution(object): + """BaseConfigSubstitution-like class for use with _create_makefile.""" + + __slots__ = ("input_path", "output_path", "topsrcdir", "topobjdir", "config") + + def _create_makefile(self, obj, stub=False, extra=None): + """Creates the given makefile. Makefiles are treated the same as + config files, but some additional header and footer is added to the + output. + + When the stub argument is True, no source file is used, and a stub + makefile with the default header and footer only is created. + """ + with self._get_preprocessor(obj) as pp: + if extra: + pp.context.update(extra) + if not pp.context.get("autoconfmk", ""): + pp.context["autoconfmk"] = "autoconf.mk" + pp.handleLine( + "# THIS FILE WAS AUTOMATICALLY GENERATED. 
DO NOT MODIFY BY HAND.\n"
+ )
+ pp.handleLine("DEPTH := @DEPTH@\n")
+ pp.handleLine("topobjdir := @topobjdir@\n")
+ pp.handleLine("topsrcdir := @top_srcdir@\n")
+ pp.handleLine("srcdir := @srcdir@\n")
+ pp.handleLine("srcdir_rel := @srcdir_rel@\n")
+ pp.handleLine("relativesrcdir := @relativesrcdir@\n")
+ pp.handleLine("include $(DEPTH)/config/@autoconfmk@\n")
+ if not stub:
+ pp.do_include(obj.input_path)
+ # Empty line to avoid failures when last line in Makefile.in ends
+ # with a backslash.
+ pp.handleLine("\n")
+ pp.handleLine("include $(topsrcdir)/config/recurse.mk\n")
+ if not stub:
+ # Adding the Makefile.in here has the desired side-effect
+ # that if the Makefile.in disappears, this will force
+ # moz.build traversal. This means that when we remove empty
+ # Makefile.in files, the old file will get replaced with
+ # the autogenerated one automatically.
+ self.backend_input_files.add(obj.input_path)
+
+ self._makefile_out_count += 1
+
+ def _handle_linked_rust_crates(self, obj, extern_crate_file):
+ backend_file = self._get_backend_file_for(obj)
+
+ backend_file.write("RS_STATICLIB_CRATE_SRC := %s\n" % extern_crate_file)
+
+ def _handle_ipdl_sources(
+ self,
+ ipdl_dir,
+ sorted_ipdl_sources,
+ sorted_nonstatic_ipdl_sources,
+ sorted_static_ipdl_sources,
+ ):
+ # Write out a master list of all IPDL source files.
+ mk = Makefile()
+
+ sorted_nonstatic_ipdl_basenames = list()
+ for source in sorted_nonstatic_ipdl_sources:
+ basename = os.path.basename(source)
+ sorted_nonstatic_ipdl_basenames.append(basename)
+ rule = mk.create_rule([basename])
+ rule.add_dependencies([source])
+ rule.add_commands(
+ [
+ "$(RM) $@",
+ "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
+ "$< -o $@)",
+ ]
+ )
+
+ mk.add_statement(
+ "ALL_IPDLSRCS := %s %s"
+ % (
+ " ".join(sorted_nonstatic_ipdl_basenames),
+ " ".join(sorted_static_ipdl_sources),
+ )
+ )
+
+ # Preprocessed ipdl files are generated in ipdl_dir.
+ mk.add_statement(
+ "IPDLDIRS := %s %s"
+ % (
+ ipdl_dir,
+ " ".join(
+ sorted(set(mozpath.dirname(p) for p in sorted_static_ipdl_sources))
+ ),
+ )
+ )
+
+ with self._write_file(mozpath.join(ipdl_dir, "ipdlsrcs.mk")) as ipdls:
+ mk.dump(ipdls, removal_guard=False)
+
+ def _handle_webidl_build(
+ self,
+ bindings_dir,
+ unified_source_mapping,
+ webidls,
+ expected_build_output_files,
+ global_define_files,
+ ):
+ include_dir = mozpath.join(self.environment.topobjdir, "dist", "include")
+ for f in expected_build_output_files:
+ if f.startswith(include_dir):
+ self._install_manifests["dist_include"].add_optional_exists(
+ mozpath.relpath(f, include_dir)
+ )
+
+ # We pass WebIDL info to make via a completely generated make file.
+ mk = Makefile()
+ mk.add_statement(
+ "nonstatic_webidl_files := %s"
+ % " ".join(sorted(webidls.all_non_static_basenames()))
+ )
+ mk.add_statement(
+ "globalgen_sources := %s" % " ".join(sorted(global_define_files))
+ )
+ mk.add_statement(
+ "test_sources := %s"
+ % " ".join(sorted("%sBinding.cpp" % s for s in webidls.all_test_stems()))
+ )
+
+ # Add rules to preprocess bindings.
+ # This should ideally be using PP_TARGETS. However, since the input
+ # filenames match the output filenames, the existing PP_TARGETS rules
+ # result in circular dependencies and other make weirdness. One
+ # solution is to rename either the input or the output files. See
+ # bug 928195 comment 129.
+ for source in sorted(webidls.all_preprocessed_sources()):
+ basename = os.path.basename(source)
+ rule = mk.create_rule([basename])
+ # GLOBAL_DEPS would be used here, but due to the include order of
+ # our makefiles it's not set early enough to be useful, so we use
+ # WEBIDL_PP_DEPS, which has analogous content.
+ rule.add_dependencies([source, "$(WEBIDL_PP_DEPS)"])
+ rule.add_commands(
+ [
+ # Remove the file before writing so bindings that go from
+ # static to preprocessed don't end up writing to a symlink,
+ # which would modify content in the source directory.
+ "$(RM) $@",
+ "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
+ "$< -o $@)",
+ ]
+ )
+
+ self._add_unified_build_rules(
+ mk,
+ unified_source_mapping,
+ unified_files_makefile_variable="unified_binding_cpp_files",
+ )
+
+ webidls_mk = mozpath.join(bindings_dir, "webidlsrcs.mk")
+ with self._write_file(webidls_mk) as fh:
+ mk.dump(fh, removal_guard=False)
+
+ # Add the test directory to the compile graph.
+ if self.environment.substs.get("ENABLE_TESTS"):
+ self._compile_graph[
+ mozpath.join(
+ mozpath.relpath(bindings_dir, self.environment.topobjdir),
+ "test",
+ "target-objects",
+ )
+ ]
+
+ def _format_generated_file_input_name(self, path, obj):
+ if obj.localized:
+ # Localized generated files can have locale-specific inputs, which
+ # are indicated by paths starting with `en-US/` or containing
+ # `locales/en-US/`.
+ if "locales/en-US" in path:
+ # We need an "absolute source path" relative to
+ # topsrcdir, like "/source/path".
+ if not path.startswith("/"):
+ path = "/" + mozpath.relpath(path.full_path, obj.topsrcdir)
+ e, f = path.split("locales/en-US/", 1)
+ assert f
+ return "$(call MERGE_RELATIVE_FILE,{},{}locales)".format(
+ f, e if not e.startswith("/") else e[len("/") :]
+ )
+ elif path.startswith("en-US/"):
+ e, f = path.split("en-US/", 1)
+ assert not e
+ return "$(call MERGE_FILE,%s)" % f
+ return self._pretty_path(path, self._get_backend_file_for(obj))
+ else:
+ return self._pretty_path(path, self._get_backend_file_for(obj))
+
+ def _format_generated_file_output_name(self, path, obj):
+ if not isinstance(path, Path):
+ path = ObjDirPath(obj._context, "!" + path)
+ return self._pretty_path(path, self._get_backend_file_for(obj))
diff --git a/python/mozbuild/mozbuild/backend/static_analysis.py b/python/mozbuild/mozbuild/backend/static_analysis.py
new file mode 100644
index 0000000000..2b3ce96e75
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/static_analysis.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides a static-analysis backend for tools like clang-tidy
+# and Coverity. The main difference between this and the default database
+# backend is that this one tracks folders that can be built in the
+# non-unified environment and generates the corresponding build commands
+# for the files.
+
+import os
+
+import mozpack.path as mozpath
+
+from mozbuild.compilation.database import CompileDBBackend
+
+
+class StaticAnalysisBackend(CompileDBBackend):
+ def _init(self):
+ CompileDBBackend._init(self)
+ self.non_unified_build = []
+
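# A self-contained sketch of the decision _build_cmd makes below; the paths
# are hypothetical. A file is compiled directly when it has no unified
# wrapper or when it lives under a directory on the non-unified
# compatibility list; otherwise its unified translation unit is compiled.

non_unified_build = ["/src/dom/base"]  # hypothetical opt-out directory

def pick_source(filename, unified):
    if unified is None or any(filename.startswith(p) for p in non_unified_build):
        return filename
    return unified

assert pick_source("/src/dom/base/nsDocument.cpp", "Unified_cpp0.cpp") == "/src/dom/base/nsDocument.cpp"
assert pick_source("/src/xpcom/ds/Foo.cpp", "Unified_cpp1.cpp") == "Unified_cpp1.cpp"

+ # List of directories that can be built outside of the unified build system.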
+ with open(
+ mozpath.join(self.environment.topsrcdir, "build", "non-unified-compat")
+ ) as fh:
+ content = fh.readlines()
+ self.non_unified_build = [
+ mozpath.join(self.environment.topsrcdir, line.strip())
+ for line in content
+ ]
+
+ def _build_cmd(self, cmd, filename, unified):
+ cmd = list(cmd)
+ # The file may be built in the non-unified environment, or it may
+ # reside under a directory that can also be built in the non-unified
+ # environment.
+ if unified is None or any(
+ filename.startswith(path) for path in self.non_unified_build
+ ):
+ cmd.append(filename)
+ else:
+ cmd.append(unified)
+
+ return cmd
+
+ def _outputfile_path(self):
+ database_path = os.path.join(self.environment.topobjdir, "static-analysis")
+
+ if not os.path.exists(database_path):
+ os.mkdir(database_path)
+
+ # Output the database (a JSON file) to objdir/static-analysis/compile_commands.json
+ return mozpath.join(database_path, "compile_commands.json")
diff --git a/python/mozbuild/mozbuild/backend/test_manifest.py b/python/mozbuild/mozbuild/backend/test_manifest.py
new file mode 100644
index 0000000000..ba1e5135f4
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/test_manifest.py
@@ -0,0 +1,110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import defaultdict
+
+import mozpack.path as mozpath
+import six
+import six.moves.cPickle as pickle
+
+from mozbuild.backend.base import PartialBackend
+from mozbuild.frontend.data import TestManifest
+
+
+class TestManifestBackend(PartialBackend):
+ """Partial backend that generates test metadata files."""
+
+ def _init(self):
+ self.tests_by_path = defaultdict(list)
+ self.installs_by_path = defaultdict(list)
+ self.deferred_installs = set()
+ self.manifest_defaults = {}
+
+ # Add config.status so performing a build will invalidate this backend.
+ self.backend_input_files.add(
+ mozpath.join(self.environment.topobjdir, "config.status")
+ )
+
+ def consume_object(self, obj):
+ if not isinstance(obj, TestManifest):
+ return
+
+ self.backend_input_files.add(obj.path)
+ self.backend_input_files |= obj.context_all_paths
+ for source in obj.source_relpaths:
+ self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))
+ try:
+ from reftest import ReftestManifest
+
+ if isinstance(obj.manifest, ReftestManifest):
+ # Mark included files as part of the build backend so changes
+ # result in re-config.
+ self.backend_input_files |= obj.manifest.manifests
+ except ImportError:
+ # Ignore errors caused by the reftest module not being present.
+ # This can happen when building SpiderMonkey standalone, for example.
+ pass + + for test in obj.tests: + self.add(test, obj.flavor, obj.topsrcdir) + self.add_defaults(obj.manifest) + self.add_installs(obj, obj.topsrcdir) + + def consume_finished(self): + topobjdir = self.environment.topobjdir + + with self._write_file( + mozpath.join(topobjdir, "all-tests.pkl"), readmode="rb" + ) as fh: + pickle.dump(dict(self.tests_by_path), fh, protocol=2) + + with self._write_file( + mozpath.join(topobjdir, "test-defaults.pkl"), readmode="rb" + ) as fh: + pickle.dump(self.manifest_defaults, fh, protocol=2) + + path = mozpath.join(topobjdir, "test-installs.pkl") + with self._write_file(path, readmode="rb") as fh: + pickle.dump( + { + k: v + for k, v in self.installs_by_path.items() + if k in self.deferred_installs + }, + fh, + protocol=2, + ) + + def add(self, t, flavor, topsrcdir): + t = dict(t) + t["flavor"] = flavor + + path = mozpath.normpath(t["path"]) + manifest = mozpath.normpath(t["manifest"]) + assert mozpath.basedir(path, [topsrcdir]) + assert mozpath.basedir(manifest, [topsrcdir]) + + key = path[len(topsrcdir) + 1 :] + t["file_relpath"] = key + t["dir_relpath"] = mozpath.dirname(key) + t["srcdir_relpath"] = key + t["manifest_relpath"] = manifest[len(topsrcdir) + 1 :] + + self.tests_by_path[key].append(t) + + def add_defaults(self, manifest): + if not hasattr(manifest, "manifest_defaults"): + return + for sub_manifest, defaults in manifest.manifest_defaults.items(): + self.manifest_defaults[sub_manifest] = defaults + + def add_installs(self, obj, topsrcdir): + for src, (dest, _) in six.iteritems(obj.installs): + key = src[len(topsrcdir) + 1 :] + self.installs_by_path[key].append((src, dest)) + for src, pat, dest in obj.pattern_installs: + key = mozpath.join(src[len(topsrcdir) + 1 :], pat) + self.installs_by_path[key].append((src, pat, dest)) + for path in obj.deferred_installs: + self.deferred_installs.add(path[2:]) diff --git a/python/mozbuild/mozbuild/backend/visualstudio.py b/python/mozbuild/mozbuild/backend/visualstudio.py new file mode 100644 index 0000000000..b9b30804b8 --- /dev/null +++ b/python/mozbuild/mozbuild/backend/visualstudio.py @@ -0,0 +1,712 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This file contains a build backend for generating Visual Studio project +# files. 
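
# The backend below assembles each project and solution through xml.dom's
# minidom rather than string templates. A minimal, self-contained sketch of
# that approach; the element names follow the MSBuild schema, but the
# document produced here is purely illustrative.

import sys
from xml.dom import getDOMImplementation

MSBUILD_NAMESPACE = "http://schemas.microsoft.com/developer/msbuild/2003"

doc = getDOMImplementation().createDocument(MSBUILD_NAMESPACE, "Project", None)
project = doc.documentElement
project.setAttribute("xmlns", MSBUILD_NAMESPACE)
project.setAttribute("ToolsVersion", "4.0")
pg = project.appendChild(doc.createElement("PropertyGroup"))
name = pg.appendChild(doc.createElement("ProjectName"))
name.appendChild(doc.createTextNode("example"))
doc.writexml(sys.stdout, addindent="  ", newl="\r\n")
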
+ +import errno +import os +import re +import sys +import uuid +from pathlib import Path +from xml.dom import getDOMImplementation + +from mozpack.files import FileFinder + +from mozbuild.base import ExecutionSummary + +from ..frontend.data import ( + Defines, + HostProgram, + HostSources, + Library, + LocalInclude, + Program, + SandboxedWasmLibrary, + Sources, + UnifiedSources, +) +from .common import CommonBackend + +MSBUILD_NAMESPACE = "http://schemas.microsoft.com/developer/msbuild/2003" +MSNATVIS_NAMESPACE = "http://schemas.microsoft.com/vstudio/debugger/natvis/2010" + + +def get_id(name): + if sys.version_info[0] == 2: + name = name.encode("utf-8") + return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper() + + +def visual_studio_product_to_solution_version(version): + if version == "2017": + return "12.00", "15" + elif version == "2019": + return "12.00", "16" + elif version == "2022": + return "12.00", "17" + else: + raise Exception("Unknown version seen: %s" % version) + + +def visual_studio_product_to_platform_toolset_version(version): + if version == "2017": + return "v141" + elif version == "2019": + return "v142" + elif version == "2022": + return "v143" + else: + raise Exception("Unknown version seen: %s" % version) + + +class VisualStudioBackend(CommonBackend): + """Generate Visual Studio project files. + + This backend is used to produce Visual Studio projects and a solution + to foster developing Firefox with Visual Studio. + + This backend is currently considered experimental. There are many things + not optimal about how it works. + """ + + def _init(self): + CommonBackend._init(self) + + # These should eventually evolve into parameters. + self._out_dir = os.path.join(self.environment.topobjdir, "msvc") + self._projsubdir = "projects" + + self._version = self.environment.substs.get("MSVS_VERSION", "2017") + + self._paths_to_sources = {} + self._paths_to_includes = {} + self._paths_to_defines = {} + self._paths_to_configs = {} + self._libs_to_paths = {} + self._progs_to_paths = {} + + def summary(self): + return ExecutionSummary( + "VisualStudio backend executed in {execution_time:.2f}s\n" + "Generated Visual Studio solution at {path:s}", + execution_time=self._execution_time, + path=os.path.join(self._out_dir, "mozilla.sln"), + ) + + def consume_object(self, obj): + reldir = getattr(obj, "relsrcdir", None) + + if hasattr(obj, "config") and reldir not in self._paths_to_configs: + self._paths_to_configs[reldir] = obj.config + + if isinstance(obj, Sources): + self._add_sources(reldir, obj) + + elif isinstance(obj, HostSources): + self._add_sources(reldir, obj) + + elif isinstance(obj, UnifiedSources): + # XXX we should be letting CommonBackend.consume_object call this + # for us instead. + self._process_unified_sources(obj) + + elif isinstance(obj, Library) and not isinstance(obj, SandboxedWasmLibrary): + self._libs_to_paths[obj.basename] = reldir + + elif isinstance(obj, Program) or isinstance(obj, HostProgram): + self._progs_to_paths[obj.program] = reldir + + elif isinstance(obj, Defines): + self._paths_to_defines.setdefault(reldir, {}).update(obj.defines) + + elif isinstance(obj, LocalInclude): + includes = self._paths_to_includes.setdefault(reldir, []) + includes.append(obj.path.full_path) + + # Just acknowledge everything. 
+ return True + + def _add_sources(self, reldir, obj): + s = self._paths_to_sources.setdefault(reldir, set()) + s.update(obj.files) + + def _process_unified_sources(self, obj): + reldir = getattr(obj, "relsrcdir", None) + + s = self._paths_to_sources.setdefault(reldir, set()) + s.update(obj.files) + + def consume_finished(self): + out_dir = self._out_dir + out_proj_dir = os.path.join(self._out_dir, self._projsubdir) + + projects = self._write_projects_for_sources( + self._libs_to_paths, "library", out_proj_dir + ) + projects.update( + self._write_projects_for_sources( + self._progs_to_paths, "binary", out_proj_dir + ) + ) + + # Generate projects that can be used to build common targets. + for target in ("export", "binaries", "tools", "full"): + basename = "target_%s" % target + command = "$(SolutionDir)\\mach.bat build" + if target != "full": + command += " %s" % target + + project_id = self._write_vs_project( + out_proj_dir, + basename, + target, + build_command=command, + clean_command="$(SolutionDir)\\mach.bat clobber", + ) + + projects[basename] = (project_id, basename, target) + + # A project that can be used to regenerate the visual studio projects. + basename = "target_vs" + project_id = self._write_vs_project( + out_proj_dir, + basename, + "visual-studio", + build_command="$(SolutionDir)\\mach.bat build-backend -b VisualStudio", + ) + projects[basename] = (project_id, basename, "visual-studio") + + # Write out a shared property file with common variables. + props_path = os.path.join(out_proj_dir, "mozilla.props") + with self._write_file(props_path, readmode="rb") as fh: + self._write_props(fh) + + # Generate some wrapper scripts that allow us to invoke mach inside + # a MozillaBuild-like environment. We currently only use the batch + # script. We'd like to use the PowerShell script. However, it seems + # to buffer output from within Visual Studio (surely this is + # configurable) and the default execution policy of PowerShell doesn't + # allow custom scripts to be executed. + with self._write_file(os.path.join(out_dir, "mach.bat"), readmode="rb") as fh: + self._write_mach_batch(fh) + + with self._write_file(os.path.join(out_dir, "mach.ps1"), readmode="rb") as fh: + self._write_mach_powershell(fh) + + # Write out a solution file to tie it all together. 
+ solution_path = os.path.join(out_dir, "mozilla.sln") + with self._write_file(solution_path, readmode="rb") as fh: + self._write_solution(fh, projects) + + def _write_projects_for_sources(self, sources, prefix, out_dir): + projects = {} + for item, path in sorted(sources.items()): + config = self._paths_to_configs.get(path, None) + sources = self._paths_to_sources.get(path, set()) + sources = set(os.path.join("$(TopSrcDir)", path, s) for s in sources) + sources = set(os.path.normpath(s) for s in sources) + + finder = FileFinder(os.path.join(self.environment.topsrcdir, path)) + + headers = [t[0] for t in finder.find("*.h")] + headers = [ + os.path.normpath(os.path.join("$(TopSrcDir)", path, f)) for f in headers + ] + + includes = [ + os.path.join("$(TopSrcDir)", path), + os.path.join("$(TopObjDir)", path), + ] + includes.extend(self._paths_to_includes.get(path, [])) + includes.append("$(TopObjDir)\\dist\\include\\nss") + includes.append("$(TopObjDir)\\dist\\include") + + for v in ( + "NSPR_CFLAGS", + "NSS_CFLAGS", + "MOZ_JPEG_CFLAGS", + "MOZ_PNG_CFLAGS", + "MOZ_ZLIB_CFLAGS", + "MOZ_PIXMAN_CFLAGS", + ): + if not config: + break + + args = config.substs.get(v, []) + + for i, arg in enumerate(args): + if arg.startswith("-I"): + includes.append(os.path.normpath(arg[2:])) + + # Pull in system defaults. + includes.append("$(DefaultIncludes)") + + includes = [os.path.normpath(i) for i in includes] + + defines = [] + for k, v in self._paths_to_defines.get(path, {}).items(): + if v is True: + defines.append(k) + else: + defines.append("%s=%s" % (k, v)) + + debugger = None + if prefix == "binary": + if item.startswith(self.environment.substs["MOZ_APP_NAME"]): + app_args = "-no-remote -profile $(TopObjDir)\\tmp\\profile-default" + if self.environment.substs.get("MOZ_LAUNCHER_PROCESS", False): + app_args += " -wait-for-browser" + debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, app_args) + else: + debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, "") + + basename = "%s_%s" % (prefix, item) + + project_id = self._write_vs_project( + out_dir, + basename, + item, + includes=includes, + forced_includes=["$(TopObjDir)\\dist\\include\\mozilla-config.h"], + defines=defines, + headers=headers, + sources=sources, + debugger=debugger, + ) + + projects[basename] = (project_id, basename, item) + + return projects + + def _write_solution(self, fh, projects): + # Visual Studio appears to write out its current version in the + # solution file. Instead of trying to figure out what version it will + # write, try to parse the version out of the existing file and use it + # verbatim. + vs_version = None + try: + with open(fh.name, "rb") as sfh: + for line in sfh: + if line.startswith(b"VisualStudioVersion = "): + vs_version = line.split(b" = ", 1)[1].strip() + except IOError as e: + if e.errno != errno.ENOENT: + raise + + format_version, comment_version = visual_studio_product_to_solution_version( + self._version + ) + # This is a Visual C++ Project type. + project_type = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942" + + # Visual Studio seems to require this header. + fh.write( + "Microsoft Visual Studio Solution File, Format Version %s\r\n" + % format_version + ) + fh.write("# Visual Studio %s\r\n" % comment_version) + + if vs_version: + fh.write("VisualStudioVersion = %s\r\n" % vs_version) + + # Corresponds to VS2013. + fh.write("MinimumVisualStudioVersion = 12.0.31101.0\r\n") + + binaries_id = projects["target_binaries"][0] + + # Write out entries for each project. 
+ for key in sorted(projects): + project_id, basename, name = projects[key] + path = os.path.join(self._projsubdir, "%s.vcxproj" % basename) + + fh.write( + 'Project("{%s}") = "%s", "%s", "{%s}"\r\n' + % (project_type, name, path, project_id) + ) + + # Make all libraries depend on the binaries target. + if key.startswith("library_"): + fh.write("\tProjectSection(ProjectDependencies) = postProject\r\n") + fh.write("\t\t{%s} = {%s}\r\n" % (binaries_id, binaries_id)) + fh.write("\tEndProjectSection\r\n") + + fh.write("EndProject\r\n") + + # Write out solution folders for organizing things. + + # This is the UUID you use for solution folders. + container_id = "2150E333-8FDC-42A3-9474-1A3956D46DE8" + + def write_container(desc): + cid = get_id(desc) + fh.write( + 'Project("{%s}") = "%s", "%s", "{%s}"\r\n' + % (container_id, desc, desc, cid) + ) + fh.write("EndProject\r\n") + + return cid + + library_id = write_container("Libraries") + target_id = write_container("Build Targets") + binary_id = write_container("Binaries") + + fh.write("Global\r\n") + + # Make every project a member of our one configuration. + fh.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n") + fh.write("\t\tBuild|Win32 = Build|Win32\r\n") + fh.write("\tEndGlobalSection\r\n") + + # Set every project's active configuration to the one configuration and + # set up the default build project. + fh.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n") + for name, project in sorted(projects.items()): + fh.write("\t\t{%s}.Build|Win32.ActiveCfg = Build|Win32\r\n" % project[0]) + + # Only build the full build target by default. + # It's important we don't write multiple entries here because they + # conflict! + if name == "target_full": + fh.write("\t\t{%s}.Build|Win32.Build.0 = Build|Win32\r\n" % project[0]) + + fh.write("\tEndGlobalSection\r\n") + + fh.write("\tGlobalSection(SolutionProperties) = preSolution\r\n") + fh.write("\t\tHideSolutionNode = FALSE\r\n") + fh.write("\tEndGlobalSection\r\n") + + # Associate projects with containers. 
+ fh.write("\tGlobalSection(NestedProjects) = preSolution\r\n") + for key in sorted(projects): + project_id = projects[key][0] + + if key.startswith("library_"): + container_id = library_id + elif key.startswith("target_"): + container_id = target_id + elif key.startswith("binary_"): + container_id = binary_id + else: + raise Exception("Unknown project type: %s" % key) + + fh.write("\t\t{%s} = {%s}\r\n" % (project_id, container_id)) + fh.write("\tEndGlobalSection\r\n") + + fh.write("EndGlobal\r\n") + + def _write_props(self, fh): + impl = getDOMImplementation() + doc = impl.createDocument(MSBUILD_NAMESPACE, "Project", None) + + project = doc.documentElement + project.setAttribute("xmlns", MSBUILD_NAMESPACE) + project.setAttribute("ToolsVersion", "4.0") + + ig = project.appendChild(doc.createElement("ImportGroup")) + ig.setAttribute("Label", "PropertySheets") + + pg = project.appendChild(doc.createElement("PropertyGroup")) + pg.setAttribute("Label", "UserMacros") + + ig = project.appendChild(doc.createElement("ItemGroup")) + + def add_var(k, v): + e = pg.appendChild(doc.createElement(k)) + e.appendChild(doc.createTextNode(v)) + + e = ig.appendChild(doc.createElement("BuildMacro")) + e.setAttribute("Include", k) + + e = e.appendChild(doc.createElement("Value")) + e.appendChild(doc.createTextNode("$(%s)" % k)) + + natvis = ig.appendChild(doc.createElement("Natvis")) + natvis.setAttribute("Include", "../../../toolkit/library/gecko.natvis") + + add_var("TopObjDir", os.path.normpath(self.environment.topobjdir)) + add_var("TopSrcDir", os.path.normpath(self.environment.topsrcdir)) + add_var("PYTHON", "$(TopObjDir)\\_virtualenv\\Scripts\\python.exe") + add_var("MACH", "$(TopSrcDir)\\mach") + + # From MozillaBuild. + add_var("DefaultIncludes", os.environ.get("INCLUDE", "")) + + fh.write(b"\xef\xbb\xbf") + doc.writexml(fh, addindent=" ", newl="\r\n") + + def _create_natvis_type( + self, doc, visualizer, name, displayString, stringView=None + ): + + t = visualizer.appendChild(doc.createElement("Type")) + t.setAttribute("Name", name) + + ds = t.appendChild(doc.createElement("DisplayString")) + ds.appendChild(doc.createTextNode(displayString)) + + if stringView is not None: + sv = t.appendChild(doc.createElement("DisplayString")) + sv.appendChild(doc.createTextNode(stringView)) + + def _create_natvis_simple_string_type(self, doc, visualizer, name): + self._create_natvis_type( + doc, visualizer, name + "", "{mData,su}", "mData,su" + ) + self._create_natvis_type( + doc, visualizer, name + "", "{mData,s}", "mData,s" + ) + + def _create_natvis_string_tuple_type(self, doc, visualizer, chartype, formatstring): + t = visualizer.appendChild(doc.createElement("Type")) + t.setAttribute("Name", "nsTSubstringTuple<" + chartype + ">") + + ds1 = t.appendChild(doc.createElement("DisplayString")) + ds1.setAttribute("Condition", "mHead != nullptr") + ds1.appendChild( + doc.createTextNode("{mHead,na} {mFragB->mData," + formatstring + "}") + ) + + ds2 = t.appendChild(doc.createElement("DisplayString")) + ds2.setAttribute("Condition", "mHead == nullptr") + ds2.appendChild( + doc.createTextNode( + "{mFragA->mData," + + formatstring + + "} {mFragB->mData," + + formatstring + + "}" + ) + ) + + def _relevant_environment_variables(self): + # Write out the environment variables, presumably coming from + # MozillaBuild. 
+ for k, v in sorted(os.environ.items()): + if not re.match("^[a-zA-Z0-9_]+$", k): + continue + + if k in ("OLDPWD", "PS1"): + continue + + if k.startswith("_"): + continue + + yield k, v + + yield "TOPSRCDIR", self.environment.topsrcdir + yield "TOPOBJDIR", self.environment.topobjdir + + def _write_mach_powershell(self, fh): + for k, v in self._relevant_environment_variables(): + fh.write(b'$env:%s = "%s"\r\n' % (k.encode("utf-8"), v.encode("utf-8"))) + + relpath = os.path.relpath( + self.environment.topsrcdir, self.environment.topobjdir + ).replace("\\", "/") + + fh.write( + b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath.encode("utf-8") + ) + fh.write(b"$bashargs = $bashargs + $args\r\n") + + fh.write(b"$expanded = $bashargs -join ' '\r\n") + fh.write(b'$procargs = "-c", $expanded\r\n') + + if (Path(os.environ["MOZILLABUILD"]) / "msys2").exists(): + bash_path = rb"msys2\usr\bin\bash" + else: + bash_path = rb"msys\bin\bash" + + fh.write( + b"Start-Process -WorkingDirectory $env:TOPOBJDIR " + b"-FilePath $env:MOZILLABUILD\\%b " + b"-ArgumentList $procargs " + b"-Wait -NoNewWindow\r\n" % bash_path + ) + + def _write_mach_batch(self, fh): + """Write out a batch script that builds the tree. + + The script "bootstraps" into the MozillaBuild environment by setting + the environment variables that are active in the current MozillaBuild + environment. Then, it builds the tree. + """ + for k, v in self._relevant_environment_variables(): + fh.write(b'SET "%s=%s"\r\n' % (k.encode("utf-8"), v.encode("utf-8"))) + + fh.write(b"cd %TOPOBJDIR%\r\n") + + # We need to convert Windows-native paths to msys paths. Easiest way is + # relative paths, since munging c:\ to /c/ is slightly more + # complicated. + relpath = os.path.relpath( + self.environment.topsrcdir, self.environment.topobjdir + ).replace("\\", "/") + + if (Path(os.environ["MOZILLABUILD"]) / "msys2").exists(): + bash_path = rb"msys2\usr\bin\bash" + else: + bash_path = rb"msys\bin\bash" + + # We go through mach because it has the logic for choosing the most + # appropriate build tool. 
+        fh.write(
+            b'"%%MOZILLABUILD%%\\%b" '
+            b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"'
+            % (bash_path, relpath.encode("utf-8"))
+        )
+
+    def _write_vs_project(self, out_dir, basename, name, **kwargs):
+        root = "%s.vcxproj" % basename
+        project_id = get_id(basename)
+
+        with self._write_file(os.path.join(out_dir, root), readmode="rb") as fh:
+            project_id, name = VisualStudioBackend.write_vs_project(
+                fh, self._version, project_id, name, **kwargs
+            )
+
+        with self._write_file(
+            os.path.join(out_dir, "%s.user" % root), readmode="rb"
+        ) as fh:
+            fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
+            fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' % MSBUILD_NAMESPACE)
+            fh.write("</Project>\r\n")
+
+        return project_id
+
+    @staticmethod
+    def write_vs_project(
+        fh,
+        version,
+        project_id,
+        name,
+        includes=[],
+        forced_includes=[],
+        defines=[],
+        build_command=None,
+        clean_command=None,
+        debugger=None,
+        headers=[],
+        sources=[],
+    ):
+
+        impl = getDOMImplementation()
+        doc = impl.createDocument(MSBUILD_NAMESPACE, "Project", None)
+
+        project = doc.documentElement
+        project.setAttribute("DefaultTargets", "Build")
+        project.setAttribute("ToolsVersion", "4.0")
+        project.setAttribute("xmlns", MSBUILD_NAMESPACE)
+
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+        ig.setAttribute("Label", "ProjectConfigurations")
+
+        pc = ig.appendChild(doc.createElement("ProjectConfiguration"))
+        pc.setAttribute("Include", "Build|Win32")
+
+        c = pc.appendChild(doc.createElement("Configuration"))
+        c.appendChild(doc.createTextNode("Build"))
+
+        p = pc.appendChild(doc.createElement("Platform"))
+        p.appendChild(doc.createTextNode("Win32"))
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Label", "Globals")
+
+        n = pg.appendChild(doc.createElement("ProjectName"))
+        n.appendChild(doc.createTextNode(name))
+
+        k = pg.appendChild(doc.createElement("Keyword"))
+        k.appendChild(doc.createTextNode("MakeFileProj"))
+
+        g = pg.appendChild(doc.createElement("ProjectGuid"))
+        g.appendChild(doc.createTextNode("{%s}" % project_id))
+
+        rn = pg.appendChild(doc.createElement("RootNamespace"))
+        rn.appendChild(doc.createTextNode("mozilla"))
+
+        pts = pg.appendChild(doc.createElement("PlatformToolset"))
+        pts.appendChild(
+            doc.createTextNode(
+                visual_studio_product_to_platform_toolset_version(version)
+            )
+        )
+
+        i = project.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.Default.props")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "ExtensionTargets")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "ExtensionSettings")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "PropertySheets")
+        i = ig.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "mozilla.props")
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Label", "Configuration")
+        ct = pg.appendChild(doc.createElement("ConfigurationType"))
+        ct.appendChild(doc.createTextNode("Makefile"))
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Condition", "'$(Configuration)|$(Platform)'=='Build|Win32'")
+
+        if build_command:
+            n = pg.appendChild(doc.createElement("NMakeBuildCommandLine"))
+            n.appendChild(doc.createTextNode(build_command))
+
+        if clean_command:
+            n = pg.appendChild(doc.createElement("NMakeCleanCommandLine"))
+            n.appendChild(doc.createTextNode(clean_command))
+
+        if includes:
+            n = pg.appendChild(doc.createElement("NMakeIncludeSearchPath"))
+            n.appendChild(doc.createTextNode(";".join(includes)))
+
+        if forced_includes:
+            n = pg.appendChild(doc.createElement("NMakeForcedIncludes"))
+            n.appendChild(doc.createTextNode(";".join(forced_includes)))
+
+        if defines:
+            n = pg.appendChild(doc.createElement("NMakePreprocessorDefinitions"))
+            n.appendChild(doc.createTextNode(";".join(defines)))
+
+        if debugger:
+            n = pg.appendChild(doc.createElement("LocalDebuggerCommand"))
+            n.appendChild(doc.createTextNode(debugger[0]))
+
+            n = pg.appendChild(doc.createElement("LocalDebuggerCommandArguments"))
+            n.appendChild(doc.createTextNode(debugger[1]))
+
+        # Sets IntelliSense to use the C++17 language standard.
+        n = pg.appendChild(doc.createElement("AdditionalOptions"))
+        n.appendChild(doc.createTextNode("/std:c++17"))
+
+        i = project.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.props")
+
+        i = project.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.targets")
+
+        # Now add files to the project.
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+        for header in sorted(headers or []):
+            n = ig.appendChild(doc.createElement("ClInclude"))
+            n.setAttribute("Include", header)
+
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+        for source in sorted(sources or []):
+            n = ig.appendChild(doc.createElement("ClCompile"))
+            n.setAttribute("Include", source)
+
+        fh.write(b"\xef\xbb\xbf")
+        doc.writexml(fh, addindent="  ", newl="\r\n")
+
+        return project_id, name
diff --git a/python/mozbuild/mozbuild/base.py b/python/mozbuild/mozbuild/base.py
new file mode 100644
index 0000000000..9822a9b76e
--- /dev/null
+++ b/python/mozbuild/mozbuild/base.py
@@ -0,0 +1,1110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import io
+import json
+import logging
+import multiprocessing
+import os
+import subprocess
+import sys
+from pathlib import Path
+
+import mozpack.path as mozpath
+import six
+from mach.mixin.process import ProcessExecutionMixin
+from mozboot.mozconfig import MozconfigFindException
+from mozfile import which
+from mozversioncontrol import (
+    GitRepository,
+    HgRepository,
+    InvalidRepoPath,
+    MissingConfigureInfo,
+    MissingVCSTool,
+    get_repository_from_build_config,
+    get_repository_object,
+)
+
+from .backend.configenvironment import ConfigEnvironment, ConfigStatusFailure
+from .configure import ConfigureSandbox
+from .controller.clobber import Clobberer
+from .mozconfig import MozconfigLoader, MozconfigLoadException
+from .util import memoize, memoized_property
+
+try:
+    import psutil
+except Exception:
+    psutil = None
+
+
+class BadEnvironmentException(Exception):
+    """Base class for errors raised when the build environment is not sane."""
+
+
+class BuildEnvironmentNotFoundException(BadEnvironmentException, AttributeError):
+    """Raised when we could not find a build environment."""
+
+
+class ObjdirMismatchException(BadEnvironmentException):
+    """Raised when the current dir is an objdir and doesn't match the mozconfig."""
+
+    def __init__(self, objdir1, objdir2):
+        self.objdir1 = objdir1
+        self.objdir2 = objdir2
+
+    def __str__(self):
+        return "Objdir mismatch: %s != %s" % (self.objdir1, self.objdir2)
+
+
+class BinaryNotFoundException(Exception):
+    """Raised when the binary is not found in the expected location."""
+
+    def __init__(self, path):
+        self.path = path
+
+    def __str__(self):
+        return "Binary expected at {} does not exist.".format(self.path)
+
+    def help(self):
+        return "It looks like your program isn't built. You can run |./mach build| to build it."
+
+
+class MozbuildObject(ProcessExecutionMixin):
+    """Base class providing basic functionality useful to many modules.
+
+    Modules in this package typically require common functionality such as
+    accessing the current config, getting the location of the source directory,
+    running processes, etc. This class provides that functionality. Other
+    modules can inherit from this class to obtain this functionality easily.
+    """
+
+    def __init__(
+        self,
+        topsrcdir,
+        settings,
+        log_manager,
+        topobjdir=None,
+        mozconfig=MozconfigLoader.AUTODETECT,
+        virtualenv_name=None,
+    ):
+        """Create a new Mozbuild object instance.
+
+        Instances are bound to a source directory, a ConfigSettings instance,
+        and a LogManager instance. The topobjdir may be passed in as well. If
+        it isn't, it will be calculated from the active mozconfig.
+        """
+        self.topsrcdir = mozpath.realpath(topsrcdir)
+        self.settings = settings
+
+        self.populate_logger()
+        self.log_manager = log_manager
+
+        self._make = None
+        self._topobjdir = mozpath.realpath(topobjdir) if topobjdir else topobjdir
+        self._mozconfig = mozconfig
+        self._config_environment = None
+        self._virtualenv_name = virtualenv_name or "common"
+        self._virtualenv_manager = None
+
+    @classmethod
+    def from_environment(cls, cwd=None, detect_virtualenv_mozinfo=True, **kwargs):
+        """Create a MozbuildObject by detecting the proper one from the env.
+
+        This examines environment state like the current working directory and
+        creates a MozbuildObject from the found source directory, mozconfig, etc.
+
+        The role of this function is to identify a topsrcdir, topobjdir, and
+        mozconfig file.
+
+        If the current working directory is inside a known objdir, we always
+        use the topsrcdir and mozconfig associated with that objdir.
+
+        If the current working directory is inside a known srcdir, we use that
+        topsrcdir and look for mozconfigs using the default mechanism, which
+        looks inside environment variables.
+
+        If the current Python interpreter is running from a virtualenv inside
+        an objdir, we use that as our objdir.
+
+        If we're not inside a srcdir or objdir, an exception is raised.
+
+        detect_virtualenv_mozinfo determines whether we should look for a
+        mozinfo.json file relative to the virtualenv directory. This was
+        added to facilitate testing. Callers likely shouldn't change the
+        default.
+        """
+
+        cwd = os.path.realpath(cwd or os.getcwd())
+        topsrcdir = None
+        topobjdir = None
+        mozconfig = MozconfigLoader.AUTODETECT
+
+        def load_mozinfo(path):
+            info = json.load(io.open(path, "rt", encoding="utf-8"))
+            topsrcdir = info.get("topsrcdir")
+            topobjdir = os.path.dirname(path)
+            mozconfig = info.get("mozconfig")
+            return topsrcdir, topobjdir, mozconfig
+
+        for dir_path in [str(path) for path in [cwd] + list(Path(cwd).parents)]:
+            # If we find a mozinfo.json, we are in the objdir.
+            mozinfo_path = os.path.join(dir_path, "mozinfo.json")
+            if os.path.isfile(mozinfo_path):
+                topsrcdir, topobjdir, mozconfig = load_mozinfo(mozinfo_path)
+                break
+
+        if not topsrcdir:
+            # See if we're running from a Python virtualenv that's inside an objdir.
+            # sys.prefix would look like "$objdir/_virtualenvs/$virtualenv/".
+            # Note that virtualenv-based objdir detection doesn't work for
+            # instrumented builds, because their virtualenvs aren't created in
+            # the scoped "instrumented" objdir. However,
+            # working-directory-ancestor-based objdir resolution should fully
+            # cover that case.
+            mozinfo_path = os.path.join(sys.prefix, "..", "..", "mozinfo.json")
+            if detect_virtualenv_mozinfo and os.path.isfile(mozinfo_path):
+                topsrcdir, topobjdir, mozconfig = load_mozinfo(mozinfo_path)
+
+        if not topsrcdir:
+            topsrcdir = str(Path(__file__).parent.parent.parent.parent.resolve())
+
+        topsrcdir = mozpath.realpath(topsrcdir)
+        if topobjdir:
+            topobjdir = mozpath.realpath(topobjdir)
+
+            if topsrcdir == topobjdir:
+                raise BadEnvironmentException(
+                    "The object directory appears "
+                    "to be the same as your source directory (%s). This build "
+                    "configuration is not supported." % topsrcdir
+                )
+
+        # If we can't resolve topobjdir, oh well. We'll figure it out when we
+        # need one.
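Condensed to its core, the detection walk above can be read as the following self-contained sketch; the mozinfo.json keys used are the same ones read by `load_mozinfo`:

```python
import json
from pathlib import Path

def find_objdir_info(cwd):
    """Walk from cwd toward the filesystem root and stop at the first
    mozinfo.json, which marks the top of an objdir (as above).

    Returns (topsrcdir, topobjdir, mozconfig) or (None, None, None).
    """
    for directory in [Path(cwd)] + list(Path(cwd).parents):
        mozinfo = directory / "mozinfo.json"
        if mozinfo.is_file():
            info = json.loads(mozinfo.read_text(encoding="utf-8"))
            return info.get("topsrcdir"), str(directory), info.get("mozconfig")
    return None, None, None
```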
+ return cls( + topsrcdir, None, None, topobjdir=topobjdir, mozconfig=mozconfig, **kwargs + ) + + def resolve_mozconfig_topobjdir(self, default=None): + topobjdir = self.mozconfig.get("topobjdir") or default + if not topobjdir: + return None + + if "@CONFIG_GUESS@" in topobjdir: + topobjdir = topobjdir.replace("@CONFIG_GUESS@", self.resolve_config_guess()) + + if not os.path.isabs(topobjdir): + topobjdir = os.path.abspath(os.path.join(self.topsrcdir, topobjdir)) + + return mozpath.normsep(os.path.normpath(topobjdir)) + + def build_out_of_date(self, output, dep_file): + if not os.path.isfile(output): + print(" Output reference file not found: %s" % output) + return True + if not os.path.isfile(dep_file): + print(" Dependency file not found: %s" % dep_file) + return True + + deps = [] + with io.open(dep_file, "r", encoding="utf-8", newline="\n") as fh: + deps = fh.read().splitlines() + + mtime = os.path.getmtime(output) + for f in deps: + try: + dep_mtime = os.path.getmtime(f) + except OSError as e: + if e.errno == errno.ENOENT: + print(" Input not found: %s" % f) + return True + raise + if dep_mtime > mtime: + print(" %s is out of date with respect to %s" % (output, f)) + return True + return False + + def backend_out_of_date(self, backend_file): + if not os.path.isfile(backend_file): + return True + + # Check if any of our output files have been removed since + # we last built the backend, re-generate the backend if + # so. + outputs = [] + with io.open(backend_file, "r", encoding="utf-8", newline="\n") as fh: + outputs = fh.read().splitlines() + for output in outputs: + if not os.path.isfile(mozpath.join(self.topobjdir, output)): + return True + + dep_file = "%s.in" % backend_file + return self.build_out_of_date(backend_file, dep_file) + + @property + def topobjdir(self): + if self._topobjdir is None: + self._topobjdir = self.resolve_mozconfig_topobjdir( + default="obj-@CONFIG_GUESS@" + ) + + return self._topobjdir + + @property + def virtualenv_manager(self): + from mach.site import CommandSiteManager + from mozboot.util import get_state_dir + + if self._virtualenv_manager is None: + self._virtualenv_manager = CommandSiteManager.from_environment( + self.topsrcdir, + lambda: get_state_dir( + specific_to_topsrcdir=True, topsrcdir=self.topsrcdir + ), + self._virtualenv_name, + os.path.join(self.topobjdir, "_virtualenvs"), + ) + + return self._virtualenv_manager + + @staticmethod + @memoize + def get_base_mozconfig_info(topsrcdir, path, env_mozconfig): + # env_mozconfig is only useful for unittests, which change the value of + # the environment variable, which has an impact on autodetection (when + # path is MozconfigLoader.AUTODETECT), and memoization wouldn't account + # for it without the explicit (unused) argument. + out = six.StringIO() + env = os.environ + if path and path != MozconfigLoader.AUTODETECT: + env = dict(env) + env["MOZCONFIG"] = path + + # We use python configure to get mozconfig content and the value for + # --target (from mozconfig if necessary, guessed otherwise). + + # Modified configure sandbox that replaces '--help' dependencies with + # `always`, such that depends functions with a '--help' dependency are + # not automatically executed when including files. We don't want all of + # those from init.configure to execute, only a subset. 
+        class ReducedConfigureSandbox(ConfigureSandbox):
+            def depends_impl(self, *args, **kwargs):
+                args = tuple(
+                    a
+                    if not isinstance(a, six.string_types) or a != "--help"
+                    else self._always.sandboxed
+                    for a in args
+                )
+                return super(ReducedConfigureSandbox, self).depends_impl(
+                    *args, **kwargs
+                )
+
+        # This may be called recursively from configure itself for $reasons,
+        # so avoid logging to the same logger (configure uses "moz.configure").
+        logger = logging.getLogger("moz.configure.reduced")
+        handler = logging.StreamHandler(out)
+        logger.addHandler(handler)
+        # If this were true, logging would still propagate to "moz.configure".
+        logger.propagate = False
+        sandbox = ReducedConfigureSandbox(
+            {},
+            environ=env,
+            argv=["mach"],
+            logger=logger,
+        )
+        base_dir = os.path.join(topsrcdir, "build", "moz.configure")
+        try:
+            sandbox.include_file(os.path.join(base_dir, "init.configure"))
+            # Force mozconfig options injection before getting the target.
+            sandbox._value_for(sandbox["mozconfig_options"])
+            return {
+                "mozconfig": sandbox._value_for(sandbox["mozconfig"]),
+                "target": sandbox._value_for(sandbox["real_target"]),
+                "project": sandbox._value_for(sandbox._options["project"]),
+                "artifact-builds": sandbox._value_for(
+                    sandbox._options["artifact-builds"]
+                ),
+            }
+        except SystemExit:
+            print(out.getvalue())
+            raise
+
+    @property
+    def base_mozconfig_info(self):
+        return self.get_base_mozconfig_info(
+            self.topsrcdir, self._mozconfig, os.environ.get("MOZCONFIG")
+        )
+
+    @property
+    def mozconfig(self):
+        """Returns information about the current mozconfig file.
+
+        This is a dict as returned by MozconfigLoader.read_mozconfig().
+        """
+        return self.base_mozconfig_info["mozconfig"]
+
+    @property
+    def config_environment(self):
+        """Returns the ConfigEnvironment for the current build configuration.
+
+        This property is only available once configure has executed.
+
+        If configure's output is not available, this will raise.
+        """
+        if self._config_environment:
+            return self._config_environment
+
+        config_status = os.path.join(self.topobjdir, "config.status")
+
+        if not os.path.exists(config_status) or not os.path.getsize(config_status):
+            raise BuildEnvironmentNotFoundException(
+                "config.status not available. Run configure."
+            )
+
+        try:
+            self._config_environment = ConfigEnvironment.from_config_status(
+                config_status
+            )
+        except ConfigStatusFailure as e:
+            six.raise_from(
+                BuildEnvironmentNotFoundException(
+                    "config.status is outdated or broken. Run configure."
+ ), + e, + ) + + return self._config_environment + + @property + def defines(self): + return self.config_environment.defines + + @property + def substs(self): + return self.config_environment.substs + + @property + def distdir(self): + return os.path.join(self.topobjdir, "dist") + + @property + def bindir(self): + return os.path.join(self.topobjdir, "dist", "bin") + + @property + def includedir(self): + return os.path.join(self.topobjdir, "dist", "include") + + @property + def statedir(self): + return os.path.join(self.topobjdir, ".mozbuild") + + @property + def platform(self): + """Returns current platform and architecture name""" + import mozinfo + + platform_name = None + bits = str(mozinfo.info["bits"]) + if mozinfo.isLinux: + platform_name = "linux" + bits + elif mozinfo.isWin: + platform_name = "win" + bits + elif mozinfo.isMac: + platform_name = "macosx" + bits + + return platform_name, bits + "bit" + + @memoized_property + def repository(self): + """Get a `mozversioncontrol.Repository` object for the + top source directory.""" + # We try to obtain a repo using the configured VCS info first. + # If we don't have a configure context, fall back to auto-detection. + try: + return get_repository_from_build_config(self) + except ( + BuildEnvironmentNotFoundException, + MissingConfigureInfo, + MissingVCSTool, + ): + pass + + return get_repository_object(self.topsrcdir) + + def reload_config_environment(self): + """Force config.status to be re-read and return the new value + of ``self.config_environment``. + """ + self._config_environment = None + return self.config_environment + + def mozbuild_reader( + self, config_mode="build", vcs_revision=None, vcs_check_clean=True + ): + """Obtain a ``BuildReader`` for evaluating moz.build files. + + Given arguments, returns a ``mozbuild.frontend.reader.BuildReader`` + that can be used to evaluate moz.build files for this repo. + + ``config_mode`` is either ``build`` or ``empty``. If ``build``, + ``self.config_environment`` is used. This requires a configured build + system to work. If ``empty``, an empty config is used. ``empty`` is + appropriate for file-based traversal mode where ``Files`` metadata is + read. + + If ``vcs_revision`` is defined, it specifies a version control revision + to use to obtain files content. The default is to use the filesystem. + This mode is only supported with Mercurial repositories. + + If ``vcs_revision`` is not defined and the version control checkout is + sparse, this implies ``vcs_revision='.'``. + + If ``vcs_revision`` is ``.`` (denotes the parent of the working + directory), we will verify that the working directory is clean unless + ``vcs_check_clean`` is False. This prevents confusion due to uncommitted + file changes not being reflected in the reader. + """ + from mozpack.files import MercurialRevisionFinder + + from mozbuild.frontend.reader import BuildReader, EmptyConfig, default_finder + + if config_mode == "build": + config = self.config_environment + elif config_mode == "empty": + config = EmptyConfig(self.topsrcdir) + else: + raise ValueError("unknown config_mode value: %s" % config_mode) + + try: + repo = self.repository + except InvalidRepoPath: + repo = None + + if ( + repo + and repo != "SOURCE" + and not vcs_revision + and repo.sparse_checkout_present() + ): + vcs_revision = "." + + if vcs_revision is None: + finder = default_finder + else: + # If we failed to detect the repo prior, check again to raise its + # exception. 
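As a small usage sketch for the two `config_mode` values documented above; `obj` stands for any `MozbuildObject` instance and is an assumption of the example, not a name from the patch:

```python
def make_reader(obj, configured=False):
    """Pick a BuildReader flavor as described in the docstring above."""
    if configured:
        # Uses obj.config_environment, so it requires a configured objdir.
        return obj.mozbuild_reader(config_mode="build")
    # File-based traversal: suitable for reading Files metadata without
    # any build configuration.
    return obj.mozbuild_reader(config_mode="empty")
```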
+            if not repo:
+                self.repository
+                assert False
+
+            if repo.name != "hg":
+                raise Exception("do not support VCS reading mode for %s" % repo.name)
+
+            if vcs_revision == "." and vcs_check_clean:
+                with repo:
+                    if not repo.working_directory_clean():
+                        raise Exception(
+                            "working directory is not clean; "
+                            "refusing to use a VCS-based finder"
+                        )
+
+            finder = MercurialRevisionFinder(
+                self.topsrcdir, rev=vcs_revision, recognize_repo_paths=True
+            )
+
+        return BuildReader(config, finder=finder)
+
+    def is_clobber_needed(self):
+        if not os.path.exists(self.topobjdir):
+            return False
+        return Clobberer(self.topsrcdir, self.topobjdir).clobber_needed()
+
+    def get_binary_path(self, what="app", validate_exists=True, where="default"):
+        """Obtain the path to a compiled binary for this build configuration.
+
+        The what argument is the program or tool being sought after. See the
+        code implementation for supported values.
+
+        If validate_exists is True (the default), we will ensure the found path
+        exists before returning, raising an exception if it doesn't.
+
+        If where is 'staged-package', we will return the path to the binary in
+        the package staging directory.
+
+        If no arguments are specified, we will return the main binary for the
+        configured XUL application.
+        """
+
+        if where not in ("default", "staged-package"):
+            raise Exception("Don't know location %s" % where)
+
+        substs = self.substs
+
+        stem = self.distdir
+        if where == "staged-package":
+            stem = os.path.join(stem, substs["MOZ_APP_NAME"])
+
+        if substs["OS_ARCH"] == "Darwin" and "MOZ_MACBUNDLE_NAME" in substs:
+            stem = os.path.join(stem, substs["MOZ_MACBUNDLE_NAME"], "Contents", "MacOS")
+        elif where == "default":
+            stem = os.path.join(stem, "bin")
+
+        leaf = None
+
+        leaf = (substs["MOZ_APP_NAME"] if what == "app" else what) + substs[
+            "BIN_SUFFIX"
+        ]
+        path = os.path.join(stem, leaf)
+
+        if validate_exists and not os.path.exists(path):
+            raise BinaryNotFoundException(path)
+
+        return path
+
+    def resolve_config_guess(self):
+        return self.base_mozconfig_info["target"].alias
+
+    def notify(self, msg):
+        """Show a desktop notification with the supplied message
+
+        On Linux and Mac, this will show a desktop notification with the message,
+        but on Windows we can only flash the screen.
+        """
+        if "MOZ_NOSPAM" in os.environ or "MOZ_AUTOMATION" in os.environ:
+            return
+
+        try:
+            if sys.platform.startswith("darwin"):
+                notifier = which("terminal-notifier")
+                if not notifier:
+                    raise Exception(
+                        "Install terminal-notifier to get "
+                        "a notification when the build finishes."
+                    )
+                self.run_process(
+                    [
+                        notifier,
+                        "-title",
+                        "Mozilla Build System",
+                        "-group",
+                        "mozbuild",
+                        "-message",
+                        msg,
+                    ],
+                    ensure_exit_code=False,
+                )
+            elif sys.platform.startswith("win"):
+                from ctypes import POINTER, WINFUNCTYPE, Structure, sizeof, windll
+                from ctypes.wintypes import BOOL, DWORD, HANDLE, UINT
+
+                class FLASHWINDOW(Structure):
+                    _fields_ = [
+                        ("cbSize", UINT),
+                        ("hwnd", HANDLE),
+                        ("dwFlags", DWORD),
+                        ("uCount", UINT),
+                        ("dwTimeout", DWORD),
+                    ]
+
+                FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
+                FlashWindowEx = FlashWindowExProto(("FlashWindowEx", windll.user32))
+                FLASHW_CAPTION = 0x01
+                FLASHW_TRAY = 0x02
+                FLASHW_TIMERNOFG = 0x0C
+
+                # GetConsoleWindow returns NULL if no console is attached. We
+                # can't flash nothing.
+                console = windll.kernel32.GetConsoleWindow()
+                if not console:
+                    return
+
+                params = FLASHWINDOW(
+                    sizeof(FLASHWINDOW),
+                    console,
+                    FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG,
+                    3,
+                    0,
+                )
+                FlashWindowEx(params)
+            else:
+                notifier = which("notify-send")
+                if not notifier:
+                    raise Exception(
+                        "Install notify-send (usually part of "
+                        "the libnotify package) to get a notification when "
+                        "the build finishes."
+                    )
+                self.run_process(
+                    [
+                        notifier,
+                        "--app-name=Mozilla Build System",
+                        "Mozilla Build System",
+                        msg,
+                    ],
+                    ensure_exit_code=False,
+                )
+        except Exception as e:
+            self.log(
+                logging.WARNING,
+                "notifier-failed",
+                {"error": str(e)},
+                "Notification center failed: {error}",
+            )
+
+    def _ensure_objdir_exists(self):
+        if os.path.isdir(self.statedir):
+            return
+
+        os.makedirs(self.statedir)
+
+    def _ensure_state_subdir_exists(self, subdir):
+        path = os.path.join(self.statedir, subdir)
+
+        if os.path.isdir(path):
+            return
+
+        os.makedirs(path)
+
+    def _get_state_filename(self, filename, subdir=None):
+        path = self.statedir
+
+        if subdir:
+            path = os.path.join(path, subdir)
+
+        return os.path.join(path, filename)
+
+    def _wrap_path_argument(self, arg):
+        return PathArgument(arg, self.topsrcdir, self.topobjdir)
+
+    def _run_make(
+        self,
+        directory=None,
+        filename=None,
+        target=None,
+        log=True,
+        srcdir=False,
+        line_handler=None,
+        append_env=None,
+        explicit_env=None,
+        ignore_errors=False,
+        ensure_exit_code=0,
+        silent=True,
+        print_directory=True,
+        pass_thru=False,
+        num_jobs=0,
+        job_size=0,
+        keep_going=False,
+    ):
+        """Invoke make.
+
+        directory -- Relative directory to look for Makefile in.
+        filename -- Explicit makefile to run.
+        target -- Makefile target(s) to make. Can be a string or iterable of
+            strings.
+        srcdir -- If True, invoke make from the source directory tree.
+            Otherwise, make will be invoked from the object directory.
+        silent -- If True (the default), run make in silent mode.
+        print_directory -- If True (the default), have make print directories
+            while doing traversal.
+        """
+        self._ensure_objdir_exists()
+
+        args = [self.substs["GMAKE"]]
+
+        if directory:
+            args.extend(["-C", directory.replace(os.sep, "/")])
+
+        if filename:
+            args.extend(["-f", filename])
+
+        if num_jobs == 0 and self.mozconfig["make_flags"]:
+            flags = iter(self.mozconfig["make_flags"])
+            for flag in flags:
+                if flag == "-j":
+                    try:
+                        flag = next(flags)
+                    except StopIteration:
+                        break
+                    try:
+                        num_jobs = int(flag)
+                    except ValueError:
+                        args.append(flag)
+                elif flag.startswith("-j"):
+                    try:
+                        num_jobs = int(flag[2:])
+                    except (ValueError, IndexError):
+                        break
+                else:
+                    args.append(flag)
+
+        if num_jobs == 0:
+            if job_size == 0:
+                job_size = 2.0 if self.substs.get("CC_TYPE") == "gcc" else 1.0  # GiB
+
+            cpus = multiprocessing.cpu_count()
+            if not psutil or not job_size:
+                num_jobs = cpus
+            else:
+                mem_gb = psutil.virtual_memory().total / 1024 ** 3
+                from_mem = round(mem_gb / job_size)
+                num_jobs = max(1, min(cpus, from_mem))
+                print(
+                    "  Parallelism determined by memory: using %d jobs for %d cores "
+                    "based on %.1f GiB RAM and estimated job size of %.1f GiB"
+                    % (num_jobs, cpus, mem_gb, job_size)
+                )
+
+        args.append("-j%d" % num_jobs)
+
+        if ignore_errors:
+            args.append("-k")
+
+        if silent:
+            args.append("-s")
+
+        # Print entering/leaving directory messages. Some consumers look at
+        # these to measure progress.
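The `-j` handling above accepts both the `-j N` and `-jN` spellings; a self-contained sketch of that parse, with a couple of checks at the end:

```python
def parse_jobs(make_flags):
    """Extract a job count from make flags, mirroring the loop above.

    Returns (num_jobs, passthrough_flags); num_jobs is 0 if no valid
    -j value was found.
    """
    num_jobs = 0
    passthrough = []
    flags = iter(make_flags)
    for flag in flags:
        if flag == "-j":
            try:
                flag = next(flags)  # the value is in the following argument
            except StopIteration:
                break
            try:
                num_jobs = int(flag)
            except ValueError:
                passthrough.append(flag)
        elif flag.startswith("-j"):
            try:
                num_jobs = int(flag[2:])
            except ValueError:
                break
        else:
            passthrough.append(flag)
    return num_jobs, passthrough

assert parse_jobs(["-j", "8", "-s"]) == (8, ["-s"])
assert parse_jobs(["-j4"]) == (4, [])
```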
+ if print_directory: + args.append("-w") + + if keep_going: + args.append("-k") + + if isinstance(target, list): + args.extend(target) + elif target: + args.append(target) + + fn = self._run_command_in_objdir + + if srcdir: + fn = self._run_command_in_srcdir + + append_env = dict(append_env or ()) + append_env["MACH"] = "1" + + params = { + "args": args, + "line_handler": line_handler, + "append_env": append_env, + "explicit_env": explicit_env, + "log_level": logging.INFO, + "require_unix_environment": False, + "ensure_exit_code": ensure_exit_code, + "pass_thru": pass_thru, + # Make manages its children, so mozprocess doesn't need to bother. + # Having mozprocess manage children can also have side-effects when + # building on Windows. See bug 796840. + "ignore_children": True, + } + + if log: + params["log_name"] = "make" + + return fn(**params) + + def _run_command_in_srcdir(self, **args): + return self.run_process(cwd=self.topsrcdir, **args) + + def _run_command_in_objdir(self, **args): + return self.run_process(cwd=self.topobjdir, **args) + + def _is_windows(self): + return os.name in ("nt", "ce") + + def _is_osx(self): + return "darwin" in str(sys.platform).lower() + + def _spawn(self, cls): + """Create a new MozbuildObject-derived class instance from ourselves. + + This is used as a convenience method to create other + MozbuildObject-derived class instances. It can only be used on + classes that have the same constructor arguments as us. + """ + + return cls( + self.topsrcdir, self.settings, self.log_manager, topobjdir=self.topobjdir + ) + + def activate_virtualenv(self): + self.virtualenv_manager.activate() + + def _set_log_level(self, verbose): + self.log_manager.terminal_handler.setLevel( + logging.INFO if not verbose else logging.DEBUG + ) + + def _ensure_zstd(self): + try: + import zstandard # noqa: F401 + except (ImportError, AttributeError): + self.activate_virtualenv() + self.virtualenv_manager.install_pip_requirements( + os.path.join(self.topsrcdir, "build", "zstandard_requirements.txt") + ) + + +class MachCommandBase(MozbuildObject): + """Base class for mach command providers that wish to be MozbuildObjects. + + This provides a level of indirection so MozbuildObject can be refactored + without having to change everything that inherits from it. + """ + + def __init__(self, context, virtualenv_name=None, metrics=None, no_auto_log=False): + # Attempt to discover topobjdir through environment detection, as it is + # more reliable than mozconfig when cwd is inside an objdir. + topsrcdir = context.topdir + topobjdir = None + detect_virtualenv_mozinfo = True + if hasattr(context, "detect_virtualenv_mozinfo"): + detect_virtualenv_mozinfo = getattr(context, "detect_virtualenv_mozinfo") + try: + dummy = MozbuildObject.from_environment( + cwd=context.cwd, detect_virtualenv_mozinfo=detect_virtualenv_mozinfo + ) + topsrcdir = dummy.topsrcdir + topobjdir = dummy._topobjdir + if topobjdir: + # If we're inside a objdir and the found mozconfig resolves to + # another objdir, we abort. The reasoning here is that if you + # are inside an objdir you probably want to perform actions on + # that objdir, not another one. This prevents accidental usage + # of the wrong objdir when the current objdir is ambiguous. 
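The ambiguity check that follows compares the two candidate objdirs with `Path.samefile`, so two different spellings of one directory are not treated as a mismatch; a minimal sketch with illustrative (hypothetical) paths:

```python
from pathlib import Path

def check_objdir_consistency(detected, configured):
    """Raise when two objdir paths refer to different directories.

    Path.samefile compares the underlying filesystem entries, so a
    symlinked or differently-spelled path to the same objdir passes.
    Note both paths must exist for samefile to succeed.
    """
    if configured and not Path(detected).samefile(Path(configured)):
        raise ValueError("Objdir mismatch: %s != %s" % (detected, configured))
```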
+            config_topobjdir = dummy.resolve_mozconfig_topobjdir()
+
+            if config_topobjdir and not Path(topobjdir).samefile(
+                Path(config_topobjdir)
+            ):
+                raise ObjdirMismatchException(topobjdir, config_topobjdir)
+        except BuildEnvironmentNotFoundException:
+            pass
+        except ObjdirMismatchException as e:
+            print(
+                "Ambiguous object directory detected. We detected that "
+                "both %s and %s could be object directories. This is "
+                "typically caused by having a mozconfig pointing to a "
+                "different object directory from the current working "
+                "directory. To solve this problem, ensure you do not have a "
+                "default mozconfig in searched paths." % (e.objdir1, e.objdir2)
+            )
+            sys.exit(1)
+
+        except MozconfigLoadException as e:
+            print(e)
+            sys.exit(1)
+
+        MozbuildObject.__init__(
+            self,
+            topsrcdir,
+            context.settings,
+            context.log_manager,
+            topobjdir=topobjdir,
+            virtualenv_name=virtualenv_name,
+        )
+
+        self._mach_context = context
+        self.metrics = metrics
+
+        # Incur mozconfig processing so we have unified error handling for
+        # errors. Otherwise, the exceptions could bubble back to mach's error
+        # handler.
+        try:
+            self.mozconfig
+
+        except MozconfigFindException as e:
+            print(e)
+            sys.exit(1)
+
+        except MozconfigLoadException as e:
+            print(e)
+            sys.exit(1)
+
+        # Always keep a log of the last command, but don't do that for mach
+        # invocations from scripts (especially not the ones done by the build
+        # system itself).
+        try:
+            fileno = getattr(sys.stdout, "fileno", lambda: None)()
+        except io.UnsupportedOperation:
+            fileno = None
+        if fileno and os.isatty(fileno) and not no_auto_log:
+            self._ensure_state_subdir_exists(".")
+            logfile = self._get_state_filename("last_log.json")
+            try:
+                fd = open(logfile, "wt")
+                self.log_manager.add_json_handler(fd)
+            except Exception as e:
+                self.log(
+                    logging.WARNING,
+                    "mach",
+                    {"error": str(e)},
+                    "Log will not be kept for this command: {error}.",
+                )
+
+    def _sub_mach(self, argv):
+        return subprocess.call(
+            [sys.executable, os.path.join(self.topsrcdir, "mach")] + argv
+        )
+
+
+class MachCommandConditions(object):
+    """A series of commonly used condition functions which can be applied to
+    mach commands with providers deriving from MachCommandBase.
+    """
+
+    @staticmethod
+    def is_firefox(cls):
+        """Must have a Firefox build."""
+        if hasattr(cls, "substs"):
+            return cls.substs.get("MOZ_BUILD_APP") == "browser"
+        return False
+
+    @staticmethod
+    def is_jsshell(cls):
+        """Must have a jsshell build."""
+        if hasattr(cls, "substs"):
+            return cls.substs.get("MOZ_BUILD_APP") == "js"
+        return False
+
+    @staticmethod
+    def is_thunderbird(cls):
+        """Must have a Thunderbird build."""
+        if hasattr(cls, "substs"):
+            return cls.substs.get("MOZ_BUILD_APP") == "comm/mail"
+        return False
+
+    @staticmethod
+    def is_firefox_or_thunderbird(cls):
+        """Must have a Firefox or Thunderbird build."""
+        return MachCommandConditions.is_firefox(
+            cls
+        ) or MachCommandConditions.is_thunderbird(cls)
+
+    @staticmethod
+    def is_android(cls):
+        """Must have an Android build."""
+        if hasattr(cls, "substs"):
+            return cls.substs.get("MOZ_WIDGET_TOOLKIT") == "android"
+        return False
+
+    @staticmethod
+    def is_not_android(cls):
+        """Must not have an Android build."""
+        if hasattr(cls, "substs"):
+            return cls.substs.get("MOZ_WIDGET_TOOLKIT") != "android"
+        return False
+
+    @staticmethod
+    def is_firefox_or_android(cls):
+        """Must have a Firefox or Android build."""
+        return MachCommandConditions.is_firefox(
+            cls
+        ) or MachCommandConditions.is_android(cls)
+
+    @staticmethod
+    def has_build(cls):
+        """Must have a build."""
+        return MachCommandConditions.is_firefox_or_android(
+            cls
+        ) or MachCommandConditions.is_thunderbird(cls)
+
+    @staticmethod
+    def has_build_or_shell(cls):
+        """Must have a build or a shell build."""
+        return MachCommandConditions.has_build(cls) or MachCommandConditions.is_jsshell(
+            cls
+        )
+
+    @staticmethod
+    def is_hg(cls):
+        """Must have a Mercurial source checkout."""
+        try:
+            return isinstance(cls.repository, HgRepository)
+        except InvalidRepoPath:
+            return False
+
+    @staticmethod
+    def is_git(cls):
+        """Must have a git source checkout."""
+        try:
+            return isinstance(cls.repository, GitRepository)
+        except InvalidRepoPath:
+            return False
+
+    @staticmethod
+    def is_artifact_build(cls):
+        """Must be an artifact build."""
+        if hasattr(cls, "substs"):
+            return getattr(cls, "substs", {}).get("MOZ_ARTIFACT_BUILDS")
+        return False
+
+    @staticmethod
+    def is_non_artifact_build(cls):
+        """Must not be an artifact build."""
+        if hasattr(cls, "substs"):
+            return not MachCommandConditions.is_artifact_build(cls)
+        return False
+
+    @staticmethod
+    def is_buildapp_in(cls, apps):
+        """Must have a build for one of the given apps."""
+        for app in apps:
+            attr = getattr(MachCommandConditions, "is_{}".format(app), None)
+            if attr and attr(cls):
+                return True
+        return False
+
+
+class PathArgument(object):
+    """Parse a filesystem path argument and transform it in various ways."""
+
+    def __init__(self, arg, topsrcdir, topobjdir, cwd=None):
+        self.arg = arg
+        self.topsrcdir = topsrcdir
+        self.topobjdir = topobjdir
+        self.cwd = os.getcwd() if cwd is None else cwd
+
+    def relpath(self):
+        """Return a path relative to the topsrcdir or topobjdir.
+
+        If the argument is a path to a location in one of the base directories
+        (topsrcdir or topobjdir), then strip off the base directory part and
+        just return the path within the base directory."""
+
+        abspath = os.path.abspath(os.path.join(self.cwd, self.arg))
+
+        # If that path is within topsrcdir or topobjdir, return an equivalent
+        # path relative to that base directory.
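A usage sketch for `PathArgument`, assuming POSIX-style placeholder directories (the paths are purely illustrative):

```python
# Hypothetical directories, purely for illustration.
arg = PathArgument("dom/base", topsrcdir="/src", topobjdir="/obj", cwd="/src")
print(arg.relpath())      # dom/base
print(arg.srcdir_path())  # /src/dom/base
print(arg.objdir_path())  # /obj/dom/base
```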
+ for base_dir in [self.topobjdir, self.topsrcdir]: + if abspath.startswith(os.path.abspath(base_dir)): + return mozpath.relpath(abspath, base_dir) + + return mozpath.normsep(self.arg) + + def srcdir_path(self): + return mozpath.join(self.topsrcdir, self.relpath()) + + def objdir_path(self): + return mozpath.join(self.topobjdir, self.relpath()) + + +class ExecutionSummary(dict): + """Helper for execution summaries.""" + + def __init__(self, summary_format, **data): + self._summary_format = "" + assert "execution_time" in data + self.extend(summary_format, **data) + + def extend(self, summary_format, **data): + self._summary_format += summary_format + self.update(data) + + def __str__(self): + return self._summary_format.format(**self) + + def __getattr__(self, key): + return self[key] diff --git a/python/mozbuild/mozbuild/bootstrap.py b/python/mozbuild/mozbuild/bootstrap.py new file mode 100644 index 0000000000..60a307145c --- /dev/null +++ b/python/mozbuild/mozbuild/bootstrap.py @@ -0,0 +1,61 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import functools +import io +import logging +import os +from pathlib import Path + +from mozbuild.configure import ConfigureSandbox + + +def _raw_sandbox(extra_args=[]): + # Here, we don't want an existing mozconfig to interfere with what we + # do, neither do we want the default for --enable-bootstrap (which is not + # always on) to prevent this from doing something. + out = io.StringIO() + logger = logging.getLogger("moz.configure") + handler = logging.StreamHandler(out) + logger.addHandler(handler) + logger.propagate = False + sandbox = ConfigureSandbox( + {}, + argv=["configure"] + + ["--enable-bootstrap", f"MOZCONFIG={os.devnull}"] + + extra_args, + logger=logger, + ) + return sandbox + + +@functools.lru_cache(maxsize=None) +def _bootstrap_sandbox(): + sandbox = _raw_sandbox() + moz_configure = ( + Path(__file__).parent.parent.parent.parent / "build" / "moz.configure" + ) + sandbox.include_file(str(moz_configure / "init.configure")) + # bootstrap_search_path_order has a dependency on developer_options, which + # is not defined in init.configure. Its value doesn't matter for us, though. + sandbox["developer_options"] = sandbox["always"] + sandbox.include_file(str(moz_configure / "bootstrap.configure")) + return sandbox + + +def bootstrap_toolchain(toolchain_job): + # Expand the `bootstrap_path` template for the given toolchain_job, and execute the + # expanded function via `_value_for`, which will trigger autobootstrap. + # Returns the path to the toolchain. + sandbox = _bootstrap_sandbox() + return sandbox._value_for(sandbox["bootstrap_path"](toolchain_job)) + + +def bootstrap_all_toolchains_for(configure_args=[]): + sandbox = _raw_sandbox(configure_args) + moz_configure = Path(__file__).parent.parent.parent.parent / "moz.configure" + sandbox.include_file(str(moz_configure)) + for depend in sandbox._depends.values(): + if depend.name == "bootstrap_path": + depend.result() diff --git a/python/mozbuild/mozbuild/build_commands.py b/python/mozbuild/mozbuild/build_commands.py new file mode 100644 index 0000000000..47398dc3a0 --- /dev/null +++ b/python/mozbuild/mozbuild/build_commands.py @@ -0,0 +1,366 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import subprocess
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument
+
+from mozbuild.backend import backends
+from mozbuild.mozconfig import MozconfigLoader
+from mozbuild.util import MOZBUILD_METRICS_PATH
+
+BUILD_WHAT_HELP = """
+What to build. Can be a top-level make target or a relative directory. If
+multiple options are provided, they will be built serially. BUILDING ONLY PARTS
+OF THE TREE CAN RESULT IN BAD TREE STATE. USE AT YOUR OWN RISK.
+""".strip()
+
+
+def _set_priority(priority, verbose):
+    # Choose the Windows API structure to standardize on.
+    PRIO_CLASS_BY_KEY = {
+        "idle": "IDLE_PRIORITY_CLASS",
+        "less": "BELOW_NORMAL_PRIORITY_CLASS",
+        "normal": "NORMAL_PRIORITY_CLASS",
+        "more": "ABOVE_NORMAL_PRIORITY_CLASS",
+        "high": "HIGH_PRIORITY_CLASS",
+    }
+    try:
+        prio_class = PRIO_CLASS_BY_KEY[priority]
+    except KeyError:
+        raise KeyError(f"priority '{priority}' not in {list(PRIO_CLASS_BY_KEY)}")
+
+    if "nice" in dir(os):
+        # Translate the Windows priority classes into niceness values.
+        NICENESS_BY_PRIO_CLASS = {
+            "IDLE_PRIORITY_CLASS": 19,
+            "BELOW_NORMAL_PRIORITY_CLASS": 10,
+            "NORMAL_PRIORITY_CLASS": 0,
+            "ABOVE_NORMAL_PRIORITY_CLASS": -10,
+            "HIGH_PRIORITY_CLASS": -20,
+        }
+        niceness = NICENESS_BY_PRIO_CLASS[prio_class]
+
+        os.nice(niceness)
+        if verbose:
+            print(f"os.nice({niceness})")
+        return True
+
+    try:
+        import psutil
+
+        prio_class_val = getattr(psutil, prio_class)
+    except ModuleNotFoundError:
+        return False
+    except AttributeError:
+        return False
+
+    psutil.Process().nice(prio_class_val)
+    if verbose:
+        print(f"psutil.Process().nice(psutil.{prio_class})")
+    return True
+
+
+# Interface to build the tree.
+
+
+@Command(
+    "build",
+    category="build",
+    description="Build the tree.",
+    metrics_path=MOZBUILD_METRICS_PATH,
+    virtualenv_name="build",
+)
+@CommandArgument(
+    "--jobs",
+    "-j",
+    default="0",
+    metavar="jobs",
+    type=int,
+    help="Number of concurrent jobs to run. Default is based on the number of "
+    "CPUs and the estimated size of the jobs (see --job-size).",
+)
+@CommandArgument(
+    "--job-size",
+    default="0",
+    metavar="size",
+    type=float,
+    help="Estimated RAM required, in GiB, for each parallel job. Used to "
+    "compute a default number of concurrent jobs.",
+)
+@CommandArgument(
+    "-C",
+    "--directory",
+    default=None,
+    help="Change to a subdirectory of the build directory first.",
+)
+@CommandArgument("what", default=None, nargs="*", help=BUILD_WHAT_HELP)
+@CommandArgument(
+    "-v",
+    "--verbose",
+    action="store_true",
+    help="Verbose output for what commands the build is running.",
+)
+@CommandArgument(
+    "--keep-going",
+    action="store_true",
+    help="Keep building after an error has occurred.",
+)
+@CommandArgument(
+    "--priority",
+    default="less",
+    metavar="priority",
+    type=str,
+    help="idle/less/normal/more/high. (Default: less)",
+)
+def build(
+    command_context,
+    what=None,
+    jobs=0,
+    job_size=0,
+    directory=None,
+    verbose=False,
+    keep_going=False,
+    priority="less",
+):
+    """Build the source tree.
+
+    With no arguments, this will perform a full build.
+
+    Positional arguments define targets to build. These can be make targets
+    or patterns like "<dir>/<target>" to indicate a make target within a
+    directory.
+
+    There are a few special targets that can be used to perform a partial
+    build faster than what `mach build` would perform:
+
+    * binaries - compiles and links all C/C++ sources and produces shared
+      libraries and executables (binaries).
+
+    * faster - builds JavaScript, XUL, CSS, etc. files.
+
+    "binaries" and "faster" almost fully complement each other. However,
+    there are build actions not captured by either. If things don't appear to
+    be rebuilding, perform a vanilla `mach build` to rebuild the world.
+    """
+    from mozbuild.controller.building import BuildDriver
+
+    command_context.log_manager.enable_all_structured_loggers()
+
+    loader = MozconfigLoader(command_context.topsrcdir)
+    mozconfig = loader.read_mozconfig(loader.AUTODETECT)
+    configure_args = mozconfig["configure_args"]
+    doing_pgo = configure_args and "MOZ_PGO=1" in configure_args
+    # Force verbosity on automation.
+    verbose = verbose or bool(os.environ.get("MOZ_AUTOMATION", False))
+    # Keep going by default on automation so that we exhaust as many errors as
+    # possible.
+    keep_going = keep_going or bool(os.environ.get("MOZ_AUTOMATION", False))
+    append_env = None
+
+    # Set the priority of the current process; child processes spawned for the
+    # build will inherit it by default.
+    if not _set_priority(priority, verbose):
+        print("--priority not supported on this platform.")
+
+    if doing_pgo:
+        if what:
+            raise Exception("Cannot specify targets (%s) in MOZ_PGO=1 builds" % what)
+        instr = command_context._spawn(BuildDriver)
+        orig_topobjdir = instr._topobjdir
+        instr._topobjdir = mozpath.join(instr._topobjdir, "instrumented")
+
+        append_env = {"MOZ_PROFILE_GENERATE": "1"}
+        status = instr.build(
+            command_context.metrics,
+            what=what,
+            jobs=jobs,
+            job_size=job_size,
+            directory=directory,
+            verbose=verbose,
+            keep_going=keep_going,
+            mach_context=command_context._mach_context,
+            append_env=append_env,
+            virtualenv_topobjdir=orig_topobjdir,
+        )
+        if status != 0:
+            return status
+
+        # Packaging the instrumented build is required to get the jarlog
+        # data.
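The MOZ_PGO path here is easiest to read as a three-phase pipeline; this schematic uses placeholder callables standing in for the BuildDriver, make, and profileserver.py invocations around it:

```python
def pgo_build(build, package, run_profileserver):
    """Schematic of the PGO flow: instrument, profile, then rebuild."""
    # Phase 1: build with profile generation instrumentation.
    status = build(append_env={"MOZ_PROFILE_GENERATE": "1"})
    if status != 0:
        return status
    # Phase 2: package the instrumented build, then exercise it to
    # collect profile data (including the jarlog mentioned here).
    status = package()
    if status != 0:
        return status
    run_profileserver()
    # Phase 3: final build that consumes the collected profiles.
    return build(append_env={"MOZ_PROFILE_USE": "1"})
```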
+ status = instr._run_make( + directory=".", + target="package", + silent=not verbose, + ensure_exit_code=False, + append_env=append_env, + ) + if status != 0: + return status + + pgo_env = os.environ.copy() + if instr.config_environment.substs.get("CC_TYPE") in ("clang", "clang-cl"): + pgo_env["LLVM_PROFDATA"] = instr.config_environment.substs.get( + "LLVM_PROFDATA" + ) + pgo_env["JARLOG_FILE"] = mozpath.join(orig_topobjdir, "jarlog/en-US.log") + pgo_cmd = [ + command_context.virtualenv_manager.python_path, + mozpath.join(command_context.topsrcdir, "build/pgo/profileserver.py"), + ] + subprocess.check_call(pgo_cmd, cwd=instr.topobjdir, env=pgo_env) + + # Set the default build to MOZ_PROFILE_USE + append_env = {"MOZ_PROFILE_USE": "1"} + + driver = command_context._spawn(BuildDriver) + return driver.build( + command_context.metrics, + what=what, + jobs=jobs, + job_size=job_size, + directory=directory, + verbose=verbose, + keep_going=keep_going, + mach_context=command_context._mach_context, + append_env=append_env, + ) + + +@Command( + "configure", + category="build", + description="Configure the tree (run configure and config.status).", + metrics_path=MOZBUILD_METRICS_PATH, + virtualenv_name="build", +) +@CommandArgument( + "options", default=None, nargs=argparse.REMAINDER, help="Configure options" +) +def configure( + command_context, + options=None, + buildstatus_messages=False, + line_handler=None, +): + from mozbuild.controller.building import BuildDriver + + command_context.log_manager.enable_all_structured_loggers() + driver = command_context._spawn(BuildDriver) + + return driver.configure( + command_context.metrics, + options=options, + buildstatus_messages=buildstatus_messages, + line_handler=line_handler, + ) + + +@Command( + "resource-usage", + category="post-build", + description="Show information about system resource usage for a build.", + virtualenv_name="build", +) +@CommandArgument( + "--address", + default="localhost", + help="Address the HTTP server should listen on.", +) +@CommandArgument( + "--port", + type=int, + default=0, + help="Port number the HTTP server should listen on.", +) +@CommandArgument( + "--browser", + default="firefox", + help="Web browser to automatically open. See webbrowser Python module.", +) +@CommandArgument("--url", help="URL of JSON document to display") +def resource_usage(command_context, address=None, port=None, browser=None, url=None): + import webbrowser + + from mozbuild.html_build_viewer import BuildViewerServer + + server = BuildViewerServer(address, port) + + if url: + server.add_resource_json_url("url", url) + else: + last = command_context._get_state_filename("build_resources.json") + if not os.path.exists(last): + print( + "Build resources not available. If you have performed a " + "build and receive this message, the psutil Python package " + "likely failed to initialize properly." + ) + return 1 + + server.add_resource_json_file("last", last) + try: + webbrowser.get(browser).open_new_tab(server.url) + except Exception: + print("Cannot get browser specified, trying the default instead.") + try: + browser = webbrowser.get().open_new_tab(server.url) + except Exception: + print("Please open %s in a browser." 
% server.url) + + print("Hit CTRL+c to stop server.") + server.run() + + +@Command( + "build-backend", + category="build", + description="Generate a backend used to build the tree.", + virtualenv_name="build", +) +@CommandArgument("-d", "--diff", action="store_true", help="Show a diff of changes.") +# It would be nice to filter the choices below based on +# conditions, but that is for another day. +@CommandArgument( + "-b", + "--backend", + nargs="+", + choices=sorted(backends), + help="Which backend to build.", +) +@CommandArgument("-v", "--verbose", action="store_true", help="Verbose output.") +@CommandArgument( + "-n", + "--dry-run", + action="store_true", + help="Do everything except writing files out.", +) +def build_backend(command_context, backend, diff=False, verbose=False, dry_run=False): + python = command_context.virtualenv_manager.python_path + config_status = os.path.join(command_context.topobjdir, "config.status") + + if not os.path.exists(config_status): + print( + "config.status not found. Please run |mach configure| " + "or |mach build| prior to building the %s build backend." % backend + ) + return 1 + + args = [python, config_status] + if backend: + args.append("--backend") + args.extend(backend) + if diff: + args.append("--diff") + if verbose: + args.append("--verbose") + if dry_run: + args.append("--dry-run") + + return command_context._run_command_in_objdir( + args=args, pass_thru=True, ensure_exit_code=False + ) diff --git a/python/mozbuild/mozbuild/chunkify.py b/python/mozbuild/mozbuild/chunkify.py new file mode 100644 index 0000000000..b2c1057450 --- /dev/null +++ b/python/mozbuild/mozbuild/chunkify.py @@ -0,0 +1,56 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +# This file is a direct clone of +# https://github.com/bhearsum/chunkify/blob/master/chunkify/__init__.py +# of version 1.2. Its license (MPL2) is contained in repo root LICENSE file. +# Please make modifications there where possible. + +from itertools import islice + + +class ChunkingError(Exception): + pass + + +def split_evenly(n, chunks): + """Split an integer into evenly distributed list + + >>> split_evenly(7, 3) + [3, 2, 2] + + >>> split_evenly(12, 3) + [4, 4, 4] + + >>> split_evenly(35, 10) + [4, 4, 4, 4, 4, 3, 3, 3, 3, 3] + + >>> split_evenly(1, 2) + Traceback (most recent call last): + ... 
+        ChunkingError: Number of chunks is greater than number
+
+    """
+    if n < chunks:
+        raise ChunkingError("Number of chunks is greater than number")
+    if n % chunks == 0:
+        # Either we can evenly split or only 1 chunk left
+        return [n // chunks] * chunks
+    # otherwise the current chunk should be a bit larger
+    max_size = n // chunks + 1
+    return [max_size] + split_evenly(n - max_size, chunks - 1)
+
+
+def chunkify(things, this_chunk, chunks):
+    if this_chunk > chunks:
+        raise ChunkingError("this_chunk is greater than total chunks")
+
+    dist = split_evenly(len(things), chunks)
+    start = sum(dist[: this_chunk - 1])
+    end = start + dist[this_chunk - 1]
+
+    try:
+        return things[start:end]
+    except TypeError:
+        return islice(things, start, end)
diff --git a/python/mozbuild/mozbuild/code_analysis/__init__.py b/python/mozbuild/mozbuild/code_analysis/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/code_analysis/mach_commands.py b/python/mozbuild/mozbuild/code_analysis/mach_commands.py
new file mode 100644
index 0000000000..ad6c352021
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/mach_commands.py
@@ -0,0 +1,1976 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import concurrent.futures
+import json
+import logging
+import multiprocessing
+import ntpath
+import os
+import pathlib
+import posixpath
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import xml.etree.ElementTree as ET
+from types import SimpleNamespace
+
+import mozpack.path as mozpath
+import six
+import yaml
+from mach.decorators import Command, CommandArgument, SubCommand
+from mach.main import Mach
+from mozversioncontrol import get_repository_object
+from six.moves import input
+
+from mozbuild import build_commands
+from mozbuild.controller.clobber import Clobberer
+from mozbuild.nodeutil import find_node_executable
+from mozbuild.util import memoize
+
+
+# Function used to run clang-format on a batch of files. It is a helper
+# function for integrating clang-format into the concurrent.futures
+# machinery used below.
+def run_one_clang_format_batch(args):
+    try:
+        subprocess.check_output(args)
+    except subprocess.CalledProcessError as e:
+        return e
+
+
+def build_repo_relative_path(abs_path, repo_path):
+    """Build path relative to repository root"""
+
+    if os.path.islink(abs_path):
+        abs_path = mozpath.realpath(abs_path)
+
+    return mozpath.relpath(abs_path, repo_path)
+
+
+def prompt_bool(prompt, limit=5):
+    """Prompts the user with prompt and requires a boolean value."""
+    from distutils.util import strtobool
+
+    for _ in range(limit):
+        try:
+            return strtobool(input(prompt + "[Y/N]\n"))
+        except ValueError:
+            print(
+                "ERROR! Please enter a valid option! Please use any of the following:"
+                " Y, N, True, False, 1, 0"
+            )
+    return False
+
+
+class StaticAnalysisSubCommand(SubCommand):
+    def __call__(self, func):
+        after = SubCommand.__call__(self, func)
+        args = [
+            CommandArgument(
+                "--verbose", "-v", action="store_true", help="Print verbose output."
+            )
+        ]
+        for arg in args:
+            after = arg(after)
+        return after
+
+
+class StaticAnalysisMonitor(object):
+    def __init__(self, srcdir, objdir, checks, total):
+        self._total = total
+        self._processed = 0
+        self._current = None
+        self._srcdir = srcdir
+
+        import copy
+
+        self._checks = copy.deepcopy(checks)
+
+        # Transform the configuration to support regexes.
+        for item in self._checks:
+            if item["name"] == "-*":
+                continue
+            item["name"] = item["name"].replace("*", ".*")
+
+        from mozbuild.compilation.warnings import WarningsCollector, WarningsDatabase
+
+        self._warnings_database = WarningsDatabase()
+
+        def on_warning(warning):
+
+            # Output paths relative to the repository root when the paths are
+            # under the repo tree.
+            warning["filename"] = build_repo_relative_path(
+                warning["filename"], self._srcdir
+            )
+
+            self._warnings_database.insert(warning)
+
+        self._warnings_collector = WarningsCollector(on_warning, objdir=objdir)
+
+    @property
+    def num_files(self):
+        return self._total
+
+    @property
+    def num_files_processed(self):
+        return self._processed
+
+    @property
+    def current_file(self):
+        return self._current
+
+    @property
+    def warnings_db(self):
+        return self._warnings_database
+
+    def on_line(self, line):
+        warning = None
+
+        try:
+            warning = self._warnings_collector.process_line(line)
+        except Exception:
+            pass
+
+        if line.find("clang-tidy") != -1:
+            filename = line.split(" ")[-1]
+            if os.path.isfile(filename):
+                self._current = build_repo_relative_path(filename, self._srcdir)
+            else:
+                self._current = None
+            self._processed = self._processed + 1
+            return (warning, False)
+        if warning is not None:
+
+            def get_check_config(checker_name):
+                # Get the entry from self._checks whose 'name' field matches.
+                for item in self._checks:
+                    if item["name"] == checker_name:
+                        return item
+
+                    # We are using a regex in order to also match
+                    # 'mozilla-.*'-like checkers.
+                    matcher = re.match(item["name"], checker_name)
+                    if matcher is not None and matcher.group(0) == checker_name:
+                        return item
+
+            check_config = get_check_config(warning["flag"])
+            if check_config is not None:
+                warning["reliability"] = check_config.get("reliability", "low")
+                warning["reason"] = check_config.get("reason")
+                warning["publish"] = check_config.get("publish", True)
+            elif warning["flag"] == "clang-diagnostic-error":
+                # For a "warning" that is flagged as "clang-diagnostic-error",
+                # set it as "publish".
+                warning["publish"] = True
+
+        return (warning, True)
+
+
+# Utilities for running C++ static analysis checks and format.
+
+# List of file extensions to consider (each should start with a dot).
+_format_include_extensions = (".cpp", ".c", ".cc", ".h", ".m", ".mm")
+# File containing all paths to exclude from formatting.
+_format_ignore_file = ".clang-format-ignore"
+
+# (TOOLS) Function return codes
+TOOLS_SUCCESS = 0
+TOOLS_FAILED_DOWNLOAD = 1
+TOOLS_UNSUPORTED_PLATFORM = 2
+TOOLS_CHECKER_NO_TEST_FILE = 3
+TOOLS_CHECKER_RETURNED_NO_ISSUES = 4
+TOOLS_CHECKER_RESULT_FILE_NOT_FOUND = 5
+TOOLS_CHECKER_DIFF_FAILED = 6
+TOOLS_CHECKER_NOT_FOUND = 7
+TOOLS_CHECKER_FAILED_FILE = 8
+TOOLS_CHECKER_LIST_EMPTY = 9
+TOOLS_GRADLE_FAILED = 10
+
+
+@Command(
+    "clang-tidy",
+    category="devenv",
+    description="Convenience alias for the static-analysis command",
+)
+def clang_tidy(command_context):
+    # If no arguments are provided, just print a help message.
+ """Detailed documentation: + https://firefox-source-docs.mozilla.org/code-quality/static-analysis/index.html + """ + mach = Mach(os.getcwd()) + + def populate_context(key=None): + if key == "topdir": + return command_context.topsrcdir + + mach.populate_context_handler = populate_context + mach.run(["static-analysis", "--help"]) + + +@Command( + "static-analysis", + category="devenv", + description="Run C++ static analysis checks using clang-tidy", +) +def static_analysis(command_context): + # If no arguments are provided, just print a help message. + """Detailed documentation: + https://firefox-source-docs.mozilla.org/code-quality/static-analysis/index.html + """ + mach = Mach(os.getcwd()) + + def populate_context(key=None): + if key == "topdir": + return command_context.topsrcdir + + mach.populate_context_handler = populate_context + mach.run(["static-analysis", "--help"]) + + +@StaticAnalysisSubCommand( + "static-analysis", "check", "Run the checks using the helper tool" +) +@CommandArgument( + "source", + nargs="*", + default=[".*"], + help="Source files to be analyzed (regex on path). " + "Can be omitted, in which case the entire code base " + "is analyzed. The source argument is ignored if " + "there is anything fed through stdin, in which case " + "the analysis is only performed on the files changed " + "in the patch streamed through stdin. This is called " + "the diff mode.", +) +@CommandArgument( + "--checks", + "-c", + default="-*", + metavar="checks", + help="Static analysis checks to enable. By default, this enables only " + "checks that are published here: https://mzl.la/2DRHeTh, but can be any " + "clang-tidy checks syntax.", +) +@CommandArgument( + "--jobs", + "-j", + default="0", + metavar="jobs", + type=int, + help="Number of concurrent jobs to run. Default is the number of CPUs.", +) +@CommandArgument( + "--strip", + "-p", + default="1", + metavar="NUM", + help="Strip NUM leading components from file names in diff mode.", +) +@CommandArgument( + "--fix", + "-f", + default=False, + action="store_true", + help="Try to autofix errors detected by clang-tidy checkers.", +) +@CommandArgument( + "--header-filter", + "-h-f", + default="", + metavar="header_filter", + help="Regular expression matching the names of the headers to " + "output diagnostics from. 
Diagnostics from the main file " + "of each translation unit are always displayed", +) +@CommandArgument( + "--output", "-o", default=None, help="Write clang-tidy output in a file" +) +@CommandArgument( + "--format", + default="text", + choices=("text", "json"), + help="Output format to write in a file", +) +@CommandArgument( + "--outgoing", + default=False, + action="store_true", + help="Run static analysis checks on outgoing files from mercurial repository", +) +def check( + command_context, + source=None, + jobs=2, + strip=1, + verbose=False, + checks="-*", + fix=False, + header_filter="", + output=None, + format="text", + outgoing=False, +): + from mozbuild.controller.building import ( + StaticAnalysisFooter, + StaticAnalysisOutputManager, + ) + + command_context._set_log_level(verbose) + command_context.activate_virtualenv() + command_context.log_manager.enable_unstructured() + + rc, clang_paths = get_clang_tools(command_context, verbose=verbose) + if rc != 0: + return rc + + if not _is_version_eligible(command_context, clang_paths): + return 1 + + rc, _compile_db, compilation_commands_path = _build_compile_db( + command_context, verbose=verbose + ) + rc = rc or _build_export(command_context, jobs=jobs, verbose=verbose) + if rc != 0: + return rc + + # Use outgoing files instead of source files + if outgoing: + repo = get_repository_object(command_context.topsrcdir) + files = repo.get_outgoing_files() + source = get_abspath_files(command_context, files) + + # Split in several chunks to avoid hitting Python's limit of 100 groups in re + compile_db = json.loads(open(_compile_db, "r").read()) + total = 0 + import re + + chunk_size = 50 + for offset in range(0, len(source), chunk_size): + source_chunks = [ + re.escape(f) for f in source[offset : offset + chunk_size].copy() + ] + name_re = re.compile("(" + ")|(".join(source_chunks) + ")") + for f in compile_db: + if name_re.search(f["file"]): + total = total + 1 + + # Filter source to remove excluded files + source = _generate_path_list(command_context, source, verbose=verbose) + + if not total or not source: + command_context.log( + logging.INFO, + "static-analysis", + {}, + "There are no files eligible for analysis. 
Please note that 'header' files "
+            "cannot be used for analysis since they do not constitute compilation units.",
+        )
+        return 0
+
+    # Escape the files from source
+    source = [re.escape(f) for f in source]
+
+    cwd = command_context.topobjdir
+
+    monitor = StaticAnalysisMonitor(
+        command_context.topsrcdir,
+        command_context.topobjdir,
+        get_clang_tidy_config(command_context).checks_with_data,
+        total,
+    )
+
+    footer = StaticAnalysisFooter(command_context.log_manager.terminal, monitor)
+
+    with StaticAnalysisOutputManager(
+        command_context.log_manager, monitor, footer
+    ) as output_manager:
+        import math
+
+        batch_size = int(math.ceil(float(len(source)) / multiprocessing.cpu_count()))
+        for i in range(0, len(source), batch_size):
+            args = _get_clang_tidy_command(
+                command_context,
+                clang_paths,
+                compilation_commands_path,
+                checks=checks,
+                header_filter=header_filter,
+                sources=source[i : (i + batch_size)],
+                jobs=jobs,
+                fix=fix,
+            )
+            rc = command_context.run_process(
+                args=args,
+                ensure_exit_code=False,
+                line_handler=output_manager.on_line,
+                cwd=cwd,
+            )
+
+        command_context.log(
+            logging.WARNING,
+            "warning_summary",
+            {"count": len(monitor.warnings_db)},
+            "{count} warnings present.",
+        )
+
+    # Write the output file
+    if output is not None:
+        output_manager.write(output, format)
+
+    return rc
+
+
+def get_abspath_files(command_context, files):
+    return [mozpath.join(command_context.topsrcdir, f) for f in files]
+
+
+def get_files_with_commands(command_context, compile_db, source):
+    """
+    Returns a list of dictionaries pairing each file path with its build command.
+    """
+
+    compile_db = json.load(open(compile_db, "r"))
+
+    commands_list = []
+
+    for f in source:
+        # It must be a C/C++ file
+        _, ext = os.path.splitext(f)
+
+        if ext.lower() not in _format_include_extensions:
+            command_context.log(
+                logging.INFO, "static-analysis", {}, "Skipping {}".format(f)
+            )
+            continue
+        file_with_abspath = os.path.join(command_context.topsrcdir, f)
+        for f in compile_db:
+            # Found the file we are looking for
+            if file_with_abspath == f["file"]:
+                commands_list.append(f)
+
+    return commands_list
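+
+# For reference, a compile_commands.json entry has roughly this shape (values
+# illustrative): {"directory": "<objdir>", "command": "clang++ -c foo.cpp ...",
+# "file": "<srcdir>/foo.cpp"}; the matching above is done on the "file" key.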
+
+
+@memoize
+def get_clang_tidy_config(command_context):
+    from mozbuild.code_analysis.utils import ClangTidyConfig
+
+    return ClangTidyConfig(command_context.topsrcdir)
+
+
+def _get_required_version(command_context):
+    version = get_clang_tidy_config(command_context).version
+    if version is None:
+        command_context.log(
+            logging.ERROR,
+            "static-analysis",
+            {},
+            "ERROR: Unable to find 'package_version' in config.yaml",
+        )
+    return version
+
+
+def _get_current_version(command_context, clang_paths):
+    # Because we ship clang-tidy and clang-format together, we are sure that
+    # these two will always share the same version. Thus, in order to determine
+    # that the version is compatible, we only need to check one of them; we go
+    # with clang-format.
+    cmd = [clang_paths._clang_format_path, "--version"]
+    version_info = None
+    try:
+        version_info = (
+            subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            .decode("utf-8")
+            .strip()
+        )
+
+        if "MOZ_AUTOMATION" in os.environ:
+            # Only show it in the CI
+            command_context.log(
+                logging.INFO,
+                "static-analysis",
+                {},
+                "{} Version = {} ".format(clang_paths._clang_format_path, version_info),
+            )
+
+    except subprocess.CalledProcessError as e:
+        command_context.log(
+            logging.ERROR,
+            "static-analysis",
+            {},
+            "Error determining the version of the clang-tidy/format binary, please see the "
+            "attached exception: \n{}".format(e.output),
+        )
+    return version_info
+
+
+def _is_version_eligible(command_context, clang_paths, log_error=True):
+    version = _get_required_version(command_context)
+    if version is None:
+        return False
+
+    current_version = _get_current_version(command_context, clang_paths)
+    if current_version is None:
+        return False
+    version = "clang-format version " + version
+    if version in current_version:
+        return True
+
+    if log_error:
+        command_context.log(
+            logging.ERROR,
+            "static-analysis",
+            {},
+            "ERROR: You're using an old or incorrect version ({}) of the clang-format binary. "
+            "Please update to a more recent one (at least > {}) "
+            "by running: './mach bootstrap' ".format(
+                _get_current_version(command_context, clang_paths),
+                _get_required_version(command_context),
+            ),
+        )
+
+    return False
+
+
+def _get_clang_tidy_command(
+    command_context,
+    clang_paths,
+    compilation_commands_path,
+    checks,
+    header_filter,
+    sources,
+    jobs,
+    fix,
+):
+
+    if checks == "-*":
+        checks = ",".join(get_clang_tidy_config(command_context).checks)
+
+    common_args = [
+        "-clang-tidy-binary",
+        clang_paths._clang_tidy_path,
+        "-clang-apply-replacements-binary",
+        clang_paths._clang_apply_replacements,
+        "-checks=%s" % checks,
+        "-extra-arg=-DMOZ_CLANG_PLUGIN",
+    ]
+
+    # The header-filter flag is passed in order to limit the diagnostic messages
+    # to the specified header files only. When no value is specified, the default
+    # value is the list of sources, in order to limit diagnostics to the source
+    # files or folders.
+    common_args += [
+        "-header-filter=%s"
+        % (header_filter if len(header_filter) else "|".join(sources))
+    ]
+
+    # From our configuration file, config.yaml, we build the configuration list
+    # for the checkers that are used. These configuration options are used to
+    # better fit the checkers to our code.
+    cfg = get_clang_tidy_config(command_context).checks_config
+    if cfg:
+        common_args += ["-config=%s" % yaml.dump(cfg)]
+
+    if fix:
+        common_args += ["-fix"]
+
+    return (
+        [
+            command_context.virtualenv_manager.python_path,
+            clang_paths._run_clang_tidy_path,
+            "-j",
+            str(jobs),
+            "-p",
+            compilation_commands_path,
+        ]
+        + common_args
+        + sources
+    )
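+
+# The assembled invocation looks roughly like this (paths and values illustrative):
+#   <python> <run-clang-tidy> -j 8 -p <objdir>/static-analysis
+#       -clang-tidy-binary .../clang-tidy -clang-apply-replacements-binary ...
+#       -checks=<checks> -extra-arg=-DMOZ_CLANG_PLUGIN -header-filter=... <sources>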
+ " This option is only valid on automation environments.", +) +@CommandArgument( + "checker_names", + nargs="*", + default=[], + help="Checkers that are going to be auto-tested.", +) +def autotest( + command_context, + verbose=False, + dump_results=False, + intree_tool=False, + checker_names=[], +): + # If 'dump_results' is True than we just want to generate the issues files for each + # checker in particulat and thus 'force_download' becomes 'False' since we want to + # do this on a local trusted clang-tidy package. + command_context._set_log_level(verbose) + command_context.activate_virtualenv() + dump_results = dump_results + + force_download = not dump_results + + # Configure the tree or download clang-tidy package, depending on the option that we choose + if intree_tool: + clang_paths = SimpleNamespace() + if "MOZ_AUTOMATION" not in os.environ: + command_context.log( + logging.INFO, + "static-analysis", + {}, + "The `autotest` with `--intree-tool` can only be ran in automation.", + ) + return 1 + if "MOZ_FETCHES_DIR" not in os.environ: + command_context.log( + logging.INFO, + "static-analysis", + {}, + "`MOZ_FETCHES_DIR` is missing from the environment variables.", + ) + return 1 + + _, config, _ = _get_config_environment(command_context) + clang_tools_path = os.environ["MOZ_FETCHES_DIR"] + clang_paths._clang_tidy_path = mozpath.join( + clang_tools_path, + "clang-tidy", + "bin", + "clang-tidy" + config.substs.get("HOST_BIN_SUFFIX", ""), + ) + clang_paths._clang_format_path = mozpath.join( + clang_tools_path, + "clang-tidy", + "bin", + "clang-format" + config.substs.get("HOST_BIN_SUFFIX", ""), + ) + clang_paths._clang_apply_replacements = mozpath.join( + clang_tools_path, + "clang-tidy", + "bin", + "clang-apply-replacements" + config.substs.get("HOST_BIN_SUFFIX", ""), + ) + clang_paths._run_clang_tidy_path = mozpath.join( + clang_tools_path, "clang-tidy", "bin", "run-clang-tidy" + ) + clang_paths._clang_format_diff = mozpath.join( + clang_tools_path, "clang-tidy", "share", "clang", "clang-format-diff.py" + ) + + # Ensure that clang-tidy is present + rc = not os.path.exists(clang_paths._clang_tidy_path) + else: + rc, clang_paths = get_clang_tools( + command_context, force=force_download, verbose=verbose + ) + + if rc != 0: + command_context.log( + logging.ERROR, + "ERROR: static-analysis", + {}, + "ERROR: clang-tidy unable to locate package.", + ) + return TOOLS_FAILED_DOWNLOAD + + clang_paths._clang_tidy_base_path = mozpath.join( + command_context.topsrcdir, "tools", "clang-tidy" + ) + + # For each checker run it + platform, _ = command_context.platform + + if platform not in get_clang_tidy_config(command_context).platforms: + command_context.log( + logging.ERROR, + "static-analysis", + {}, + "ERROR: RUNNING: clang-tidy autotest for platform {} not supported.".format( + platform + ), + ) + return TOOLS_UNSUPORTED_PLATFORM + + max_workers = multiprocessing.cpu_count() + + command_context.log( + logging.INFO, + "static-analysis", + {}, + "RUNNING: clang-tidy autotest for platform {0} with {1} workers.".format( + platform, max_workers + ), + ) + + # List all available checkers + cmd = [clang_paths._clang_tidy_path, "-list-checks", "-checks=*"] + clang_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode( + "utf-8" + ) + available_checks = clang_output.split("\n")[1:] + clang_tidy_checks = [c.strip() for c in available_checks if c] + + # Build the dummy compile_commands.json + compilation_commands_path = _create_temp_compilation_db(command_context) + checkers_test_batch 
= [] + checkers_results = [] + with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor: + futures = [] + for item in get_clang_tidy_config(command_context).checks_with_data: + # Skip if any of the following statements is true: + # 1. Checker attribute 'publish' is False. + not_published = not bool(item.get("publish", True)) + # 2. Checker has restricted-platforms and current platform is not of them. + ignored_platform = ( + "restricted-platforms" in item + and platform not in item["restricted-platforms"] + ) + # 3. Checker name is mozilla-* or -*. + ignored_checker = item["name"] in ["mozilla-*", "-*"] + # 4. List checker_names is passed and the current checker is not part of the + # list or 'publish' is False + checker_not_in_list = checker_names and ( + item["name"] not in checker_names or not_published + ) + if ( + not_published + or ignored_platform + or ignored_checker + or checker_not_in_list + ): + continue + checkers_test_batch.append(item["name"]) + futures.append( + executor.submit( + _verify_checker, + command_context, + clang_paths, + compilation_commands_path, + dump_results, + clang_tidy_checks, + item, + checkers_results, + ) + ) + + error_code = TOOLS_SUCCESS + for future in concurrent.futures.as_completed(futures): + # Wait for every task to finish + ret_val = future.result() + if ret_val != TOOLS_SUCCESS: + # We are interested only in one error and we don't break + # the execution of for loop since we want to make sure that all + # tasks finished. + error_code = ret_val + + if error_code != TOOLS_SUCCESS: + + command_context.log( + logging.INFO, + "static-analysis", + {}, + "FAIL: the following clang-tidy check(s) failed:", + ) + for failure in checkers_results: + checker_error = failure["checker-error"] + checker_name = failure["checker-name"] + info1 = failure["info1"] + info2 = failure["info2"] + info3 = failure["info3"] + + message_to_log = "" + if checker_error == TOOLS_CHECKER_NOT_FOUND: + message_to_log = ( + "\tChecker " + "{} not present in this clang-tidy version.".format( + checker_name + ) + ) + elif checker_error == TOOLS_CHECKER_NO_TEST_FILE: + message_to_log = ( + "\tChecker " + "{0} does not have a test file - {0}.cpp".format(checker_name) + ) + elif checker_error == TOOLS_CHECKER_RETURNED_NO_ISSUES: + message_to_log = ( + "\tChecker {0} did not find any issues in its test file, " + "clang-tidy output for the run is:\n{1}" + ).format(checker_name, info1) + elif checker_error == TOOLS_CHECKER_RESULT_FILE_NOT_FOUND: + message_to_log = ( + "\tChecker {0} does not have a result file - {0}.json" + ).format(checker_name) + elif checker_error == TOOLS_CHECKER_DIFF_FAILED: + message_to_log = ( + "\tChecker {0}\nExpected: {1}\n" + "Got: {2}\n" + "clang-tidy output for the run is:\n" + "{3}" + ).format(checker_name, info1, info2, info3) + + print("\n" + message_to_log) + + # Also delete the tmp folder + shutil.rmtree(compilation_commands_path) + return error_code + + # Run the analysis on all checkers at the same time only if we don't dump results. + if not dump_results: + ret_val = _run_analysis_batch( + command_context, + clang_paths, + compilation_commands_path, + checkers_test_batch, + ) + if ret_val != TOOLS_SUCCESS: + shutil.rmtree(compilation_commands_path) + return ret_val + + command_context.log( + logging.INFO, "static-analysis", {}, "SUCCESS: clang-tidy all tests passed." 
+ ) + # Also delete the tmp folder + shutil.rmtree(compilation_commands_path) + + +def _run_analysis( + command_context, + clang_paths, + compilation_commands_path, + checks, + header_filter, + sources, + jobs=1, + fix=False, + print_out=False, +): + cmd = _get_clang_tidy_command( + command_context, + clang_paths, + compilation_commands_path, + checks=checks, + header_filter=header_filter, + sources=sources, + jobs=jobs, + fix=fix, + ) + + try: + clang_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode( + "utf-8" + ) + except subprocess.CalledProcessError as e: + print(e.output) + return None + return _parse_issues(command_context, clang_output), clang_output + + +def _run_analysis_batch(command_context, clang_paths, compilation_commands_path, items): + command_context.log( + logging.INFO, + "static-analysis", + {}, + "RUNNING: clang-tidy checker batch analysis.", + ) + if not len(items): + command_context.log( + logging.ERROR, + "static-analysis", + {}, + "ERROR: clang-tidy checker list is empty!", + ) + return TOOLS_CHECKER_LIST_EMPTY + + issues, clang_output = _run_analysis( + command_context, + clang_paths, + compilation_commands_path, + checks="-*," + ",".join(items), + header_filter="", + sources=[ + mozpath.join(clang_paths._clang_tidy_base_path, "test", checker) + ".cpp" + for checker in items + ], + print_out=True, + ) + + if issues is None: + return TOOLS_CHECKER_FAILED_FILE + + failed_checks = [] + failed_checks_baseline = [] + for checker in items: + test_file_path_json = ( + mozpath.join(clang_paths._clang_tidy_base_path, "test", checker) + ".json" + ) + # Read the pre-determined issues + baseline_issues = _get_autotest_stored_issues(test_file_path_json) + + # We also stored the 'reliability' index so strip that from the baseline_issues + baseline_issues[:] = [ + item for item in baseline_issues if "reliability" not in item + ] + + found = all([element_base in issues for element_base in baseline_issues]) + + if not found: + failed_checks.append(checker) + failed_checks_baseline.append(baseline_issues) + + if len(failed_checks) > 0: + command_context.log( + logging.ERROR, + "static-analysis", + {}, + "ERROR: The following check(s) failed for bulk analysis: " + + " ".join(failed_checks), + ) + + for failed_check, baseline_issue in zip(failed_checks, failed_checks_baseline): + print( + "\tChecker {0} expect following results: \n\t\t{1}".format( + failed_check, baseline_issue + ) + ) + + print( + "This is the output generated by clang-tidy for the bulk build:\n{}".format( + clang_output + ) + ) + return TOOLS_CHECKER_DIFF_FAILED + + return TOOLS_SUCCESS + + +def _create_temp_compilation_db(command_context): + directory = tempfile.mkdtemp(prefix="cc") + with open(mozpath.join(directory, "compile_commands.json"), "w") as file_handler: + compile_commands = [] + director = mozpath.join( + command_context.topsrcdir, "tools", "clang-tidy", "test" + ) + for item in get_clang_tidy_config(command_context).checks: + if item in ["-*", "mozilla-*"]: + continue + file = item + ".cpp" + element = {} + element["directory"] = director + element["command"] = "cpp -std=c++17 " + file + element["file"] = mozpath.join(director, file) + compile_commands.append(element) + + json.dump(compile_commands, file_handler) + file_handler.flush() + + return directory + + +@StaticAnalysisSubCommand( + "static-analysis", "install", "Install the static analysis helper tool" +) +@CommandArgument( + "source", + nargs="?", + type=str, + help="Where to fetch a local archive containing the 
static-analysis and "
+    "format helper tool. "
+    "It will be installed in ~/.mozbuild/clang-tools. "
+    "Can be omitted, in which case the latest clang-tools "
+    "helper for the platform would be automatically detected and installed.",
+)
+@CommandArgument(
+    "--skip-cache",
+    action="store_true",
+    help="Skip all local caches to force re-fetching the helper tool.",
+    default=False,
+)
+@CommandArgument(
+    "--force",
+    action="store_true",
+    help="Force re-install even though the tool exists in mozbuild.",
+    default=False,
+)
+def install(
+    command_context,
+    source=None,
+    skip_cache=False,
+    force=False,
+    verbose=False,
+):
+    command_context._set_log_level(verbose)
+    rc, _ = get_clang_tools(
+        command_context,
+        force=force,
+        skip_cache=skip_cache,
+        source=source,
+        verbose=verbose,
+    )
+    return rc
+
+
+@StaticAnalysisSubCommand(
+    "static-analysis",
+    "clear-cache",
+    "Delete local helpers and reset static analysis helper tool cache",
+)
+def clear_cache(command_context, verbose=False):
+    command_context._set_log_level(verbose)
+    rc, _ = get_clang_tools(
+        command_context,
+        force=True,
+        download_if_needed=True,
+        skip_cache=True,
+        verbose=verbose,
+    )
+
+    if rc != 0:
+        return rc
+
+    from mozbuild.artifact_commands import artifact_clear_cache
+
+    return artifact_clear_cache(command_context)
+
+
+@StaticAnalysisSubCommand(
+    "static-analysis",
+    "print-checks",
+    "Print a list of the static analysis checks performed by default",
+)
+def print_checks(command_context, verbose=False):
+    command_context._set_log_level(verbose)
+    rc, clang_paths = get_clang_tools(command_context, verbose=verbose)
+
+    if rc != 0:
+        return rc
+
+    args = [
+        clang_paths._clang_tidy_path,
+        "-list-checks",
+        "-checks=%s" % get_clang_tidy_config(command_context).checks,
+    ]
+
+    return command_context.run_process(args=args, pass_thru=True)
+
+
+@Command(
+    "prettier-format",
+    category="misc",
+    description="Run prettier on current changes",
+)
+@CommandArgument(
+    "--path",
+    "-p",
+    nargs=1,
+    required=True,
+    help="Specify the path to reformat to stdout.",
+)
+@CommandArgument(
+    "--assume-filename",
+    "-a",
+    nargs=1,
+    required=True,
+    help="This option is usually used in the context of hg-formatsource. "
+    "When reading from stdin, Prettier assumes this "
+    "filename to decide which style and parser to use.",
+)
+def prettier_format(command_context, path, assume_filename):
+    # With assume_filename we want to have stdout clean since the result of the
+    # format will be redirected to stdout.
+
+    binary, _ = find_node_executable()
+    prettier = os.path.join(
+        command_context.topsrcdir, "node_modules", "prettier", "bin-prettier.js"
+    )
+    path = os.path.join(command_context.topsrcdir, path[0])
+
+    # Bug 1564824. Prettier fails on patches with moved files where the
+    # original directory also does not exist.
+    assume_dir = os.path.dirname(
+        os.path.join(command_context.topsrcdir, assume_filename[0])
+    )
+    assume_filename = assume_filename[0] if os.path.isdir(assume_dir) else path
+
+    # We use --stdin-filepath in order to better determine the path for
+    # the prettier formatter when it is run outside of the repo, for example
+    # by the extension hg-formatsource.
+    args = [binary, prettier, "--stdin-filepath", assume_filename]
+
+    process = subprocess.Popen(args, stdin=subprocess.PIPE)
+    with open(path, "rb") as fin:
+        process.stdin.write(fin.read())
+        process.stdin.close()
+        process.wait()
+        return process.returncode
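+
+# Example invocation (illustrative path): piping browser/foo.js through
+#   ./mach prettier-format -p browser/foo.js -a browser/foo.js
+# prints the formatted result to stdout without modifying the file in place.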
+
+
+@Command(
+    "clang-format",
+    category="misc",
+    description="Run clang-format on current changes",
+)
+@CommandArgument(
+    "--show",
+    "-s",
+    action="store_const",
+    const="stdout",
+    dest="output_path",
+    help="Show diff output on stdout instead of applying changes",
+)
+@CommandArgument(
+    "--assume-filename",
+    "-a",
+    nargs=1,
+    default=None,
+    help="This option is usually used in the context of hg-formatsource. "
+    "When reading from stdin, clang-format assumes this "
+    "filename to look for a style config file (with "
+    "-style=file) and to determine the language. When "
+    "specifying this option only one file should be used "
+    "as an input and the output will be forwarded to stdout. "
+    "This option also skips the download of the clang-tools "
+    "and assumes the package is already located in its default "
+    "location",
+)
+@CommandArgument(
+    "--path", "-p", nargs="+", default=None, help="Specify the path(s) to reformat"
+)
+@CommandArgument(
+    "--commit",
+    "-c",
+    default=None,
+    help="Specify a commit to reformat from. "
+    "For git you can also pass a range of commits (foo..bar) "
+    "to format all of them at the same time.",
+)
+@CommandArgument(
+    "--output",
+    "-o",
+    default=None,
+    dest="output_path",
+    help="Specify a file handle to write clang-format raw output instead of "
+    "applying changes. This can be stdout or a file path.",
+)
+@CommandArgument(
+    "--format",
+    "-f",
+    choices=("diff", "json"),
+    default="diff",
+    dest="output_format",
+    help="Specify the output format used: diff is the raw patch provided by "
+    "clang-format, json is a list of atomic changes to process.",
+)
+@CommandArgument(
+    "--outgoing",
+    default=False,
+    action="store_true",
+    help="Run clang-format on outgoing files from mercurial repository.",
+)
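+# Typical invocations (illustrative paths):
+#   ./mach clang-format --outgoing
+#   ./mach clang-format -p dom/base/nsDocument.cpp --show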
+def clang_format(
+    command_context,
+    assume_filename,
+    path,
+    commit,
+    output_path=None,
+    output_format="diff",
+    verbose=False,
+    outgoing=False,
+):
+    # Run clang-format or clang-format-diff on the local changes
+    # or files/directories
+    if path is None and outgoing:
+        repo = get_repository_object(command_context.topsrcdir)
+        path = repo.get_outgoing_files()
+
+    if path:
+        # Create the full path list
+        def path_maker(f_name):
+            return os.path.join(command_context.topsrcdir, f_name)
+
+        path = map(path_maker, path)
+
+    os.chdir(command_context.topsrcdir)
+
+    # Load the output file handle, either stdout or a file handle in write mode
+    output = None
+    if output_path is not None:
+        output = sys.stdout if output_path == "stdout" else open(output_path, "w")
+
+    # With assume_filename we want to have stdout clean since the result of the
+    # format will be redirected to stdout. Only in case of error do we
+    # write something to stdout.
+    # We don't actually want to get the clang-tools here, since in some
+    # scenarios we want to do this in parallel, so we rely on the fact that
+    # the tools have already been downloaded via './mach bootstrap' or
+    # directly via './mach static-analysis install'.
+    if assume_filename:
+        rc, clang_paths = _set_clang_tools_paths(command_context)
+        if rc != 0:
+            print("clang-format: Unable to set path to clang-format tools.")
+            return rc
+
+        if not _do_clang_tools_exist(clang_paths):
+            print("clang-format: Unable to locate clang-format tools.")
+            return 1
+
+        if not _is_version_eligible(command_context, clang_paths):
+            return 1
+    else:
+        rc, clang_paths = get_clang_tools(command_context, verbose=verbose)
+        if rc != 0:
+            return rc
+
+    if path is None:
+        return _run_clang_format_diff(
+            command_context,
+            clang_paths._clang_format_diff,
+            clang_paths._clang_format_path,
+            commit,
+            output,
+        )
+
+    if assume_filename:
+        return _run_clang_format_in_console(
+            command_context, clang_paths._clang_format_path, path, assume_filename
+        )
+
+    return _run_clang_format_path(
+        command_context, clang_paths._clang_format_path, path, output, output_format
+    )
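+
+# Each checker under tools/clang-tidy/test/ ships a <name>.cpp test file and a
+# <name>.json baseline, e.g. modernize-use-nullptr.cpp with
+# modernize-use-nullptr.json (checker name illustrative); _verify_checker below
+# compares a fresh clang-tidy run on the .cpp against the stored .json issues.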
+ checker_error["checker-error"] = TOOLS_CHECKER_RESULT_FILE_NOT_FOUND + checkers_results.append(checker_error) + return TOOLS_CHECKER_RESULT_FILE_NOT_FOUND + + # Read the pre-determined issues + baseline_issues = _get_autotest_stored_issues(test_file_path_json) + + # Compare the two lists + if issues != baseline_issues: + checker_error["checker-error"] = TOOLS_CHECKER_DIFF_FAILED + checker_error["info1"] = baseline_issues + checker_error["info2"] = issues + checker_error["info3"] = clang_output + checkers_results.append(checker_error) + return TOOLS_CHECKER_DIFF_FAILED + + return TOOLS_SUCCESS + + +def _build_autotest_result(file, issues): + with open(file, "w") as f: + f.write(issues) + + +def _get_autotest_stored_issues(file): + with open(file) as f: + return json.load(f) + + +def _parse_issues(command_context, clang_output): + """ + Parse clang-tidy output into structured issues + """ + + # Limit clang output parsing to 'Enabled checks:' + end = re.search(r"^Enabled checks:\n", clang_output, re.MULTILINE) + if end is not None: + clang_output = clang_output[: end.start() - 1] + + platform, _ = command_context.platform + re_strip_colors = re.compile(r"\x1b\[[\d;]+m", re.MULTILINE) + filtered = re_strip_colors.sub("", clang_output) + # Starting with clang 8, for the diagnostic messages we have multiple `LF CR` + # in order to be compatiable with msvc compiler format, and for this + # we are not interested to match the end of line. + regex_string = r"(.+):(\d+):(\d+): (warning|error): ([^\[\]\n]+)(?: \[([\.\w-]+)\])" + + # For non 'win' based platforms we also need the 'end of the line' regex + if platform not in ("win64", "win32"): + regex_string += "?$" + + regex_header = re.compile(regex_string, re.MULTILINE) + + # Sort headers by positions + headers = sorted(regex_header.finditer(filtered), key=lambda h: h.start()) + issues = [] + for _, header in enumerate(headers): + header_group = header.groups() + element = [header_group[3], header_group[4], header_group[5]] + issues.append(element) + return issues + + +def _get_config_environment(command_context): + ran_configure = False + config = None + + try: + config = command_context.config_environment + except Exception: + command_context.log( + logging.WARNING, + "static-analysis", + {}, + "Looks like configure has not run yet, running it now...", + ) + + clobber = Clobberer(command_context.topsrcdir, command_context.topobjdir) + + if clobber.clobber_needed(): + choice = prompt_bool( + "Configuration has changed and Clobber is needed. " + "Do you want to proceed?" 
+
+
+def _get_config_environment(command_context):
+    ran_configure = False
+    config = None
+
+    try:
+        config = command_context.config_environment
+    except Exception:
+        command_context.log(
+            logging.WARNING,
+            "static-analysis",
+            {},
+            "Looks like configure has not run yet, running it now...",
+        )
+
+        clobber = Clobberer(command_context.topsrcdir, command_context.topobjdir)
+
+        if clobber.clobber_needed():
+            choice = prompt_bool(
+                "Configuration has changed and Clobber is needed. "
+                "Do you want to proceed?"
+            )
+            if not choice:
+                command_context.log(
+                    logging.ERROR,
+                    "static-analysis",
+                    {},
+                    "ERROR: Without Clobber we cannot continue execution!",
+                )
+                return (1, None, None)
+            os.environ["AUTOCLOBBER"] = "1"
+
+        rc = build_commands.configure(command_context)
+        if rc != 0:
+            return (rc, config, ran_configure)
+        ran_configure = True
+        try:
+            config = command_context.config_environment
+        except Exception:
+            pass
+
+    return (0, config, ran_configure)
+
+
+def _build_compile_db(command_context, verbose=False):
+    compilation_commands_path = mozpath.join(
+        command_context.topobjdir, "static-analysis"
+    )
+    compile_db = mozpath.join(compilation_commands_path, "compile_commands.json")
+
+    if os.path.exists(compile_db):
+        return 0, compile_db, compilation_commands_path
+
+    rc, config, ran_configure = _get_config_environment(command_context)
+    if rc != 0:
+        return rc, compile_db, compilation_commands_path
+
+    if ran_configure:
+        # Configure may have created the compilation database if the
+        # mozconfig enables building the CompileDB backend by default,
+        # so we recurse to see if the file exists once again.
+        return _build_compile_db(command_context, verbose=verbose)
+
+    if config:
+        print(
+            "Looks like a clang compilation database has not been "
+            "created yet, creating it now..."
+        )
+        rc = build_commands.build_backend(
+            command_context, ["StaticAnalysis"], verbose=verbose
+        )
+        if rc != 0:
+            return rc, compile_db, compilation_commands_path
+    assert os.path.exists(compile_db)
+    return 0, compile_db, compilation_commands_path
+
+
+def _build_export(command_context, jobs, verbose=False):
+    def on_line(line):
+        command_context.log(logging.INFO, "build_output", {"line": line}, "{line}")
+
+    # First install what we can through install manifests.
+    rc = command_context._run_make(
+        directory=command_context.topobjdir,
+        target="pre-export",
+        line_handler=None,
+        silent=not verbose,
+    )
+    if rc != 0:
+        return rc
+
+    # Then build the rest of the build dependencies by running the full
+    # export target, because we can't do anything better.
+ for target in ("export", "pre-compile"): + rc = command_context._run_make( + directory=command_context.topobjdir, + target=target, + line_handler=None, + silent=not verbose, + num_jobs=jobs, + ) + if rc != 0: + return rc + + return 0 + + +def _set_clang_tools_paths(command_context): + rc, config, _ = _get_config_environment(command_context) + + clang_paths = SimpleNamespace() + + if rc != 0: + return rc, clang_paths + + clang_paths._clang_tools_path = mozpath.join( + command_context._mach_context.state_dir, "clang-tools" + ) + clang_paths._clang_tidy_path = mozpath.join( + clang_paths._clang_tools_path, + "clang-tidy", + "bin", + "clang-tidy" + config.substs.get("HOST_BIN_SUFFIX", ""), + ) + clang_paths._clang_format_path = mozpath.join( + clang_paths._clang_tools_path, + "clang-tidy", + "bin", + "clang-format" + config.substs.get("HOST_BIN_SUFFIX", ""), + ) + clang_paths._clang_apply_replacements = mozpath.join( + clang_paths._clang_tools_path, + "clang-tidy", + "bin", + "clang-apply-replacements" + config.substs.get("HOST_BIN_SUFFIX", ""), + ) + clang_paths._run_clang_tidy_path = mozpath.join( + clang_paths._clang_tools_path, + "clang-tidy", + "bin", + "run-clang-tidy", + ) + clang_paths._clang_format_diff = mozpath.join( + clang_paths._clang_tools_path, + "clang-tidy", + "share", + "clang", + "clang-format-diff.py", + ) + return 0, clang_paths + + +def _do_clang_tools_exist(clang_paths): + return ( + os.path.exists(clang_paths._clang_tidy_path) + and os.path.exists(clang_paths._clang_format_path) + and os.path.exists(clang_paths._clang_apply_replacements) + and os.path.exists(clang_paths._run_clang_tidy_path) + ) + + +def get_clang_tools( + command_context, + force=False, + skip_cache=False, + source=None, + download_if_needed=True, + verbose=False, +): + + rc, clang_paths = _set_clang_tools_paths(command_context) + + if rc != 0: + return rc, clang_paths + + if ( + _do_clang_tools_exist(clang_paths) + and _is_version_eligible(command_context, clang_paths, log_error=False) + and not force + ): + return 0, clang_paths + + if os.path.isdir(clang_paths._clang_tools_path) and download_if_needed: + # The directory exists, perhaps it's corrupted? Delete it + # and start from scratch. 
+ shutil.rmtree(clang_paths._clang_tools_path) + return get_clang_tools( + command_context, + force=force, + skip_cache=skip_cache, + source=source, + verbose=verbose, + download_if_needed=download_if_needed, + ) + + # Create base directory where we store clang binary + os.mkdir(clang_paths._clang_tools_path) + + if source: + return _get_clang_tools_from_source(command_context, clang_paths, source) + + if not download_if_needed: + return 0, clang_paths + + from mozbuild.bootstrap import bootstrap_toolchain + + bootstrap_toolchain("clang-tools/clang-tidy") + + return 0 if _is_version_eligible(command_context, clang_paths) else 1, clang_paths + + +def _get_clang_tools_from_source(command_context, clang_paths, filename): + from mozbuild.action.tooltool import unpack_file + + clang_tidy_path = mozpath.join( + command_context._mach_context.state_dir, "clang-tools" + ) + + currentWorkingDir = os.getcwd() + os.chdir(clang_tidy_path) + + unpack_file(filename) + + # Change back the cwd + os.chdir(currentWorkingDir) + + clang_path = mozpath.join(clang_tidy_path, "clang") + + if not os.path.isdir(clang_path): + raise Exception("Extracted the archive but didn't find the expected output") + + assert os.path.exists(clang_paths._clang_tidy_path) + assert os.path.exists(clang_paths._clang_format_path) + assert os.path.exists(clang_paths._clang_apply_replacements) + assert os.path.exists(clang_paths._run_clang_tidy_path) + return 0, clang_paths + + +def _get_clang_format_diff_command(command_context, commit): + if command_context.repository.name == "hg": + args = ["hg", "diff", "-U0"] + if commit: + args += ["-c", commit] + else: + args += ["-r", ".^"] + for dot_extension in _format_include_extensions: + args += ["--include", "glob:**{0}".format(dot_extension)] + args += ["--exclude", "listfile:{0}".format(_format_ignore_file)] + else: + commit_range = "HEAD" # All uncommitted changes. + if commit: + commit_range = ( + commit if ".." 
in commit else "{}~..{}".format(commit, commit)
+            )
+        args = ["git", "diff", "--no-color", "-U0", commit_range, "--"]
+        for dot_extension in _format_include_extensions:
+            args += ["*{0}".format(dot_extension)]
+        # git-diff doesn't support an 'exclude-from-files' param, but it allows
+        # adding individual exclude patterns since v1.9, see
+        # https://git-scm.com/docs/gitglossary#gitglossary-aiddefpathspecapathspec
+        with open(_format_ignore_file, "rb") as exclude_pattern_file:
+            for pattern in exclude_pattern_file.readlines():
+                pattern = six.ensure_str(pattern.rstrip())
+                pattern = pattern.replace(".*", "**")
+                if not pattern or pattern.startswith("#"):
+                    continue  # empty or comment
+                magics = ["exclude"]
+                if pattern.startswith("^"):
+                    magics += ["top"]
+                    pattern = pattern[1:]
+                args += [":({0}){1}".format(",".join(magics), pattern)]
+    return args
+
+
+def _run_clang_format_diff(
+    command_context, clang_format_diff, clang_format, commit, output_file
+):
+    # Run clang-format on the diff
+    # Note that this will potentially miss a lot of things
+    from subprocess import PIPE, CalledProcessError, Popen, check_output
+
+    diff_process = Popen(
+        _get_clang_format_diff_command(command_context, commit), stdout=PIPE
+    )
+    args = [sys.executable, clang_format_diff, "-p1", "-binary=%s" % clang_format]
+
+    if not output_file:
+        args.append("-i")
+    try:
+        output = check_output(args, stdin=diff_process.stdout)
+        if output_file:
+            # We want to print the diffs
+            print(output, file=output_file)
+
+        return 0
+    except CalledProcessError as e:
+        # Something went wrong
+        print("clang-format: An error occurred while running clang-format-diff.")
+        return e.returncode
+
+
+def _is_ignored_path(command_context, ignored_dir_re, f):
+    # The path needs to be relative to the src root
+    root_dir = command_context.topsrcdir + os.sep
+    if f.startswith(root_dir):
+        f = f[len(root_dir) :]
+    # the ignored_dir_re regex uses / on all platforms
+    return re.match(ignored_dir_re, f.replace(os.sep, "/"))
+
+
+def _generate_path_list(command_context, paths, verbose=True):
+    path_to_third_party = os.path.join(command_context.topsrcdir, _format_ignore_file)
+    ignored_dir = []
+    with open(path_to_third_party, "r") as fh:
+        for line in fh:
+            # Remove comments and empty lines
+            if line.startswith("#") or len(line.strip()) == 0:
+                continue
+            # The regexp is to make sure we are managing relative paths
+            ignored_dir.append(r"^[\./]*" + line.rstrip())
+
+    # Generate the combined regex
+    ignored_dir_re = "(%s)" % "|".join(ignored_dir)
+    extensions = _format_include_extensions
+
+    path_list = []
+    for f in paths:
+        if _is_ignored_path(command_context, ignored_dir_re, f):
+            # Early exit if we have provided an ignored directory
+            if verbose:
+                print("static-analysis: Ignored third party code '{0}'".format(f))
+            continue
+
+        if os.path.isdir(f):
+            # Processing a directory, generate the file list
+            for folder, subs, files in os.walk(f):
+                subs.sort()
+                for filename in sorted(files):
+                    f_in_dir = posixpath.join(pathlib.Path(folder).as_posix(), filename)
+                    if f_in_dir.endswith(extensions) and not _is_ignored_path(
+                        command_context, ignored_dir_re, f_in_dir
+                    ):
+                        # Supported extension and accepted path
+                        path_list.append(f_in_dir)
+        else:
+            # Make sure that the file exists and has a supported extension
+            if os.path.isfile(f) and f.endswith(extensions):
+                path_list.append(f)
+
+    return path_list
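+
+# Sketch of the matching behavior (pattern illustrative): a .clang-format-ignore
+# line such as "third_party/.*" is turned into the regex r"^[\./]*third_party/.*"
+# and matched against paths relative to the source root, with os.sep normalized
+# to "/".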
+
+
+def _run_clang_format_in_console(command_context, clang_format, paths, assume_filename):
+    path_list = _generate_path_list(command_context, assume_filename, False)
+
+    if path_list == []:
+        return 0
+
+    # We use -assume-filename in order to better determine the path for
+    # the .clang-format when it is run outside of the repo, for example
+    # by the extension hg-formatsource
+    args = [clang_format, "-assume-filename={}".format(assume_filename[0])]
+
+    process = subprocess.Popen(args, stdin=subprocess.PIPE)
+    with open(paths[0], "r") as fin:
+        process.stdin.write(fin.read())
+        process.stdin.close()
+        process.wait()
+        return process.returncode
+
+
+def _get_clang_format_cfg(command_context, current_dir):
+    clang_format_cfg_path = mozpath.join(current_dir, ".clang-format")
+
+    if os.path.exists(clang_format_cfg_path):
+        # Return the found path for .clang-format
+        return clang_format_cfg_path
+
+    if current_dir != command_context.topsrcdir:
+        # Go to the parent directory
+        return _get_clang_format_cfg(command_context, os.path.split(current_dir)[0])
+    # We have reached command_context.topsrcdir, so return None
+    return None
+
+
+def _copy_clang_format_for_show_diff(
+    command_context, current_dir, cached_clang_format_cfg, tmpdir
+):
+    # Look up .clang-format in the cache first
+    clang_format_cfg = cached_clang_format_cfg.get(current_dir, None)
+
+    if clang_format_cfg is None:
+        # Walk up through the parent directories
+        clang_format_cfg = _get_clang_format_cfg(command_context, current_dir)
+
+        # This is unlikely to happen since we must have a .clang-format at
+        # command_context.topsrcdir, but in any case we should handle a potential error
+        if clang_format_cfg is None:
+            print("Cannot find a corresponding .clang-format.")
+            return 1
+
+        # Cache clang_format_cfg for potential later usage
+        cached_clang_format_cfg[current_dir] = clang_format_cfg
+
+    # Copy .clang-format to the tmp dir where the formatted file is copied
+    shutil.copy(clang_format_cfg, tmpdir)
+    return 0
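+
+# Illustrative walk-up: for a file dom/base/foo.cpp (hypothetical path), the
+# lookup above tries dom/base/.clang-format, then dom/.clang-format, and so on,
+# stopping at the .clang-format at the top of the source tree.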
+
+
+def _run_clang_format_path(
+    command_context, clang_format, paths, output_file, output_format
+):
+
+    # Run clang-format on files or directories directly
+    from subprocess import CalledProcessError, check_output
+
+    if output_format == "json":
+        # Get replacements in XML, then process them to JSON
+        args = [clang_format, "-output-replacements-xml"]
+    else:
+        args = [clang_format, "-i"]
+
+    if output_file:
+        # We just want to show the diff, so create the directory the copy goes to
+        tmpdir = os.path.join(command_context.topobjdir, "tmp")
+        if not os.path.exists(tmpdir):
+            os.makedirs(tmpdir)
+
+    path_list = _generate_path_list(command_context, paths)
+
+    if path_list == []:
+        return
+
+    print("Processing %d file(s)..." % len(path_list))
+
+    if output_file:
+        patches = {}
+        cached_clang_format_cfg = {}
+        for i in range(0, len(path_list)):
+            l = path_list[i : (i + 1)]
+
+            # Copy the files into a temp directory, run clang-format on the
+            # temp directory, and show the diff
+            original_path = l[0]
+            local_path = ntpath.basename(original_path)
+            current_dir = ntpath.dirname(original_path)
+            target_file = os.path.join(tmpdir, local_path)
+            faketmpdir = os.path.dirname(target_file)
+            if not os.path.isdir(faketmpdir):
+                os.makedirs(faketmpdir)
+            shutil.copy(l[0], faketmpdir)
+            l[0] = target_file
+
+            ret = _copy_clang_format_for_show_diff(
+                command_context, current_dir, cached_clang_format_cfg, faketmpdir
+            )
+            if ret != 0:
+                return ret
+
+            # Run clang-format on the list
+            try:
+                output = check_output(args + l)
+                if output and output_format == "json":
+                    # Output a relative path in the json patch list
+                    relative_path = os.path.relpath(
+                        original_path, command_context.topsrcdir
+                    )
+                    patches[relative_path] = _parse_xml_output(original_path, output)
+            except CalledProcessError as e:
+                # Something went wrong
+                print("clang-format: An error occurred while running clang-format.")
+                return e.returncode
+
+            # Show the diff
+            if output_format == "diff":
+                diff_command = ["diff", "-u", original_path, target_file]
+                try:
+                    output = check_output(diff_command)
+                except CalledProcessError as e:
+                    # diff -u returns 0 when there is no change; here we expect
+                    # changes, so if we are here, there is a diff to show
+                    if e.output:
+                        # Replace the temp path by the path relative to the repository to
+                        # display a valid patch
+                        relative_path = os.path.relpath(
+                            original_path, command_context.topsrcdir
+                        )
+                        # We must modify the paths in order to be compatible with the
+                        # `diff` format.
+                        original_path_diff = os.path.join("a", relative_path)
+                        target_path_diff = os.path.join("b", relative_path)
+                        e.output = e.output.decode("utf-8")
+                        patch = e.output.replace(
+                            "+++ {}".format(target_file),
+                            "+++ {}".format(target_path_diff),
+                        ).replace(
+                            "--- {}".format(original_path),
+                            "--- {}".format(original_path_diff),
+                        )
+                        patches[original_path] = patch
+
+        if output_format == "json":
+            output = json.dumps(patches, indent=4)
+        else:
+            # Display all the patches at once
+            output = "\n".join(patches.values())
+
+        # Output to the specified file or stdout
+        print(output, file=output_file)
+
+        shutil.rmtree(tmpdir)
+        return 0
+
+    # Run clang-format in parallel, trying to saturate all of the available cores.
+    import math
+
+    max_workers = multiprocessing.cpu_count()
+
+    # To maximize CPU usage when there are few items to handle,
+    # underestimate the number of items per batch, then dispatch
+    # outstanding items across workers. By definition, each worker will
+    # handle at most one outstanding item.
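+    # Worked example (illustrative): 10 files across 4 workers gives
+    # batch_size = floor(10 / 4) = 2 and outstanding_items = 10 - 2 * 4 = 2,
+    # so the dispatched batches have sizes 3, 3, 2 and 2.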
+    batch_size = int(math.floor(float(len(path_list)) / max_workers))
+    outstanding_items = len(path_list) - batch_size * max_workers
+
+    batches = []
+
+    i = 0
+    while i < len(path_list):
+        num_items = batch_size + (1 if outstanding_items > 0 else 0)
+        batches.append(args + path_list[i : (i + num_items)])
+
+        outstanding_items -= 1
+        i += num_items
+
+    error_code = None
+
+    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+        futures = []
+        for batch in batches:
+            futures.append(executor.submit(run_one_clang_format_batch, batch))
+
+        for future in concurrent.futures.as_completed(futures):
+            # Wait for every task to finish
+            ret_val = future.result()
+            if ret_val is not None:
+                error_code = ret_val
+
+    if error_code is not None:
+        return error_code
+    return 0
+
+
+def _parse_xml_output(path, clang_output):
+    """
+    Parse the clang-format XML output to convert it into a JSON-compatible
+    list of patches, and compute line-level information from the
+    character-level changes provided.
+    """
+    content = six.ensure_str(open(path, "r").read())
+
+    def _nb_of_lines(start, end):
+        return len(content[start:end].splitlines())
+
+    def _build(replacement):
+        offset = int(replacement.attrib["offset"])
+        length = int(replacement.attrib["length"])
+        last_line = content.rfind("\n", 0, offset)
+        return {
+            "replacement": replacement.text,
+            "char_offset": offset,
+            "char_length": length,
+            "line": _nb_of_lines(0, offset),
+            "line_offset": last_line != -1 and (offset - last_line) or 0,
+            "lines_modified": _nb_of_lines(offset, offset + length),
+        }
+
+    return [
+        _build(replacement)
+        for replacement in ET.fromstring(clang_output).findall("replacement")
+    ]
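+
+# For instance (values illustrative), a clang-format replacement such as
+#   <replacement offset="42" length="0"> </replacement>
+# maps to {"replacement": " ", "char_offset": 42, "char_length": 0, "line": <n>,
+# "line_offset": <col>, "lines_modified": <count>}.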
diff --git a/python/mozbuild/mozbuild/code_analysis/moz.build b/python/mozbuild/mozbuild/code_analysis/moz.build
new file mode 100644
index 0000000000..bb49fbcd2f
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files("**"):
+    BUG_COMPONENT = ("Firefox Build System", "Source Code Analysis")
diff --git a/python/mozbuild/mozbuild/code_analysis/utils.py b/python/mozbuild/mozbuild/code_analysis/utils.py
new file mode 100644
index 0000000000..e3931aa7e4
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/utils.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+import mozpack.path as mozpath
+import yaml
+
+from mozbuild.util import memoized_property
+
+
+class ClangTidyConfig(object):
+    def __init__(self, mozilla_src):
+        self._clang_tidy_config = self._get_clang_tidy_config(mozilla_src)
+
+    def _get_clang_tidy_config(self, mozilla_src):
+        try:
+            file_handler = open(
+                mozpath.join(mozilla_src, "tools", "clang-tidy", "config.yaml")
+            )
+            config = yaml.safe_load(file_handler)
+        except Exception:
+            self.log(
+                logging.ERROR,
+                "clang-tidy-config",
+                {},
+                "Looks like config.yaml is not valid, so we are going to use default"
+                " values for the rest of the clang-tidy analysis.",
+            )
+            return None
+        return config
+
+    @memoized_property
+    def checks(self):
+        """
+        Returns a list with all activated checks
+        """
+
+        checks = ["-*"]
+        try:
+            config = self._clang_tidy_config
+            for item in config["clang_checkers"]:
+                if item.get("publish", True):
+                    checks.append(item["name"])
+        except Exception:
+            self.log(
+                logging.ERROR,
+                "clang-tidy-config",
+                {},
+                "Looks like config.yaml is not valid, so we are unable to "
+                "determine default checkers, using '-checks=-*,mozilla-*'",
+            )
+            checks.append("mozilla-*")
+        finally:
+            return checks
+
+    @memoized_property
+    def checks_with_data(self):
+        """
+        Returns a list with all activated checks plus metadata for each check
+        """
+
+        checks_with_data = [{"name": "-*"}]
+        try:
+            config = self._clang_tidy_config
+            for item in config["clang_checkers"]:
+                if item.get("publish", True):
+                    checks_with_data.append(item)
+        except Exception:
+            self.log(
+                logging.ERROR,
+                "clang-tidy-config",
+                {},
+                "Looks like config.yaml is not valid, so we are unable to "
+                "determine default checkers, using '-checks=-*,mozilla-*'",
+            )
+            checks_with_data.append({"name": "mozilla-*", "reliability": "high"})
+        finally:
+            return checks_with_data
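+
+    # A typical checks_with_data entry looks like this (illustrative):
+    #   {"name": "modernize-use-nullptr", "reliability": "high", "publish": True}
+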
+    @memoized_property
+    def checks_config(self):
+        """
+        Returns the configuration for all checks
+        """
+
+        config_list = []
+        checks_config = {}
+        try:
+            config = self._clang_tidy_config
+            for checker in config["clang_checkers"]:
+                if checker.get("publish", True) and "config" in checker:
+                    for checker_option in checker["config"]:
+                        # Verify that the format of the option is correct; the
+                        # possibilities are:
+                        # 1. CheckerName.Option
+                        # 2. Option -> which will become CheckerName.Option
+                        if not checker_option["key"].startswith(checker["name"]):
+                            checker_option["key"] = "{}.{}".format(
+                                checker["name"], checker_option["key"]
+                            )
+                    config_list += checker["config"]
+            checks_config["CheckOptions"] = config_list
+        except Exception:
+            self.log(
+                logging.ERROR,
+                "clang-tidy-config",
+                {},
+                "Looks like config.yaml is not valid, so we are unable to "
+                "determine the configuration for checkers, so using the default",
+            )
+            checks_config = None
+        finally:
+            return checks_config
+
+    @memoized_property
+    def version(self):
+        """
+        Returns the version of clang-tidy suitable for this configuration file
+        """
+
+        if "package_version" in self._clang_tidy_config:
+            return self._clang_tidy_config["package_version"]
+        self.log(
+            logging.ERROR,
+            "clang-tidy-config",
+            {},
+            "Unable to find 'package_version' in the config.yaml",
+        )
+        return None
+
+    @memoized_property
+    def platforms(self):
+        """
+        Returns a list of platforms suitable to work with `clang-tidy`
+        """
+        return self._clang_tidy_config.get("platforms", [])
diff --git a/python/mozbuild/mozbuild/codecoverage/__init__.py b/python/mozbuild/mozbuild/codecoverage/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/codecoverage/chrome_map.py b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
new file mode 100644
index 0000000000..79cedd2faf
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
@@ -0,0 +1,175 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import re
+
+import mozpack.path as mozpath
+import six
+from mach.config import ConfigSettings
+from mach.logging import LoggingManager
+from mozpack.copier import FileRegistry
+from mozpack.files import PreprocessedFile
+from mozpack.manifests import InstallManifest
+
+from mozbuild.backend.common import CommonBackend
+from mozbuild.base import MozbuildObject
+from mozbuild.frontend.data import (
+    ChromeManifestEntry,
+    FinalTargetFiles,
+    FinalTargetPreprocessedFiles,
+    JARManifest,
+)
+
+from .manifest_handler import ChromeManifestHandler
+
+_line_comment_re = re.compile(r'^//@line (\d+) "(.+)"$')
+
+
+def generate_pp_info(path, topsrcdir):
+    with open(path, encoding="utf-8") as fh:
+        # (start, end) -> (included_source, start)
+        section_info = dict()
+
+        this_section = None
+
+        def finish_section(pp_end):
+            pp_start, inc_source, inc_start = this_section
+            section_info[str(pp_start) + "," + str(pp_end)] = inc_source, inc_start
+
+        for count, line in enumerate(fh):
+            # Regexes are quite slow, so bail out early.
+            if not line.startswith("//@line"):
+                continue
+            m = re.match(_line_comment_re, line)
+            if m:
+                if this_section:
+                    finish_section(count + 1)
+                inc_start, inc_source = m.groups()
+
+                # Special case to handle $SRCDIR prefixes
+                src_dir_prefix = "$SRCDIR"
+                parts = mozpath.split(inc_source)
+                if parts[0] == src_dir_prefix:
+                    inc_source = mozpath.join(*parts[1:])
+                else:
+                    inc_source = mozpath.relpath(inc_source, topsrcdir)
+
+                pp_start = count + 2
+                this_section = pp_start, inc_source, int(inc_start)
+
+        if this_section:
+            finish_section(count + 2)
+
+        return section_info
+
+
+# This build backend assumes the build has already happened, as it parses the
+# built preprocessed files to generate data to map them to the original sources.
+ + +class ChromeMapBackend(CommonBackend): + def _init(self): + CommonBackend._init(self) + + log_manager = LoggingManager() + self._cmd = MozbuildObject( + self.environment.topsrcdir, + ConfigSettings(), + log_manager, + self.environment.topobjdir, + ) + self._install_mapping = {} + self.manifest_handler = ChromeManifestHandler() + + def consume_object(self, obj): + if isinstance(obj, JARManifest): + self._consume_jar_manifest(obj) + if isinstance(obj, ChromeManifestEntry): + self.manifest_handler.handle_manifest_entry(obj.entry) + if isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)): + self._handle_final_target_files(obj) + return True + + def _handle_final_target_files(self, obj): + for path, files in obj.files.walk(): + for f in files: + dest = mozpath.join(obj.install_target, path, f.target_basename) + obj_path = mozpath.join(self.environment.topobjdir, dest) + if obj_path.endswith(".in"): + obj_path = obj_path[:-3] + if isinstance(obj, FinalTargetPreprocessedFiles): + assert os.path.exists(obj_path), "%s should exist" % obj_path + pp_info = generate_pp_info(obj_path, obj.topsrcdir) + else: + pp_info = None + + base = ( + obj.topobjdir + if f.full_path.startswith(obj.topobjdir) + else obj.topsrcdir + ) + self._install_mapping[dest] = ( + mozpath.relpath(f.full_path, base), + pp_info, + ) + + def consume_finished(self): + mp = os.path.join( + self.environment.topobjdir, "_build_manifests", "install", "_tests" + ) + install_manifest = InstallManifest(mp) + reg = FileRegistry() + install_manifest.populate_registry(reg) + + for dest, src in reg: + if not hasattr(src, "path"): + continue + + if not os.path.isabs(dest): + dest = "_tests/" + dest + + obj_path = mozpath.join(self.environment.topobjdir, dest) + if isinstance(src, PreprocessedFile): + assert os.path.exists(obj_path), "%s should exist" % obj_path + pp_info = generate_pp_info(obj_path, self.environment.topsrcdir) + else: + pp_info = None + + rel_src = mozpath.relpath(src.path, self.environment.topsrcdir) + self._install_mapping[dest] = rel_src, pp_info + + # Our result has four parts: + # A map from url prefixes to objdir directories: + # { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... } + # A map of overrides. + # A map from objdir paths to sourcedir paths, and an object storing mapping + # information for preprocessed files: + # { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js": + # [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ], + # ... } + # An object containing build configuration information. 
+ outputfile = os.path.join(self.environment.topobjdir, "chrome-map.json") + with self._write_file(outputfile) as fh: + chrome_mapping = self.manifest_handler.chrome_mapping + overrides = self.manifest_handler.overrides + json.dump( + [ + {k: list(v) for k, v in six.iteritems(chrome_mapping)}, + overrides, + self._install_mapping, + { + "topobjdir": mozpath.normpath(self.environment.topobjdir), + "MOZ_APP_NAME": self.environment.substs.get("MOZ_APP_NAME"), + "OMNIJAR_NAME": self.environment.substs.get("OMNIJAR_NAME"), + "MOZ_MACBUNDLE_NAME": self.environment.substs.get( + "MOZ_MACBUNDLE_NAME" + ), + }, + ], + fh, + sort_keys=True, + indent=2, + ) diff --git a/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py b/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py new file mode 100644 index 0000000000..fd83efdb3e --- /dev/null +++ b/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py @@ -0,0 +1,777 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import sys +from argparse import ArgumentParser + +try: + import urlparse +except ImportError: + import urllib.parse as urlparse + +import mozpack.path as mozpath +from mozpack.chrome.manifest import parse_manifest +from six import viewitems + +from .manifest_handler import ChromeManifestHandler + + +class LcovRecord(object): + __slots__ = ( + "test_name", + "source_file", + "functions", + "function_exec_counts", + "function_count", + "covered_function_count", + "branches", + "branch_count", + "covered_branch_count", + "lines", + "line_count", + "covered_line_count", + ) + + def __init__(self): + self.functions = {} + self.function_exec_counts = {} + self.branches = {} + self.lines = {} + + def __iadd__(self, other): + + # These shouldn't differ. + self.source_file = other.source_file + if hasattr(other, "test_name"): + self.test_name = other.test_name + self.functions.update(other.functions) + + for name, count in viewitems(other.function_exec_counts): + self.function_exec_counts[name] = count + self.function_exec_counts.get( + name, 0 + ) + + for key, taken in viewitems(other.branches): + self.branches[key] = taken + self.branches.get(key, 0) + + for line, (exec_count, checksum) in viewitems(other.lines): + new_exec_count = exec_count + if line in self.lines: + old_exec_count, _ = self.lines[line] + new_exec_count += old_exec_count + self.lines[line] = new_exec_count, checksum + + self.resummarize() + return self + + def resummarize(self): + # Re-calculate summaries after generating or splitting a record. + self.function_count = len(self.functions.keys()) + # Function records may have moved between files, so filter here. + self.function_exec_counts = { + fn_name: count + for fn_name, count in viewitems(self.function_exec_counts) + if fn_name in self.functions.values() + } + self.covered_function_count = len( + [c for c in self.function_exec_counts.values() if c] + ) + self.line_count = len(self.lines) + self.covered_line_count = len([c for c, _ in self.lines.values() if c]) + self.branch_count = len(self.branches) + self.covered_branch_count = len([c for c in self.branches.values() if c]) + + +class RecordRewriter(object): + # Helper class for rewriting/spliting individual lcov records according + # to what the preprocessor did. 
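+    # For example (hypothetical numbers): with pp_info {"10,20": ("included.js", 1)},
+    # a DA entry for preprocessed line 12 is mapped to line 3 of included.js and,
+    # because included.js differs from the record's source file, moved to a newly
+    # generated record for that file.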
+ def __init__(self): + self._ranges = None + + def _get_range(self, line): + for start, end in self._ranges: + if line < start: + return None + if line < end: + return start, end + return None + + def _get_mapped_line(self, line, r): + inc_source, inc_start = self._current_pp_info[r] + start, end = r + offs = line - start + return inc_start + offs + + def _get_record(self, inc_source): + if inc_source in self._additions: + gen_rec = self._additions[inc_source] + else: + gen_rec = LcovRecord() + gen_rec.source_file = inc_source + self._additions[inc_source] = gen_rec + return gen_rec + + def _rewrite_lines(self, record): + rewritten_lines = {} + for ln, line_info in viewitems(record.lines): + r = self._get_range(ln) + if r is None: + rewritten_lines[ln] = line_info + continue + new_ln = self._get_mapped_line(ln, r) + inc_source, _ = self._current_pp_info[r] + + if inc_source != record.source_file: + gen_rec = self._get_record(inc_source) + gen_rec.lines[new_ln] = line_info + continue + + # Move exec_count to the new lineno. + rewritten_lines[new_ln] = line_info + + record.lines = rewritten_lines + + def _rewrite_functions(self, record): + rewritten_fns = {} + + # Sometimes we get multiple entries for a named function ("top-level", for + # instance). It's not clear the records that result are well-formed, but + # we act as though if a function has multiple FN's, the corresponding + # FNDA's are all the same. + for ln, fn_name in viewitems(record.functions): + r = self._get_range(ln) + if r is None: + rewritten_fns[ln] = fn_name + continue + new_ln = self._get_mapped_line(ln, r) + inc_source, _ = self._current_pp_info[r] + if inc_source != record.source_file: + gen_rec = self._get_record(inc_source) + gen_rec.functions[new_ln] = fn_name + if fn_name in record.function_exec_counts: + gen_rec.function_exec_counts[fn_name] = record.function_exec_counts[ + fn_name + ] + continue + rewritten_fns[new_ln] = fn_name + record.functions = rewritten_fns + + def _rewrite_branches(self, record): + rewritten_branches = {} + for (ln, block_number, branch_number), taken in viewitems(record.branches): + r = self._get_range(ln) + if r is None: + rewritten_branches[ln, block_number, branch_number] = taken + continue + new_ln = self._get_mapped_line(ln, r) + inc_source, _ = self._current_pp_info[r] + if inc_source != record.source_file: + gen_rec = self._get_record(inc_source) + gen_rec.branches[(new_ln, block_number, branch_number)] = taken + continue + rewritten_branches[(new_ln, block_number, branch_number)] = taken + + record.branches = rewritten_branches + + def rewrite_record(self, record, pp_info): + # Rewrite the lines in the given record according to preprocessor info + # and split to additional records when pp_info has included file info. + self._current_pp_info = dict( + [(tuple([int(l) for l in k.split(",")]), v) for k, v in pp_info.items()] + ) + self._ranges = sorted(self._current_pp_info.keys()) + self._additions = {} + self._rewrite_lines(record) + self._rewrite_functions(record) + self._rewrite_branches(record) + + record.resummarize() + + generated_records = self._additions.values() + for r in generated_records: + r.resummarize() + return generated_records + + +class LcovFile(object): + # Simple parser/pretty-printer for lcov format. 
+    # lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php
+
+    # TN:<test name>
+    # SF:<absolute path to the source file>
+    # FN:<line number of function start>,<function name>
+    # FNDA:<execution count>,<function name>
+    # FNF:<number of functions found>
+    # FNH:<number of functions hit>
+    # BRDA:<line number>,<block number>,<branch number>,<taken>
+    # BRF:<number of branches found>
+    # BRH:<number of branches hit>
+    # DA:<line number>,<execution count>[,<checksum>]
+    # LF:<number of instrumented lines>
+    # LH:<number of lines with a non-zero execution count>
+    # end_of_record
+    PREFIX_TYPES = {
+        "TN": 0,
+        "SF": 0,
+        "FN": 1,
+        "FNDA": 1,
+        "FNF": 0,
+        "FNH": 0,
+        "BRDA": 3,
+        "BRF": 0,
+        "BRH": 0,
+        "DA": 2,
+        "LH": 0,
+        "LF": 0,
+    }
+
+    def __init__(self, lcov_paths):
+        self.lcov_paths = lcov_paths
+
+    def iterate_records(self, rewrite_source=None):
+        current_source_file = None
+        current_pp_info = None
+        current_lines = []
+        for lcov_path in self.lcov_paths:
+            with open(lcov_path, "r", encoding="utf-8") as lcov_fh:
+                for line in lcov_fh:
+                    line = line.rstrip()
+                    if not line:
+                        continue
+
+                    if line == "end_of_record":
+                        # We skip records that we couldn't rewrite, that is,
+                        # records for which rewrite_url returns None.
+                        if current_source_file is not None:
+                            yield (current_source_file, current_pp_info, current_lines)
+                        current_source_file = None
+                        current_pp_info = None
+                        current_lines = []
+                        continue
+
+                    colon = line.find(":")
+                    prefix = line[:colon]
+
+                    if prefix == "SF":
+                        sf = line[(colon + 1) :]
+                        res = (
+                            rewrite_source(sf)
+                            if rewrite_source is not None
+                            else (sf, None)
+                        )
+                        if res is None:
+                            current_lines.append(line)
+                        else:
+                            current_source_file, current_pp_info = res
+                            current_lines.append("SF:" + current_source_file)
+                    else:
+                        current_lines.append(line)
+
+    def parse_record(self, record_content):
+        self.current_record = LcovRecord()
+
+        for line in record_content:
+            colon = line.find(":")
+
+            prefix = line[:colon]
+
+            # We occasionally end up with multi-line scripts in data: uris
+            # that will trip up the parser; just skip them for now.
+            if colon < 0 or prefix not in self.PREFIX_TYPES:
+                continue
+
+            args = line[(colon + 1) :].split(",", self.PREFIX_TYPES[prefix])
+
+            def try_convert(a):
+                try:
+                    return int(a)
+                except ValueError:
+                    return a
+
+            args = [try_convert(a) for a in args]
+
+            try:
+                LcovFile.__dict__["parse_" + prefix](self, *args)
+            except ValueError:
+                print("Encountered an error while parsing lcov line:\n%s" % line)
+                raise
+            except KeyError:
+                print("Invalid lcov line start:\n%s" % line)
+                raise
+            except TypeError:
+                print("Invalid lcov line start:\n%s" % line)
+                raise
+
+        ret = self.current_record
+        self.current_record = LcovRecord()
+        return ret
+
+    def print_file(self, fh, rewrite_source, rewrite_record):
+        for source_file, pp_info, record_content in self.iterate_records(
+            rewrite_source
+        ):
+            if pp_info is not None:
+                record = self.parse_record(record_content)
+                for r in rewrite_record(record, pp_info):
+                    fh.write(self.format_record(r))
+                fh.write(self.format_record(record))
+            else:
+                fh.write("\n".join(record_content) + "\nend_of_record\n")
+
+    def format_record(self, record):
+        out_lines = []
+        for name in LcovRecord.__slots__:
+            if hasattr(record, name):
+                out_lines.append(LcovFile.__dict__["format_" + name](self, record))
+        return "\n".join(out_lines) + "\nend_of_record\n"
+
+    def format_test_name(self, record):
+        return "TN:%s" % record.test_name
+
+    def format_source_file(self, record):
+        return "SF:%s" % record.source_file
+
+    def format_functions(self, record):
+        # Sorting results gives deterministic output (and is a lot faster than
+        # using OrderedDict).
+ fns = [] + for start_lineno, fn_name in sorted(viewitems(record.functions)): + fns.append("FN:%s,%s" % (start_lineno, fn_name)) + return "\n".join(fns) + + def format_function_exec_counts(self, record): + fndas = [] + for name, exec_count in sorted(viewitems(record.function_exec_counts)): + fndas.append("FNDA:%s,%s" % (exec_count, name)) + return "\n".join(fndas) + + def format_function_count(self, record): + return "FNF:%s" % record.function_count + + def format_covered_function_count(self, record): + return "FNH:%s" % record.covered_function_count + + def format_branches(self, record): + brdas = [] + for key in sorted(record.branches): + taken = record.branches[key] + taken = "-" if taken == 0 else taken + brdas.append("BRDA:%s" % ",".join(map(str, list(key) + [taken]))) + return "\n".join(brdas) + + def format_branch_count(self, record): + return "BRF:%s" % record.branch_count + + def format_covered_branch_count(self, record): + return "BRH:%s" % record.covered_branch_count + + def format_lines(self, record): + das = [] + for line_no, (exec_count, checksum) in sorted(viewitems(record.lines)): + s = "DA:%s,%s" % (line_no, exec_count) + if checksum: + s += ",%s" % checksum + das.append(s) + return "\n".join(das) + + def format_line_count(self, record): + return "LF:%s" % record.line_count + + def format_covered_line_count(self, record): + return "LH:%s" % record.covered_line_count + + def parse_TN(self, test_name): + self.current_record.test_name = test_name + + def parse_SF(self, source_file): + self.current_record.source_file = source_file + + def parse_FN(self, start_lineno, fn_name): + self.current_record.functions[start_lineno] = fn_name + + def parse_FNDA(self, exec_count, fn_name): + self.current_record.function_exec_counts[fn_name] = exec_count + + def parse_FNF(self, function_count): + self.current_record.function_count = function_count + + def parse_FNH(self, covered_function_count): + self.current_record.covered_function_count = covered_function_count + + def parse_BRDA(self, line_number, block_number, branch_number, taken): + taken = 0 if taken == "-" else taken + self.current_record.branches[(line_number, block_number, branch_number)] = taken + + def parse_BRF(self, branch_count): + self.current_record.branch_count = branch_count + + def parse_BRH(self, covered_branch_count): + self.current_record.covered_branch_count = covered_branch_count + + def parse_DA(self, line_number, execution_count, checksum=None): + self.current_record.lines[line_number] = (execution_count, checksum) + + def parse_LH(self, covered_line_count): + self.current_record.covered_line_count = covered_line_count + + def parse_LF(self, line_count): + self.current_record.line_count = line_count + + +class UrlFinderError(Exception): + pass + + +class UrlFinder(object): + # Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend + # and install manifests to find a path to the source file and the corresponding + # (potentially pre-processed) file in the objdir. + def __init__(self, chrome_map_path, appdir, gredir, extra_chrome_manifests): + # Cached entries + self._final_mapping = {} + + try: + with open(chrome_map_path, "r", encoding="utf-8") as fh: + url_prefixes, overrides, install_info, buildconfig = json.load(fh) + except IOError: + print( + "Error reading %s. Run |./mach build-backend -b ChromeMap| to " + "populate the ChromeMap backend." 
% chrome_map_path + ) + raise + + self.topobjdir = buildconfig["topobjdir"] + self.MOZ_APP_NAME = buildconfig["MOZ_APP_NAME"] + self.OMNIJAR_NAME = buildconfig["OMNIJAR_NAME"] + + # These are added dynamically in nsIResProtocolHandler, we might + # need to get them at run time. + if "resource:///" not in url_prefixes: + url_prefixes["resource:///"] = [appdir] + if "resource://gre/" not in url_prefixes: + url_prefixes["resource://gre/"] = [gredir] + + self._url_prefixes = url_prefixes + self._url_overrides = overrides + + self._respath = None + + mac_bundle_name = buildconfig["MOZ_MACBUNDLE_NAME"] + if mac_bundle_name: + self._respath = mozpath.join( + "dist", mac_bundle_name, "Contents", "Resources" + ) + + if not extra_chrome_manifests: + extra_path = os.path.join(self.topobjdir, "_tests", "extra.manifest") + if os.path.isfile(extra_path): + extra_chrome_manifests = [extra_path] + + if extra_chrome_manifests: + self._populate_chrome(extra_chrome_manifests) + + self._install_mapping = install_info + + def _populate_chrome(self, manifests): + handler = ChromeManifestHandler() + for m in manifests: + path = os.path.abspath(m) + for e in parse_manifest(None, path): + handler.handle_manifest_entry(e) + self._url_overrides.update(handler.overrides) + self._url_prefixes.update(handler.chrome_mapping) + + def _find_install_prefix(self, objdir_path): + def _prefix(s): + for p in mozpath.split(s): + if "*" not in p: + yield p + "/" + + offset = 0 + for leaf in reversed(mozpath.split(objdir_path)): + offset += len(leaf) + if objdir_path[:-offset] in self._install_mapping: + pattern_prefix, is_pp = self._install_mapping[objdir_path[:-offset]] + full_leaf = objdir_path[len(objdir_path) - offset :] + src_prefix = "".join(_prefix(pattern_prefix)) + self._install_mapping[objdir_path] = ( + mozpath.join(src_prefix, full_leaf), + is_pp, + ) + break + offset += 1 + + def _install_info(self, objdir_path): + if objdir_path not in self._install_mapping: + # If our path is missing, some prefix of it may be in the install + # mapping mapped to a wildcard. + self._find_install_prefix(objdir_path) + if objdir_path not in self._install_mapping: + raise UrlFinderError("Couldn't find entry in manifest for %s" % objdir_path) + return self._install_mapping[objdir_path] + + def _abs_objdir_install_info(self, term): + obj_relpath = term[len(self.topobjdir) + 1 :] + res = self._install_info(obj_relpath) + + # Some urls on osx will refer to paths in the mac bundle, so we + # re-interpret them as being their original location in dist/bin. + if not res and self._respath and obj_relpath.startswith(self._respath): + obj_relpath = obj_relpath.replace(self._respath, "dist/bin") + res = self._install_info(obj_relpath) + + if not res: + raise UrlFinderError("Couldn't find entry in manifest for %s" % obj_relpath) + return res + + def find_files(self, url): + # Returns a tuple of (source file, pp_info) + # for the given "resource:", "chrome:", or "file:" uri. + term = url + if term in self._url_overrides: + term = self._url_overrides[term] + + if os.path.isabs(term) and term.startswith(self.topobjdir): + source_path, pp_info = self._abs_objdir_install_info(term) + return source_path, pp_info + + for prefix, dests in viewitems(self._url_prefixes): + if term.startswith(prefix): + for dest in dests: + if not dest.endswith("/"): + dest += "/" + objdir_path = term.replace(prefix, dest) + + while objdir_path.startswith("//"): + # The mochitest harness produces some wonky file:// uris + # that need to be fixed. 
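+                        # e.g. a path like "//builds/obj/dist/bin/foo.js"
+                        # (hypothetical) is collapsed to a single leading slash.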
+ objdir_path = objdir_path[1:] + + try: + if os.path.isabs(objdir_path) and objdir_path.startswith( + self.topobjdir + ): + return self._abs_objdir_install_info(objdir_path) + else: + src_path, pp_info = self._install_info(objdir_path) + return mozpath.normpath(src_path), pp_info + except UrlFinderError: + pass + + if dest.startswith("resource://") or dest.startswith("chrome://"): + result = self.find_files(term.replace(prefix, dest)) + if result: + return result + + raise UrlFinderError("No objdir path for %s" % term) + + def rewrite_url(self, url): + # This applies one-off rules and returns None for urls that we aren't + # going to be able to resolve to a source file ("about:" urls, for + # instance). + if url in self._final_mapping: + return self._final_mapping[url] + if url.endswith("> eval"): + return None + if url.endswith("> Function"): + return None + if " -> " in url: + url = url.split(" -> ")[1].rstrip() + if "?" in url: + url = url.split("?")[0] + + url_obj = urlparse.urlparse(url) + if url_obj.scheme == "jar": + app_name = self.MOZ_APP_NAME + omnijar_name = self.OMNIJAR_NAME + + if app_name in url: + if omnijar_name in url: + # e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js # noqa + parts = url_obj.path.split(omnijar_name + "!", 1) + elif ".xpi!" in url: + # e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js # noqa + parts = url_obj.path.split(".xpi!", 1) + else: + # We don't know how to handle this jar: path, so return it to the + # caller to make it print a warning. + return url_obj.path, None + + dir_parts = parts[0].rsplit(app_name + "/", 1) + url = mozpath.normpath( + mozpath.join( + self.topobjdir, + "dist", + "bin", + dir_parts[1].lstrip("/"), + parts[1].lstrip("/"), + ) + ) + elif ".xpi!" in url: + # This matching mechanism is quite brittle and based on examples seen in the wild. + # There's no rule to match the XPI name to the path in dist/xpi-stage. + parts = url_obj.path.split(".xpi!", 1) + addon_name = os.path.basename(parts[0]) + if "-test@mozilla.org" in addon_name: + addon_name = addon_name[: -len("-test@mozilla.org")] + elif addon_name.endswith("@mozilla.org"): + addon_name = addon_name[: -len("@mozilla.org")] + url = mozpath.normpath( + mozpath.join( + self.topobjdir, + "dist", + "xpi-stage", + addon_name, + parts[1].lstrip("/"), + ) + ) + elif url_obj.scheme == "file" and os.path.isabs(url_obj.path): + path = url_obj.path + if not os.path.isfile(path): + # This may have been in a profile directory that no + # longer exists. + return None + if not path.startswith(self.topobjdir): + return path, None + url = url_obj.path + elif url_obj.scheme in ("http", "https", "javascript", "data", "about"): + return None + + result = self.find_files(url) + self._final_mapping[url] = result + return result + + +class LcovFileRewriter(object): + # Class for partial parses of LCOV format and rewriting to resolve urls + # and preprocessed file lines. 
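+    # A typical invocation (paths hypothetical) is
+    #     LcovFileRewriter("chrome-map.json").rewrite_files(["jsdcov.info"], "", ".out")
+    # which writes jsdcov.info.out next to the input, with urls resolved to
+    # source paths and preprocessed lines mapped back to their origins.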
+ def __init__( + self, + chrome_map_path, + appdir="dist/bin/browser/", + gredir="dist/bin/", + extra_chrome_manifests=[], + ): + self.url_finder = UrlFinder( + chrome_map_path, appdir, gredir, extra_chrome_manifests + ) + self.pp_rewriter = RecordRewriter() + + def rewrite_files(self, in_paths, output_file, output_suffix): + unknowns = set() + found_valid = [False] + + def rewrite_source(url): + try: + res = self.url_finder.rewrite_url(url) + if res is None: + return None + except Exception as e: + if url not in unknowns: + # The exception can contain random filename used by + # test cases, and there can be character that cannot be + # encoded with the stdout encoding. + sys.stdout.buffer.write( + ( + "Error: %s.\nCouldn't find source info for %s, removing record" + % (e, url) + ).encode(sys.stdout.encoding, errors="replace") + ) + unknowns.add(url) + return None + + source_file, pp_info = res + # We can't assert that the file exists here, because we don't have the source + # checkout available on test machines. We can bring back this assertion when + # bug 1432287 is fixed. + # assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % ( + # url, source_file) + + found_valid[0] = True + + return res + + in_paths = [os.path.abspath(in_path) for in_path in in_paths] + + if output_file: + lcov_file = LcovFile(in_paths) + with open(output_file, "w+", encoding="utf-8") as out_fh: + lcov_file.print_file( + out_fh, rewrite_source, self.pp_rewriter.rewrite_record + ) + else: + for in_path in in_paths: + lcov_file = LcovFile([in_path]) + with open(in_path + output_suffix, "w+", encoding="utf-8") as out_fh: + lcov_file.print_file( + out_fh, rewrite_source, self.pp_rewriter.rewrite_record + ) + + if not found_valid[0]: + print("WARNING: No valid records found in %s" % in_paths) + return + + +def main(): + parser = ArgumentParser( + description="Given a set of gcov .info files produced " + "by spidermonkey's code coverage, re-maps file urls " + "back to source files and lines in preprocessed files " + "back to their original locations." + ) + parser.add_argument( + "--chrome-map-path", + default="chrome-map.json", + help="Path to the chrome-map.json file.", + ) + parser.add_argument( + "--app-dir", + default="dist/bin/browser/", + help="Prefix of the appdir in use. This is used to map " + "urls starting with resource:///. It may differ by " + "app, but defaults to the valid value for firefox.", + ) + parser.add_argument( + "--gre-dir", + default="dist/bin/", + help="Prefix of the gre dir in use. This is used to map " + "urls starting with resource://gre. It may differ by " + "app, but defaults to the valid value for firefox.", + ) + parser.add_argument( + "--output-suffix", default=".out", help="The suffix to append to output files." + ) + parser.add_argument( + "--extra-chrome-manifests", + nargs="+", + help="Paths to files containing extra chrome registration.", + ) + parser.add_argument( + "--output-file", + default="", + help="The output file where the results are merged. 
Leave empty to make the rewriter not " + "merge files.", + ) + parser.add_argument("files", nargs="+", help="The set of files to process.") + + args = parser.parse_args() + + rewriter = LcovFileRewriter( + args.chrome_map_path, args.app_dir, args.gre_dir, args.extra_chrome_manifests + ) + + files = [] + for f in args.files: + if os.path.isdir(f): + files += [os.path.join(f, e) for e in os.listdir(f)] + else: + files.append(f) + + rewriter.rewrite_files(files, args.output_file, args.output_suffix) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/codecoverage/manifest_handler.py b/python/mozbuild/mozbuild/codecoverage/manifest_handler.py new file mode 100644 index 0000000000..1f67b4089c --- /dev/null +++ b/python/mozbuild/mozbuild/codecoverage/manifest_handler.py @@ -0,0 +1,52 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from collections import defaultdict + +try: + import urlparse +except ImportError: + import urllib.parse as urlparse + +import mozpack.path as mozpath +from mozpack.chrome.manifest import ( + Manifest, + ManifestChrome, + ManifestOverride, + ManifestResource, + parse_manifest, +) + + +class ChromeManifestHandler(object): + def __init__(self): + self.overrides = {} + self.chrome_mapping = defaultdict(set) + + def handle_manifest_entry(self, entry): + format_strings = { + "content": "chrome://%s/content/", + "resource": "resource://%s/", + "locale": "chrome://%s/locale/", + "skin": "chrome://%s/skin/", + } + + if isinstance(entry, (ManifestChrome, ManifestResource)): + if isinstance(entry, ManifestResource): + dest = entry.target + url = urlparse.urlparse(dest) + if not url.scheme: + dest = mozpath.normpath(mozpath.join(entry.base, dest)) + if url.scheme == "file": + dest = mozpath.normpath(url.path) + else: + dest = mozpath.normpath(entry.path) + + base_uri = format_strings[entry.type] % entry.name + self.chrome_mapping[base_uri].add(dest) + if isinstance(entry, ManifestOverride): + self.overrides[entry.overloaded] = entry.overload + if isinstance(entry, Manifest): + for e in parse_manifest(None, entry.path): + self.handle_manifest_entry(e) diff --git a/python/mozbuild/mozbuild/codecoverage/packager.py b/python/mozbuild/mozbuild/codecoverage/packager.py new file mode 100644 index 0000000000..92254a96f5 --- /dev/null +++ b/python/mozbuild/mozbuild/codecoverage/packager.py @@ -0,0 +1,71 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import argparse
+import errno
+import json
+import sys
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.copier import FileRegistry, Jarrer
+from mozpack.files import FileFinder, GeneratedFile
+from mozpack.manifests import InstallManifest, UnreadableInstallManifest
+
+
+def describe_install_manifest(manifest, dest_dir):
+    try:
+        manifest = InstallManifest(manifest)
+    except UnreadableInstallManifest:
+        raise IOError(errno.EINVAL, "Error parsing manifest file", manifest)
+
+    reg = FileRegistry()
+
+    mapping = {}
+    manifest.populate_registry(reg)
+    dest_dir = mozpath.join(buildconfig.topobjdir, dest_dir)
+    for dest_file, src in reg:
+        if hasattr(src, "path"):
+            dest_path = mozpath.join(dest_dir, dest_file)
+            relsrc_path = mozpath.relpath(src.path, buildconfig.topsrcdir)
+            mapping[dest_path] = relsrc_path
+
+    return mapping
+
+
+def package_coverage_data(root, output_file):
+    finder = FileFinder(root)
+    jarrer = Jarrer()
+    for p, f in finder.find("**/*.gcno"):
+        jarrer.add(p, f)
+
+    dist_include_manifest = mozpath.join(
+        buildconfig.topobjdir, "_build_manifests", "install", "dist_include"
+    )
+    linked_files = describe_install_manifest(dist_include_manifest, "dist/include")
+    mapping_file = GeneratedFile(json.dumps(linked_files, sort_keys=True))
+    jarrer.add("linked-files-map.json", mapping_file)
+    jarrer.copy(output_file)
+
+
+def cli(args=sys.argv[1:]):
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-o", "--output-file", dest="output_file", help="Path to save packaged data to."
+    )
+    parser.add_argument(
+        "--root", dest="root", default=None, help="Root directory to search from."
+    )
+    args = parser.parse_args(args)
+
+    if not args.root:
+        from buildconfig import topobjdir
+
+        args.root = topobjdir
+
+    return package_coverage_data(args.root, args.output_file)
+
+
+if __name__ == "__main__":
+    sys.exit(cli())
diff --git a/python/mozbuild/mozbuild/compilation/__init__.py b/python/mozbuild/mozbuild/compilation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/compilation/codecomplete.py b/python/mozbuild/mozbuild/compilation/codecomplete.py
new file mode 100644
index 0000000000..b5a466b729
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/codecomplete.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for dealing with code completion.
+
+from mach.decorators import Command, CommandArgument
+
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.shellutil import split as shell_split
+
+
+# Introspection commands.
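+# Usage sketch (the source path is hypothetical):
+#   ./mach compileflags dom/base/nsINode.cpp
+# prints the effective, shell-quoted compile flags for that file, computed
+# from an already-built object directory.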
+
+
+@Command(
+    "compileflags",
+    category="devenv",
+    description="Display the compilation flags for a given source file",
+)
+@CommandArgument(
+    "what", default=None, help="Source file to display compilation flags for"
+)
+def compileflags(command_context, what):
+    from mozbuild.compilation import util
+    from mozbuild.util import resolve_target_to_make
+
+    if not util.check_top_objdir(command_context.topobjdir):
+        return 1
+
+    path_arg = command_context._wrap_path_argument(what)
+
+    make_dir, make_target = resolve_target_to_make(
+        command_context.topobjdir, path_arg.relpath()
+    )
+
+    if make_dir is None and make_target is None:
+        return 1
+
+    build_vars = util.get_build_vars(make_dir, command_context)
+
+    if what.endswith(".c"):
+        cc = "CC"
+        name = "COMPILE_CFLAGS"
+    else:
+        cc = "CXX"
+        name = "COMPILE_CXXFLAGS"
+
+    if name not in build_vars:
+        return
+
+    # Drop the first flag since that is the pathname of the compiler.
+    flags = (shell_split(build_vars[cc]) + shell_split(build_vars[name]))[1:]
+
+    print(" ".join(shell_quote(arg) for arg in util.sanitize_cflags(flags)))
diff --git a/python/mozbuild/mozbuild/compilation/database.py b/python/mozbuild/mozbuild/compilation/database.py
new file mode 100644
index 0000000000..e741c88a81
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/database.py
@@ -0,0 +1,244 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for generating a compilation database
+# (compile_commands.json).
+
+import os
+from collections import OrderedDict, defaultdict
+
+import mozpack.path as mozpath
+
+from mozbuild.backend.common import CommonBackend
+from mozbuild.frontend.data import (
+    ComputedFlags,
+    DirectoryTraversal,
+    PerSourceFlag,
+    Sources,
+    VariablePassthru,
+)
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.util import expand_variables
+
+
+class CompileDBBackend(CommonBackend):
+    def _init(self):
+        CommonBackend._init(self)
+
+        # The database we're going to dump out to.
+        self._db = OrderedDict()
+
+        # The cache for per-directory flags.
+        self._flags = {}
+
+        self._envs = {}
+        self._local_flags = defaultdict(dict)
+        self._per_source_flags = defaultdict(list)
+
+    def _build_cmd(self, cmd, filename, unified):
+        cmd = list(cmd)
+        if unified is None:
+            cmd.append(filename)
+        else:
+            cmd.append(unified)
+
+        return cmd
+
+    def consume_object(self, obj):
+        # These directories are difficult to handle and are dealt with
+        # specially later.
+        if obj.relsrcdir in (
+            "build/unix/elfhack",
+            "build/unix/elfhack/inject",
+            "build/clang-plugin",
+            "build/clang-plugin/tests",
+        ):
+            return True
+
+        consumed = CommonBackend.consume_object(self, obj)
+
+        if consumed:
+            return True
+
+        if isinstance(obj, DirectoryTraversal):
+            self._envs[obj.objdir] = obj.config
+
+        elif isinstance(obj, Sources):
+            # For other sources, include each source file.
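+            # Each call records one (objdir, filename, unified) tuple, which
+            # later becomes a single compile_commands.json entry.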
+ for f in obj.files: + self._build_db_line( + obj.objdir, obj.relsrcdir, obj.config, f, obj.canonical_suffix + ) + + elif isinstance(obj, VariablePassthru): + for var in ("MOZBUILD_CMFLAGS", "MOZBUILD_CMMFLAGS"): + if var in obj.variables: + self._local_flags[obj.objdir][var] = obj.variables[var] + + elif isinstance(obj, PerSourceFlag): + self._per_source_flags[obj.file_name].extend(obj.flags) + + elif isinstance(obj, ComputedFlags): + for var, flags in obj.get_flags(): + self._local_flags[obj.objdir]["COMPUTED_%s" % var] = flags + + return True + + def consume_finished(self): + CommonBackend.consume_finished(self) + + db = [] + + for (directory, filename, unified), cmd in self._db.items(): + env = self._envs[directory] + cmd = self._build_cmd(cmd, filename, unified) + variables = { + "DIST": mozpath.join(env.topobjdir, "dist"), + "DEPTH": env.topobjdir, + "MOZILLA_DIR": env.topsrcdir, + "topsrcdir": env.topsrcdir, + "topobjdir": env.topobjdir, + } + variables.update(self._local_flags[directory]) + c = [] + for a in cmd: + accum = "" + for word in expand_variables(a, variables).split(): + # We can't just split() the output of expand_variables since + # there can be spaces enclosed by quotes, e.g. '"foo bar"'. + # Handle that case by checking whether there are an even + # number of double-quotes in the word and appending it to + # the accumulator if not. Meanwhile, shlex.split() and + # mozbuild.shellutil.split() aren't able to properly handle + # this and break in various ways, so we can't use something + # off-the-shelf. + has_quote = bool(word.count('"') % 2) + if accum and has_quote: + c.append(accum + " " + word) + accum = "" + elif accum and not has_quote: + accum += " " + word + elif not accum and has_quote: + accum = word + else: + c.append(word) + # Tell clangd to keep parsing to the end of a file, regardless of + # how many errors are encountered. (Unified builds mean that we + # encounter a lot of errors parsing some files.) + c.insert(-1, "-ferror-limit=0") + + per_source_flags = self._per_source_flags.get(filename) + if per_source_flags is not None: + c.extend(per_source_flags) + db.append( + { + "directory": directory, + "command": " ".join(shell_quote(a) for a in c), + "file": mozpath.join(directory, filename), + } + ) + + import json + + outputfile = self._outputfile_path() + with self._write_file(outputfile) as jsonout: + json.dump(db, jsonout, indent=0) + + def _outputfile_path(self): + # Output the database (a JSON file) to objdir/compile_commands.json + return os.path.join(self.environment.topobjdir, "compile_commands.json") + + def _process_unified_sources_without_mapping(self, obj): + for f in list(sorted(obj.files)): + self._build_db_line( + obj.objdir, obj.relsrcdir, obj.config, f, obj.canonical_suffix + ) + + def _process_unified_sources(self, obj): + if not obj.have_unified_mapping: + return self._process_unified_sources_without_mapping(obj) + + # For unified sources, only include the unified source file. + # Note that unified sources are never used for host sources. 
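+        # Each f is a (unified_file, constituent_files) pair; the constituents
+        # are mapped to the unified file that actually gets compiled.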
+        for f in obj.unified_source_mapping:
+            self._build_db_line(
+                obj.objdir, obj.relsrcdir, obj.config, f[0], obj.canonical_suffix
+            )
+            for entry in f[1]:
+                self._build_db_line(
+                    obj.objdir,
+                    obj.relsrcdir,
+                    obj.config,
+                    entry,
+                    obj.canonical_suffix,
+                    unified=f[0],
+                )
+
+    def _handle_idl_manager(self, idl_manager):
+        pass
+
+    def _handle_ipdl_sources(
+        self,
+        ipdl_dir,
+        sorted_ipdl_sources,
+        sorted_nonstatic_ipdl_sources,
+        sorted_static_ipdl_sources,
+    ):
+        pass
+
+    def _handle_webidl_build(
+        self,
+        bindings_dir,
+        unified_source_mapping,
+        webidls,
+        expected_build_output_files,
+        global_define_files,
+    ):
+        for f in unified_source_mapping:
+            self._build_db_line(bindings_dir, None, self.environment, f[0], ".cpp")
+
+    COMPILERS = {
+        ".c": "CC",
+        ".cpp": "CXX",
+        ".m": "CC",
+        ".mm": "CXX",
+    }
+
+    CFLAGS = {
+        ".c": "CFLAGS",
+        ".cpp": "CXXFLAGS",
+        ".m": "CFLAGS",
+        ".mm": "CXXFLAGS",
+    }
+
+    def _get_compiler_args(self, cenv, canonical_suffix):
+        if canonical_suffix not in self.COMPILERS:
+            return None
+        return cenv.substs[self.COMPILERS[canonical_suffix]].split()
+
+    def _build_db_line(
+        self, objdir, reldir, cenv, filename, canonical_suffix, unified=None
+    ):
+        compiler_args = self._get_compiler_args(cenv, canonical_suffix)
+        if compiler_args is None:
+            return
+        db = self._db.setdefault(
+            (objdir, filename, unified),
+            compiler_args + ["-o", "/dev/null", "-c"],
+        )
+        reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)
+
+        def append_var(name):
+            value = cenv.substs.get(name)
+            if not value:
+                return
+            if isinstance(value, str):
+                value = value.split()
+            db.extend(value)
+
+        db.append("$(COMPUTED_%s)" % self.CFLAGS[canonical_suffix])
+        if canonical_suffix == ".m":
+            append_var("OS_COMPILE_CMFLAGS")
+            db.append("$(MOZBUILD_CMFLAGS)")
+        elif canonical_suffix == ".mm":
+            append_var("OS_COMPILE_CMMFLAGS")
+            db.append("$(MOZBUILD_CMMFLAGS)")
diff --git a/python/mozbuild/mozbuild/compilation/util.py b/python/mozbuild/mozbuild/compilation/util.py
new file mode 100644
index 0000000000..fc06382a3b
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/util.py
@@ -0,0 +1,64 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+
+def check_top_objdir(topobjdir):
+    top_make = os.path.join(topobjdir, "Makefile")
+    if not os.path.exists(top_make):
+        print(
+            "Your tree has not been built yet. Please run "
+            "|mach build| with no arguments."
+        )
+        return False
+    return True
+
+
+def get_build_vars(directory, cmd):
+    build_vars = {}
+
+    def on_line(line):
+        elements = [s.strip() for s in line.split("=", 1)]
+
+        if len(elements) != 2:
+            return
+
+        build_vars[elements[0]] = elements[1]
+
+    old_logger = cmd.log_manager.replace_terminal_handler(None)
+    try:
+        cmd._run_make(
+            directory=directory,
+            target="showbuild",
+            log=False,
+            print_directory=False,
+            num_jobs=1,
+            silent=True,
+            line_handler=on_line,
+        )
+    finally:
+        cmd.log_manager.replace_terminal_handler(old_logger)
+
+    return build_vars
+
+
+def sanitize_cflags(flags):
+    # We filter out -Xclang arguments, as clang-based tools typically choke
+    # when these flags are passed down to the clang driver. -Xclang tells the
+    # clang driver to pass whatever comes after it straight to clang cc1,
+    # which is why we skip -Xclang and the argument immediately after it.
+    # Here is an example: the following two invocations pass |-foo -bar -baz|
+    # to cc1:
+    #     clang -cc1 -foo -bar -baz
+    #     clang -Xclang -foo -Xclang -bar -Xclang -baz
+    sanitized = []
+    saw_xclang = False
+    for flag in flags:
+        if flag == "-Xclang":
+            saw_xclang = True
+        elif saw_xclang:
+            saw_xclang = False
+        else:
+            sanitized.append(flag)
+    return sanitized
diff --git a/python/mozbuild/mozbuild/compilation/warnings.py b/python/mozbuild/mozbuild/compilation/warnings.py
new file mode 100644
index 0000000000..4f0ef57e51
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/warnings.py
@@ -0,0 +1,392 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for dealing with compiler warnings.
+
+import errno
+import io
+import json
+import os
+import re
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.util import hash_file
+
+# Regular expression to strip ANSI color sequences from a string. This is
+# needed to properly analyze Clang compiler output, which may be colorized.
+# It assumes ANSI escape sequences.
+RE_STRIP_COLORS = re.compile(r"\x1b\[[\d;]+m")
+
+# This captures Clang diagnostics with the standard formatting.
+RE_CLANG_WARNING_AND_ERROR = re.compile(
+    r"""
+    (?P<file>[^:]+)
+    :
+    (?P<line>\d+)
+    :
+    (?P<column>\d+)
+    :
+    \s(?P<type>warning|error):\s
+    (?P<message>.+)
+    \[(?P<flag>[^\]]+)
+    """,
+    re.X,
+)
+
+# This captures the clang-cl warning format.
+RE_CLANG_CL_WARNING_AND_ERROR = re.compile(
+    r"""
+    (?P<file>.*)
+    \((?P<line>\d+),(?P<column>\d+)\)
+    \s?:\s+(?P<type>warning|error):\s
+    (?P<message>.*)
+    \[(?P<flag>[^\]]+)
+    """,
+    re.X,
+)
+
+IN_FILE_INCLUDED_FROM = "In file included from "
+
+
+class CompilerWarning(dict):
+    """Represents an individual compiler warning."""
+
+    def __init__(self):
+        dict.__init__(self)
+
+        self["filename"] = None
+        self["line"] = None
+        self["column"] = None
+        self["message"] = None
+        self["flag"] = None
+
+    def copy(self):
+        """Returns a copy of this compiler warning."""
+        w = CompilerWarning()
+        w.update(self)
+        return w
+
+    # Since we inherit from dict, functools.total_ordering gets confused.
+    # Thus, we define a key function, a generic comparison, and then
+    # implement all the rich operators with those; the approach is from:
+    # http://regebro.wordpress.com/2010/12/13/python-implementing-rich-comparison-the-correct-way/
+    def _cmpkey(self):
+        return (self["filename"], self["line"], self["column"])
+
+    def _compare(self, other, func):
+        if not isinstance(other, CompilerWarning):
+            return NotImplemented
+
+        return func(self._cmpkey(), other._cmpkey())
+
+    def __eq__(self, other):
+        return self._compare(other, lambda s, o: s == o)
+
+    def __ne__(self, other):
+        return self._compare(other, lambda s, o: s != o)
+
+    def __lt__(self, other):
+        return self._compare(other, lambda s, o: s < o)
+
+    def __le__(self, other):
+        return self._compare(other, lambda s, o: s <= o)
+
+    def __gt__(self, other):
+        return self._compare(other, lambda s, o: s > o)
+
+    def __ge__(self, other):
+        return self._compare(other, lambda s, o: s >= o)
+
+    def __hash__(self):
+        """Define so this can exist inside a set, etc."""
+        return hash(tuple(sorted(self.items())))
+
+
+class WarningsDatabase(object):
+    """Holds a collection of warnings.
+
+    The warnings database is a semi-intelligent container that holds warnings
+    encountered during builds.
+
+    The warnings database is backed by a JSON file, but that is transparent
+    to consumers.
+ + Under most circumstances, the warnings database is insert only. When a + warning is encountered, the caller simply blindly inserts it into the + database. The database figures out whether it is a dupe, etc. + + During the course of development, it is common for warnings to change + slightly as source code changes. For example, line numbers will disagree. + The WarningsDatabase handles this by storing the hash of a file a warning + occurred in. At warning insert time, if the hash of the file does not match + what is stored in the database, the existing warnings for that file are + purged from the database. + + Callers should periodically prune old, invalid warnings from the database + by calling prune(). A good time to do this is at the end of a build. + """ + + def __init__(self): + """Create an empty database.""" + self._files = {} + + def __len__(self): + i = 0 + for value in self._files.values(): + i += len(value["warnings"]) + + return i + + def __iter__(self): + for value in self._files.values(): + for warning in value["warnings"]: + yield warning + + def __contains__(self, item): + for value in self._files.values(): + for warning in value["warnings"]: + if warning == item: + return True + + return False + + @property + def warnings(self): + """All the CompilerWarning instances in this database.""" + for value in self._files.values(): + for w in value["warnings"]: + yield w + + def type_counts(self, dirpath=None): + """Returns a mapping of warning types to their counts.""" + + types = {} + for value in self._files.values(): + for warning in value["warnings"]: + if dirpath and not mozpath.normsep(warning["filename"]).startswith( + dirpath + ): + continue + flag = warning["flag"] + count = types.get(flag, 0) + count += 1 + + types[flag] = count + + return types + + def has_file(self, filename): + """Whether we have any warnings for the specified file.""" + return filename in self._files + + def warnings_for_file(self, filename): + """Obtain the warnings for the specified file.""" + f = self._files.get(filename, {"warnings": []}) + + for warning in f["warnings"]: + yield warning + + def insert(self, warning, compute_hash=True): + assert isinstance(warning, CompilerWarning) + + filename = warning["filename"] + + new_hash = None + + if compute_hash: + new_hash = hash_file(filename) + + if filename in self._files: + if new_hash != self._files[filename]["hash"]: + del self._files[filename] + + value = self._files.get( + filename, + { + "hash": new_hash, + "warnings": set(), + }, + ) + + value["warnings"].add(warning) + + self._files[filename] = value + + def prune(self): + """Prune the contents of the database. + + This removes warnings that are no longer valid. A warning is no longer + valid if the file it was in no longer exists or if the content has + changed. + + The check for changed content catches the case where a file previously + contained warnings but no longer does. + """ + + # Need to calculate up front since we are mutating original object. + filenames = list(six.iterkeys(self._files)) + for filename in filenames: + if not os.path.exists(filename): + del self._files[filename] + continue + + if self._files[filename]["hash"] is None: + continue + + current_hash = hash_file(filename) + if current_hash != self._files[filename]["hash"]: + del self._files[filename] + continue + + def serialize(self, fh): + """Serialize the database to an open file handle.""" + obj = {"files": {}} + + # All this hackery because JSON can't handle sets. 
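+        # Copy each per-file dict, converting the "warnings" set into a list
+        # so json.dumps() below can encode it.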
+ for k, v in six.iteritems(self._files): + obj["files"][k] = {} + + for k2, v2 in six.iteritems(v): + normalized = v2 + if isinstance(v2, set): + normalized = list(v2) + obj["files"][k][k2] = normalized + + to_write = six.ensure_text(json.dumps(obj, indent=2)) + fh.write(to_write) + + def deserialize(self, fh): + """Load serialized content from a handle into the current instance.""" + obj = json.load(fh) + + self._files = obj["files"] + + # Normalize data types. + for filename, value in six.iteritems(self._files): + if "warnings" in value: + normalized = set() + for d in value["warnings"]: + w = CompilerWarning() + w.update(d) + normalized.add(w) + + self._files[filename]["warnings"] = normalized + + def load_from_file(self, filename): + """Load the database from a file.""" + with io.open(filename, "r", encoding="utf-8") as fh: + self.deserialize(fh) + + def save_to_file(self, filename): + """Save the database to a file.""" + try: + # Ensure the directory exists + os.makedirs(os.path.dirname(filename)) + except OSError as e: + if e.errno != errno.EEXIST: + raise + with io.open(filename, "w", encoding="utf-8", newline="\n") as fh: + self.serialize(fh) + + +class WarningsCollector(object): + """Collects warnings from text data. + + Instances of this class receive data (usually the output of compiler + invocations) and parse it into warnings. + + The collector works by incrementally receiving data, usually line-by-line + output from the compiler. Therefore, it can maintain state to parse + multi-line warning messages. + """ + + def __init__(self, cb, objdir=None): + """Initialize a new collector. + + ``cb`` is a callable that is called with a ``CompilerWarning`` + instance whenever a new warning is parsed. + + ``objdir`` is the object directory. Used for normalizing paths. + """ + self.cb = cb + self.objdir = objdir + self.included_from = [] + + def process_line(self, line): + """Take a line of text and process it for a warning.""" + + filtered = RE_STRIP_COLORS.sub("", line) + + # Clang warnings in files included from the one(s) being compiled will + # start with "In file included from /path/to/file:line:". Here, we + # record those. + if filtered.startswith(IN_FILE_INCLUDED_FROM): + included_from = filtered[len(IN_FILE_INCLUDED_FROM) :] + + parts = included_from.split(":") + + self.included_from.append(parts[0]) + + return + + warning = CompilerWarning() + filename = None + + # TODO make more efficient so we run minimal regexp matches. + match_clang = RE_CLANG_WARNING_AND_ERROR.match(filtered) + match_clang_cl = RE_CLANG_CL_WARNING_AND_ERROR.match(filtered) + if match_clang: + d = match_clang.groupdict() + + filename = d["file"] + warning["type"] = d["type"] + warning["line"] = int(d["line"]) + warning["column"] = int(d["column"]) + warning["flag"] = d["flag"] + warning["message"] = d["message"].rstrip() + + elif match_clang_cl: + d = match_clang_cl.groupdict() + + filename = d["file"] + warning["type"] = d["type"] + warning["line"] = int(d["line"]) + warning["column"] = int(d["column"]) + warning["flag"] = d["flag"] + warning["message"] = d["message"].rstrip() + + else: + self.included_from = [] + return None + + filename = os.path.normpath(filename) + + # Sometimes we get relative includes. These typically point to files in + # the object directory. We try to resolve the relative path. 
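+        # (_normalize_relative_path() below anchors the path against
+        # dist/include in the objdir or against the directories of the files
+        # this one was included from.)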
+ if not os.path.isabs(filename): + filename = self._normalize_relative_path(filename) + + warning["filename"] = filename + + self.cb(warning) + + return warning + + def _normalize_relative_path(self, filename): + # Special case files in dist/include. + idx = filename.find("/dist/include") + if idx != -1: + return self.objdir + filename[idx:] + + for included_from in self.included_from: + source_dir = os.path.dirname(included_from) + + candidate = os.path.normpath(os.path.join(source_dir, filename)) + + if os.path.exists(candidate): + return candidate + + return filename diff --git a/python/mozbuild/mozbuild/config_status.py b/python/mozbuild/mozbuild/config_status.py new file mode 100644 index 0000000000..8e8a7f625b --- /dev/null +++ b/python/mozbuild/mozbuild/config_status.py @@ -0,0 +1,184 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Combined with build/autoconf/config.status.m4, ConfigStatus is an almost +# drop-in replacement for autoconf 2.13's config.status, with features +# borrowed from autoconf > 2.5, and additional features. + +import logging +import os +import sys +import time +from argparse import ArgumentParser +from itertools import chain + +from mach.logging import LoggingManager + +from mozbuild.backend import backends, get_backend_class +from mozbuild.backend.configenvironment import ConfigEnvironment +from mozbuild.base import MachCommandConditions +from mozbuild.frontend.emitter import TreeMetadataEmitter +from mozbuild.frontend.reader import BuildReader +from mozbuild.mozinfo import write_mozinfo +from mozbuild.util import FileAvoidWrite, process_time + +log_manager = LoggingManager() + + +ANDROID_IDE_ADVERTISEMENT = """ +============= +ADVERTISEMENT + +You are building GeckoView. After your build completes, you can open +the top source directory in Android Studio directly and build using Gradle. +See the documentation at + +https://firefox-source-docs.mozilla.org/mobile/android/geckoview/contributor/geckoview-quick-start.html#build-using-android-studio +============= +""".strip() + + +def config_status( + topobjdir=".", + topsrcdir=".", + defines=None, + substs=None, + source=None, + mozconfig=None, + args=sys.argv[1:], +): + """Main function, providing config.status functionality. + + Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS + variables. + + Without the -n option, this program acts as config.status and considers + the current directory as the top object directory, even when config.status + is in a different directory. It will, however, treat the directory + containing config.status as the top object directory with the -n option. + + The options to this function are passed when creating the + ConfigEnvironment. These lists, as well as the actual wrapper script + around this function, are meant to be generated by configure. + See build/autoconf/config.status.m4. + """ + + if "CONFIG_FILES" in os.environ: + raise Exception( + "Using the CONFIG_FILES environment variable is not " "supported." + ) + if "CONFIG_HEADERS" in os.environ: + raise Exception( + "Using the CONFIG_HEADERS environment variable is not " "supported." 
+ ) + + if not os.path.isabs(topsrcdir): + raise Exception( + "topsrcdir must be defined as an absolute directory: " "%s" % topsrcdir + ) + + default_backends = ["RecursiveMake"] + default_backends = (substs or {}).get("BUILD_BACKENDS", ["RecursiveMake"]) + + parser = ArgumentParser() + parser.add_argument( + "-v", + "--verbose", + dest="verbose", + action="store_true", + help="display verbose output", + ) + parser.add_argument( + "-n", + dest="not_topobjdir", + action="store_true", + help="do not consider current directory as top object directory", + ) + parser.add_argument( + "-d", "--diff", action="store_true", help="print diffs of changed files." + ) + parser.add_argument( + "-b", + "--backend", + nargs="+", + choices=sorted(backends), + default=default_backends, + help="what backend to build (default: %s)." % " ".join(default_backends), + ) + parser.add_argument( + "--dry-run", action="store_true", help="do everything except writing files out." + ) + options = parser.parse_args(args) + + # Without -n, the current directory is meant to be the top object directory + if not options.not_topobjdir: + topobjdir = os.path.realpath(".") + + env = ConfigEnvironment( + topsrcdir, + topobjdir, + defines=defines, + substs=substs, + source=source, + mozconfig=mozconfig, + ) + + with FileAvoidWrite(os.path.join(topobjdir, "mozinfo.json")) as f: + write_mozinfo(f, env, os.environ) + + cpu_start = process_time() + time_start = time.monotonic() + + # Make appropriate backend instances, defaulting to RecursiveMakeBackend, + # or what is in BUILD_BACKENDS. + selected_backends = [get_backend_class(b)(env) for b in options.backend] + + if options.dry_run: + for b in selected_backends: + b.dry_run = True + + reader = BuildReader(env) + emitter = TreeMetadataEmitter(env) + # This won't actually do anything because of the magic of generators. + definitions = emitter.emit(reader.read_topsrcdir()) + + log_level = logging.DEBUG if options.verbose else logging.INFO + log_manager.add_terminal_logging(level=log_level) + log_manager.enable_unstructured() + + print("Reticulating splines...", file=sys.stderr) + if len(selected_backends) > 1: + definitions = list(definitions) + + for the_backend in selected_backends: + the_backend.consume(definitions) + + execution_time = 0.0 + for obj in chain((reader, emitter), selected_backends): + summary = obj.summary() + print(summary, file=sys.stderr) + execution_time += summary.execution_time + if hasattr(obj, "gyp_summary"): + summary = obj.gyp_summary() + print(summary, file=sys.stderr) + + cpu_time = process_time() - cpu_start + wall_time = time.monotonic() - time_start + efficiency = cpu_time / wall_time if wall_time else 100 + untracked = wall_time - execution_time + + print( + "Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: " + "{:.0%}; Untracked: {:.2f}s".format(wall_time, cpu_time, efficiency, untracked), + file=sys.stderr, + ) + + if options.diff: + for the_backend in selected_backends: + for path, diff in sorted(the_backend.file_diffs.items()): + print("\n".join(diff)) + + # Advertise Android Studio if it is appropriate. + if MachCommandConditions.is_android(env): + print(ANDROID_IDE_ADVERTISEMENT) diff --git a/python/mozbuild/mozbuild/configure/__init__.py b/python/mozbuild/mozbuild/configure/__init__.py new file mode 100644 index 0000000000..f60f179d6b --- /dev/null +++ b/python/mozbuild/mozbuild/configure/__init__.py @@ -0,0 +1,1311 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import codecs +import inspect +import logging +import os +import re +import sys +import types +from collections import OrderedDict +from contextlib import contextmanager +from functools import wraps + +import mozpack.path as mozpath +import six +from six.moves import builtins as __builtin__ + +from mozbuild.configure.help import HelpFormatter +from mozbuild.configure.options import ( + HELP_OPTIONS_CATEGORY, + CommandLineHelper, + ConflictingOptionError, + InvalidOptionError, + Option, + OptionValue, +) +from mozbuild.configure.util import ConfigureOutputHandler, LineIO, getpreferredencoding +from mozbuild.util import ( + ReadOnlyDict, + ReadOnlyNamespace, + exec_, + memoize, + memoized_property, + system_encoding, +) + +# TRACE logging level, below (thus more verbose than) DEBUG +TRACE = 5 + + +class ConfigureError(Exception): + pass + + +class SandboxDependsFunction(object): + """Sandbox-visible representation of @depends functions.""" + + def __init__(self, unsandboxed): + self._or = unsandboxed.__or__ + self._and = unsandboxed.__and__ + self._getattr = unsandboxed.__getattr__ + + def __call__(self, *arg, **kwargs): + raise ConfigureError("The `%s` function may not be called" % self.__name__) + + def __or__(self, other): + if not isinstance(other, SandboxDependsFunction): + raise ConfigureError( + "Can only do binary arithmetic operations " + "with another @depends function." + ) + return self._or(other).sandboxed + + def __and__(self, other): + if not isinstance(other, SandboxDependsFunction): + raise ConfigureError( + "Can only do binary arithmetic operations " + "with another @depends function." + ) + return self._and(other).sandboxed + + def __cmp__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __eq__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __hash__(self): + return object.__hash__(self) + + def __ne__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __lt__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __le__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __gt__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __ge__(self, other): + raise ConfigureError("Cannot compare @depends functions.") + + def __getattr__(self, key): + return self._getattr(key).sandboxed + + def __nonzero__(self): + raise ConfigureError("Cannot do boolean operations on @depends functions.") + + +class DependsFunction(object): + __slots__ = ( + "_func", + "_name", + "dependencies", + "when", + "sandboxed", + "sandbox", + "_result", + ) + + def __init__(self, sandbox, func, dependencies, when=None): + assert isinstance(sandbox, ConfigureSandbox) + assert not inspect.isgeneratorfunction(func) + # Allow non-functions when there are no dependencies. This is equivalent + # to passing a lambda that returns the given value. + if not (inspect.isroutine(func) or not dependencies): + print(func) + assert inspect.isroutine(func) or not dependencies + self._func = func + self._name = getattr(func, "__name__", None) + self.dependencies = dependencies + self.sandboxed = wraps(func)(SandboxDependsFunction(self)) + self.sandbox = sandbox + self.when = when + sandbox._depends[self.sandboxed] = self + + # Only @depends functions with a dependency on '--help' are executed + # immediately. 
Everything else is queued for later execution. + if sandbox._help_option in dependencies: + sandbox._value_for(self) + elif not sandbox._help: + sandbox._execution_queue.append((sandbox._value_for, (self,))) + + @property + def name(self): + return self._name + + @name.setter + def name(self, value): + self._name = value + + @property + def sandboxed_dependencies(self): + return [ + d.sandboxed if isinstance(d, DependsFunction) else d + for d in self.dependencies + ] + + @memoize + def result(self): + if self.when and not self.sandbox._value_for(self.when): + return None + + if inspect.isroutine(self._func): + resolved_args = [self.sandbox._value_for(d) for d in self.dependencies] + return self._func(*resolved_args) + return self._func + + def __repr__(self): + return "<%s %s(%s)>" % ( + self.__class__.__name__, + self.name, + ", ".join(repr(d) for d in self.dependencies), + ) + + def __or__(self, other): + if isinstance(other, SandboxDependsFunction): + other = self.sandbox._depends.get(other) + assert isinstance(other, DependsFunction) + assert self.sandbox is other.sandbox + return CombinedDependsFunction(self.sandbox, self.or_impl, (self, other)) + + @staticmethod + def or_impl(iterable): + # Applies "or" to all the items of iterable. + # e.g. if iterable contains a, b and c, returns `a or b or c`. + for i in iterable: + if i: + return i + return i + + def __and__(self, other): + if isinstance(other, SandboxDependsFunction): + other = self.sandbox._depends.get(other) + assert isinstance(other, DependsFunction) + assert self.sandbox is other.sandbox + return CombinedDependsFunction(self.sandbox, self.and_impl, (self, other)) + + @staticmethod + def and_impl(iterable): + # Applies "and" to all the items of iterable. + # e.g. if iterable contains a, b and c, returns `a and b and c`. + for i in iterable: + if not i: + return i + return i + + def __getattr__(self, key): + if key.startswith("_"): + return super(DependsFunction, self).__getattr__(key) + # Our function may return None or an object that simply doesn't have + # the wanted key. In that case, just return None. + return TrivialDependsFunction( + self.sandbox, lambda x: getattr(x, key, None), [self], self.when + ) + + +class TrivialDependsFunction(DependsFunction): + """Like a DependsFunction, but the linter won't expect it to have a + dependency on --help ever.""" + + +class CombinedDependsFunction(DependsFunction): + def __init__(self, sandbox, func, dependencies): + flatten_deps = [] + for d in dependencies: + if isinstance(d, CombinedDependsFunction) and d._func is func: + for d2 in d.dependencies: + if d2 not in flatten_deps: + flatten_deps.append(d2) + elif d not in flatten_deps: + flatten_deps.append(d) + + super(CombinedDependsFunction, self).__init__(sandbox, func, flatten_deps) + + @memoize + def result(self): + resolved_args = (self.sandbox._value_for(d) for d in self.dependencies) + return self._func(resolved_args) + + def __eq__(self, other): + return ( + isinstance(other, self.__class__) + and self._func is other._func + and set(self.dependencies) == set(other.dependencies) + ) + + def __hash__(self): + return object.__hash__(self) + + def __ne__(self, other): + return not self == other + + +class SandboxedGlobal(dict): + """Identifiable dict type for use as function global""" + + +def forbidden_import(*args, **kwargs): + raise ImportError("Importing modules is forbidden") + + +class ConfigureSandbox(dict): + """Represents a sandbox for executing Python code for build configuration. 
+ This is a different kind of sandboxing than the one used for moz.build + processing. + + The sandbox has 9 primitives: + - option + - depends + - template + - imports + - include + - set_config + - set_define + - imply_option + - only_when + + `option`, `include`, `set_config`, `set_define` and `imply_option` are + functions. `depends`, `template`, and `imports` are decorators. `only_when` + is a context_manager. + + These primitives are declared as name_impl methods to this class and + the mapping name -> name_impl is done automatically in __getitem__. + + Additional primitives should be frowned upon to keep the sandbox itself as + simple as possible. Instead, helpers should be created within the sandbox + with the existing primitives. + + The sandbox is given, at creation, a dict where the yielded configuration + will be stored. + + config = {} + sandbox = ConfigureSandbox(config) + sandbox.run(path) + do_stuff(config) + """ + + # The default set of builtins. We expose unicode as str to make sandboxed + # files more python3-ready. + BUILTINS = ReadOnlyDict( + { + b: getattr(__builtin__, b, None) + for b in ( + "AssertionError", + "False", + "None", + "True", + "__build_class__", # will be None on py2 + "all", + "any", + "bool", + "dict", + "enumerate", + "getattr", + "hasattr", + "int", + "isinstance", + "len", + "list", + "max", + "min", + "range", + "set", + "sorted", + "tuple", + "zip", + ) + }, + __import__=forbidden_import, + str=six.text_type, + ) + + # Expose a limited set of functions from os.path + OS = ReadOnlyNamespace( + path=ReadOnlyNamespace( + **{ + k: getattr(mozpath, k, getattr(os.path, k)) + for k in ( + "abspath", + "basename", + "dirname", + "isabs", + "join", + "normcase", + "normpath", + "realpath", + "relpath", + ) + } + ) + ) + + def __init__( + self, + config, + environ=os.environ, + argv=sys.argv, + stdout=sys.stdout, + stderr=sys.stderr, + logger=None, + ): + dict.__setitem__(self, "__builtins__", self.BUILTINS) + + self._environ = dict(environ) + + self._paths = [] + self._all_paths = set() + self._templates = set() + # Associate SandboxDependsFunctions to DependsFunctions. + self._depends = OrderedDict() + self._seen = set() + # Store the @imports added to a given function. + self._imports = {} + + self._options = OrderedDict() + # Store raw option (as per command line or environment) for each Option + self._raw_options = OrderedDict() + + # Store options added with `imply_option`, and the reason they were + # added (which can either have been given to `imply_option`, or + # inferred. Their order matters, so use a list. + self._implied_options = [] + + # Store all results from _prepare_function + self._prepared_functions = set() + + # Queue of functions to execute, with their arguments + self._execution_queue = [] + + # Store the `when`s associated to some options. + self._conditions = {} + + # A list of conditions to apply as a default `when` for every *_impl() + self._default_conditions = [] + + self._helper = CommandLineHelper(environ, argv) + + assert isinstance(config, dict) + self._config = config + + # Tracks how many templates "deep" we are in the stack. 
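+        # (option_impl() later multiplies this depth by 3 to compute the
+        # `define_depth` used when inferring an Option's help category.)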
+ self._template_depth = 0 + + logging.addLevelName(TRACE, "TRACE") + if logger is None: + logger = moz_logger = logging.getLogger("moz.configure") + logger.setLevel(logging.DEBUG) + formatter = logging.Formatter("%(levelname)s: %(message)s") + handler = ConfigureOutputHandler(stdout, stderr) + handler.setFormatter(formatter) + queue_debug = handler.queue_debug + logger.addHandler(handler) + + else: + assert isinstance(logger, logging.Logger) + moz_logger = None + + @contextmanager + def queue_debug(): + yield + + self._logger = logger + + # Some callers will manage to log a bytestring with characters in it + # that can't be converted to ascii. Make our log methods robust to this + # by detecting the encoding that a producer is likely to have used. + encoding = getpreferredencoding() + + def wrapped_log_method(logger, key): + method = getattr(logger, key) + + def wrapped(*args, **kwargs): + out_args = [ + six.ensure_text(arg, encoding=encoding or "utf-8") + if isinstance(arg, six.binary_type) + else arg + for arg in args + ] + return method(*out_args, **kwargs) + + return wrapped + + log_namespace = { + k: wrapped_log_method(logger, k) + for k in ("debug", "info", "warning", "error") + } + log_namespace["queue_debug"] = queue_debug + self.log_impl = ReadOnlyNamespace(**log_namespace) + + self._help = None + self._help_option = self.option_impl( + "--help", help="print this message", category=HELP_OPTIONS_CATEGORY + ) + self._seen.add(self._help_option) + + self._always = DependsFunction(self, lambda: True, []) + self._never = DependsFunction(self, lambda: False, []) + + if self._value_for(self._help_option): + self._help = HelpFormatter(argv[0]) + self._help.add(self._help_option) + elif moz_logger: + handler = logging.FileHandler( + "config.log", mode="w", delay=True, encoding="utf-8" + ) + handler.setFormatter(formatter) + logger.addHandler(handler) + + def include_file(self, path): + """Include one file in the sandbox. Users of this class probably want + to use `run` instead. + + Note: this will execute all template invocations, as well as @depends + functions that depend on '--help', but nothing else. + """ + + if self._paths: + path = mozpath.join(mozpath.dirname(self._paths[-1]), path) + path = mozpath.normpath(path) + if not mozpath.basedir(path, (mozpath.dirname(self._paths[0]),)): + raise ConfigureError( + "Cannot include `%s` because it is not in a subdirectory " + "of `%s`" % (path, mozpath.dirname(self._paths[0])) + ) + else: + path = mozpath.realpath(mozpath.abspath(path)) + if path in self._all_paths: + raise ConfigureError( + "Cannot include `%s` because it was included already." % path + ) + self._paths.append(path) + self._all_paths.add(path) + + with open(path, "rb") as fh: + source = fh.read() + + code = compile(source, path, "exec") + + exec_(code, self) + + self._paths.pop(-1) + + def run(self, path=None): + """Executes the given file within the sandbox, as well as everything + pending from any other included file, and ensure the overall + consistency of the executed script(s).""" + if path: + self.include_file(path) + + for option in six.itervalues(self._options): + # All options must be referenced by some @depends function + if option not in self._seen: + raise ConfigureError( + "Option `%s` is not handled ; reference it with a @depends" + % option.option + ) + + self._value_for(option) + + # All implied options should exist. 
+ for implied_option in self._implied_options: + value = self._resolve(implied_option.value) + if value is not None: + # There are two ways to end up here: either the implied option + # is unknown, or it's known but there was a dependency loop + # that prevented the implication from being applied. + option = self._options.get(implied_option.name) + if not option: + raise ConfigureError( + "`%s`, emitted from `%s` line %d, is unknown." + % ( + implied_option.option, + implied_option.caller[1], + implied_option.caller[2], + ) + ) + # If the option is known, check that the implied value doesn't + # conflict with what value was attributed to the option. + if implied_option.when and not self._value_for(implied_option.when): + continue + option_value = self._value_for_option(option) + if value != option_value: + reason = implied_option.reason + if isinstance(reason, Option): + reason = self._raw_options.get(reason) or reason.option + reason = reason.split("=", 1)[0] + value = OptionValue.from_(value) + raise InvalidOptionError( + "'%s' implied by '%s' conflicts with '%s' from the %s" + % ( + value.format(option.option), + reason, + option_value.format(option.option), + option_value.origin, + ) + ) + + # All options should have been removed (handled) by now. + for arg in self._helper: + without_value = arg.split("=", 1)[0] + msg = "Unknown option: %s" % without_value + if self._help: + self._logger.warning(msg) + else: + raise InvalidOptionError(msg) + + # Run the execution queue + for func, args in self._execution_queue: + func(*args) + + if self._help: + with LineIO(self.log_impl.info) as out: + self._help.usage(out) + + def __getitem__(self, key): + impl = "%s_impl" % key + func = getattr(self, impl, None) + if func: + return func + + return super(ConfigureSandbox, self).__getitem__(key) + + def __setitem__(self, key, value): + if ( + key in self.BUILTINS + or key == "__builtins__" + or hasattr(self, "%s_impl" % key) + ): + raise KeyError("Cannot reassign builtins") + + if inspect.isfunction(value) and value not in self._templates: + value = self._prepare_function(value) + + elif ( + not isinstance(value, SandboxDependsFunction) + and value not in self._templates + and not (inspect.isclass(value) and issubclass(value, Exception)) + ): + raise KeyError( + "Cannot assign `%s` because it is neither a " + "@depends nor a @template" % key + ) + + if isinstance(value, SandboxDependsFunction): + self._depends[value].name = key + + return super(ConfigureSandbox, self).__setitem__(key, value) + + def _resolve(self, arg): + if isinstance(arg, SandboxDependsFunction): + return self._value_for_depends(self._depends[arg]) + return arg + + def _value_for(self, obj): + if isinstance(obj, SandboxDependsFunction): + assert obj in self._depends + return self._value_for_depends(self._depends[obj]) + + elif isinstance(obj, DependsFunction): + return self._value_for_depends(obj) + + elif isinstance(obj, Option): + return self._value_for_option(obj) + + assert False + + @memoize + def _value_for_depends(self, obj): + value = obj.result() + self._logger.log(TRACE, "%r = %r", obj, value) + return value + + @memoize + def _value_for_option(self, option): + implied = {} + matching_implied_options = [ + o for o in self._implied_options if o.name in (option.name, option.env) + ] + # Update self._implied_options before going into the loop with the non-matching + # options. 
+ self._implied_options = [ + o for o in self._implied_options if o.name not in (option.name, option.env) + ] + + for implied_option in matching_implied_options: + if implied_option.when and not self._value_for(implied_option.when): + continue + + value = self._resolve(implied_option.value) + + if value is not None: + value = OptionValue.from_(value) + opt = value.format(implied_option.option) + self._helper.add(opt, "implied") + implied[opt] = implied_option + + try: + value, option_string = self._helper.handle(option) + except ConflictingOptionError as e: + reason = implied[e.arg].reason + if isinstance(reason, Option): + reason = self._raw_options.get(reason) or reason.option + reason = reason.split("=", 1)[0] + raise InvalidOptionError( + "'%s' implied by '%s' conflicts with '%s' from the %s" + % (e.arg, reason, e.old_arg, e.old_origin) + ) + + if value.origin == "implied": + recursed_value = getattr(self, "__value_for_option").get((option,)) + if recursed_value is not None: + _, filename, line, _, _, _ = implied[value.format(option.option)].caller + raise ConfigureError( + "'%s' appears somewhere in the direct or indirect dependencies when " + "resolving imply_option at %s:%d" % (option.option, filename, line) + ) + + if option_string: + self._raw_options[option] = option_string + + when = self._conditions.get(option) + # If `when` resolves to a false-ish value, we always return None. + # This makes option(..., when='--foo') equivalent to + # option(..., when=depends('--foo')(lambda x: x)). + if when and not self._value_for(when) and value is not None: + # If the option was passed explicitly, we throw an error that + # the option is not available. Except when the option was passed + # from the environment, because that would be too cumbersome. + if value.origin not in ("default", "environment"): + raise InvalidOptionError( + "%s is not available in this configuration" + % option_string.split("=", 1)[0] + ) + self._logger.log(TRACE, "%r = None", option) + return None + + self._logger.log(TRACE, "%r = %r", option, value) + return value + + def _dependency(self, arg, callee_name, arg_name=None): + if isinstance(arg, six.string_types): + prefix, name, values = Option.split_option(arg) + if values != (): + raise ConfigureError("Option must not contain an '='") + if name not in self._options: + raise ConfigureError( + "'%s' is not a known option. " "Maybe it's declared too late?" % arg + ) + arg = self._options[name] + self._seen.add(arg) + elif isinstance(arg, SandboxDependsFunction): + assert arg in self._depends + arg = self._depends[arg] + else: + raise TypeError( + "Cannot use object of type '%s' as %sargument to %s" + % ( + type(arg).__name__, + "`%s` " % arg_name if arg_name else "", + callee_name, + ) + ) + return arg + + def _normalize_when(self, when, callee_name): + if when is True: + when = self._always + elif when is False: + when = self._never + elif when is not None: + when = self._dependency(when, callee_name, "when") + + if self._default_conditions: + # Create a pseudo @depends function for the combination of all + # default conditions and `when`. + dependencies = [when] if when else [] + dependencies.extend(self._default_conditions) + if len(dependencies) == 1: + return dependencies[0] + return CombinedDependsFunction(self, all, dependencies) + return when + + @contextmanager + def only_when_impl(self, when): + """Implementation of only_when() + + `only_when` is a context manager that essentially makes calls to + other sandbox functions within the context block ignored. 
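+
+        A hypothetical sketch of how this reads in a configure file (the
+        option names are illustrative and assume `--enable-foo` was
+        declared earlier):
+
+            with only_when('--enable-foo'):
+                option('--enable-bar', help='enable bar')
+
+        Here `--enable-bar` is only available when `--enable-foo` is set;
+        otherwise it behaves as if guarded by a false `when` condition.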
+ """ + when = self._normalize_when(when, "only_when") + if when and self._default_conditions[-1:] != [when]: + self._default_conditions.append(when) + yield + self._default_conditions.pop() + else: + yield + + def option_impl(self, *args, **kwargs): + """Implementation of option() + This function creates and returns an Option() object, passing it the + resolved arguments (uses the result of functions when functions are + passed). In most cases, the result of this function is not expected to + be used. + Command line argument/environment variable parsing for this Option is + handled here. + """ + when = self._normalize_when(kwargs.get("when"), "option") + args = [self._resolve(arg) for arg in args] + kwargs = {k: self._resolve(v) for k, v in six.iteritems(kwargs) if k != "when"} + # The Option constructor needs to look up the stack to infer a category + # for the Option, since the category is based on the filename where the + # Option is defined. However, if the Option is defined in a template, we + # want the category to reference the caller of the template rather than + # the caller of the option() function. + kwargs["define_depth"] = self._template_depth * 3 + option = Option(*args, **kwargs) + if when: + self._conditions[option] = when + if option.name in self._options: + raise ConfigureError("Option `%s` already defined" % option.option) + if option.env in self._options: + raise ConfigureError("Option `%s` already defined" % option.env) + if option.name: + self._options[option.name] = option + if option.env: + self._options[option.env] = option + + if self._help and (when is None or self._value_for(when)): + self._help.add(option) + + return option + + def depends_impl(self, *args, **kwargs): + """Implementation of @depends() + This function is a decorator. It returns a function that subsequently + takes a function and returns a dummy function. The dummy function + identifies the actual function for the sandbox, while preventing + further function calls from within the sandbox. + + @depends() takes a variable number of option strings or dummy function + references. The decorated function is called as soon as the decorator + is called, and the arguments it receives are the OptionValue or + function results corresponding to each of the arguments to @depends. + As an exception, when a HelpFormatter is attached, only functions that + have '--help' in their @depends argument list are called. + + The decorated function is altered to use a different global namespace + for its execution. This different global namespace exposes a limited + set of functions from os.path. 
+ """ + for k in kwargs: + if k != "when": + raise TypeError( + "depends_impl() got an unexpected keyword argument '%s'" % k + ) + + when = self._normalize_when(kwargs.get("when"), "@depends") + + if not when and not args: + raise ConfigureError("@depends needs at least one argument") + + dependencies = tuple(self._dependency(arg, "@depends") for arg in args) + + conditions = [ + self._conditions[d] + for d in dependencies + if d in self._conditions and isinstance(d, Option) + ] + for c in conditions: + if c != when: + raise ConfigureError( + "@depends function needs the same `when` " + "as options it depends on" + ) + + def decorator(func): + if inspect.isgeneratorfunction(func): + raise ConfigureError( + "Cannot decorate generator functions with @depends" + ) + if inspect.isroutine(func): + if func in self._templates: + raise TypeError("Cannot use a @template function here") + func = self._prepare_function(func) + elif isinstance(func, SandboxDependsFunction): + raise TypeError("Cannot nest @depends functions") + elif dependencies: + raise TypeError( + "Cannot wrap literal values in @depends with dependencies" + ) + depends = DependsFunction(self, func, dependencies, when=when) + return depends.sandboxed + + return decorator + + def include_impl(self, what, when=None): + """Implementation of include(). + Allows to include external files for execution in the sandbox. + It is possible to use a @depends function as argument, in which case + the result of the function is the file name to include. This latter + feature is only really meant for --enable-application/--enable-project. + """ + with self.only_when_impl(when): + what = self._resolve(what) + if what: + if not isinstance(what, six.string_types): + raise TypeError("Unexpected type: '%s'" % type(what).__name__) + self.include_file(what) + + def template_impl(self, func): + """Implementation of @template. + This function is a decorator. Template functions are called + immediately. They are altered so that their global namespace exposes + a limited set of functions from os.path, as well as `depends` and + `option`. + Templates allow to simplify repetitive constructs, or to implement + helper decorators and somesuch. + """ + + def update_globals(glob): + glob.update( + (k[: -len("_impl")], getattr(self, k)) + for k in dir(self) + if k.endswith("_impl") and k != "template_impl" + ) + glob.update((k, v) for k, v in six.iteritems(self) if k not in glob) + + template = self._prepare_function(func, update_globals) + + # Any function argument to the template must be prepared to be sandboxed. + # If the template itself returns a function (in which case, it's very + # likely a decorator), that function must be prepared to be sandboxed as + # well. + def wrap_template(template): + isfunction = inspect.isfunction + + def maybe_prepare_function(obj): + if isfunction(obj): + return self._prepare_function(obj) + return obj + + # The following function may end up being prepared to be sandboxed, + # so it mustn't depend on anything from the global scope in this + # file. It can however depend on variables from the closure, thus + # maybe_prepare_function and isfunction are declared above to be + # available there. 
+ @self.wraps(template) + def wrapper(*args, **kwargs): + args = [maybe_prepare_function(arg) for arg in args] + kwargs = {k: maybe_prepare_function(v) for k, v in kwargs.items()} + self._template_depth += 1 + ret = template(*args, **kwargs) + self._template_depth -= 1 + if isfunction(ret): + # We can't expect the sandboxed code to think about all the + # details of implementing decorators, so do some of the + # work for them. If the function takes exactly one function + # as argument and returns a function, it must be a + # decorator, so mark the returned function as wrapping the + # function passed in. + if len(args) == 1 and not kwargs and isfunction(args[0]): + ret = self.wraps(args[0])(ret) + return wrap_template(ret) + return ret + + return wrapper + + wrapper = wrap_template(template) + self._templates.add(wrapper) + return wrapper + + def wraps(self, func): + return wraps(func) + + RE_MODULE = re.compile("^[a-zA-Z0-9_\.]+$") + + def imports_impl(self, _import, _from=None, _as=None): + """Implementation of @imports. + This decorator imports the given _import from the given _from module + optionally under a different _as name. + The options correspond to the various forms for the import builtin. + + @imports('sys') + @imports(_from='mozpack', _import='path', _as='mozpath') + """ + for value, required in ((_import, True), (_from, False), (_as, False)): + + if not isinstance(value, six.string_types) and ( + required or value is not None + ): + raise TypeError("Unexpected type: '%s'" % type(value).__name__) + if value is not None and not self.RE_MODULE.match(value): + raise ValueError("Invalid argument to @imports: '%s'" % value) + if _as and "." in _as: + raise ValueError("Invalid argument to @imports: '%s'" % _as) + + def decorator(func): + if func in self._templates: + raise ConfigureError("@imports must appear after @template") + if func in self._depends: + raise ConfigureError("@imports must appear after @depends") + # For the imports to apply in the order they appear in the + # .configure file, we accumulate them in reverse order and apply + # them later. + imports = self._imports.setdefault(func, []) + imports.insert(0, (_from, _import, _as)) + return func + + return decorator + + def _apply_imports(self, func, glob): + for _from, _import, _as in self._imports.pop(func, ()): + self._get_one_import(_from, _import, _as, glob) + + def _handle_wrapped_import(self, _from, _import, _as, glob): + """Given the name of a module, "import" a mocked package into the glob + iff the module is one that we wrap (either for the sandbox or for the + purpose of testing). Applies if the wrapped module is exposed by an + attribute of `self`. + + For example, if the import statement is `from os import environ`, then + this function will set + glob['environ'] = self._wrapped_os.environ. + + Iff this function handles the given import, return True. + """ + module = (_from or _import).split(".")[0] + attr = "_wrapped_" + module + wrapped = getattr(self, attr, None) + if wrapped: + if _as or _from: + obj = self._recursively_get_property( + module, (_from + "." if _from else "") + _import, wrapped + ) + glob[_as or _import] = obj + else: + glob[module] = wrapped + return True + else: + return False + + def _recursively_get_property(self, module, what, wrapped): + """Traverse the wrapper object `wrapped` (which represents the module + `module`) and return the property represented by `what`, which may be a + series of nested attributes. 
+ + For example, if `module` is 'os' and `what` is 'os.path.join', + return `wrapped.path.join`. + """ + if what == module: + return wrapped + assert what.startswith(module + ".") + attrs = what[len(module + ".") :].split(".") + for attr in attrs: + wrapped = getattr(wrapped, attr) + return wrapped + + @memoized_property + def _wrapped_os(self): + wrapped_os = {} + exec_("from os import *", {}, wrapped_os) + # Special case os and os.environ so that os.environ is our copy of + # the environment. + wrapped_os["environ"] = self._environ + # Also override some os.path functions with ours. + wrapped_path = {} + exec_("from os.path import *", {}, wrapped_path) + wrapped_path.update(self.OS.path.__dict__) + wrapped_os["path"] = ReadOnlyNamespace(**wrapped_path) + return ReadOnlyNamespace(**wrapped_os) + + @memoized_property + def _wrapped_subprocess(self): + wrapped_subprocess = {} + exec_("from subprocess import *", {}, wrapped_subprocess) + + def wrap(function): + def wrapper(*args, **kwargs): + if kwargs.get("env") is None and self._environ: + kwargs["env"] = dict(self._environ) + + return function(*args, **kwargs) + + return wrapper + + for f in ("call", "check_call", "check_output", "Popen", "run"): + # `run` is new to python 3.5. In case this still runs from python2 + # code, avoid failing here. + if f in wrapped_subprocess: + wrapped_subprocess[f] = wrap(wrapped_subprocess[f]) + + return ReadOnlyNamespace(**wrapped_subprocess) + + @memoized_property + def _wrapped_six(self): + if six.PY3: + return six + wrapped_six = {} + exec_("from six import *", {}, wrapped_six) + wrapped_six_moves = {} + exec_("from six.moves import *", {}, wrapped_six_moves) + wrapped_six_moves_builtins = {} + exec_("from six.moves.builtins import *", {}, wrapped_six_moves_builtins) + + # Special case for the open() builtin, because otherwise, using it + # fails with "IOError: file() constructor not accessible in + # restricted mode". We also make open() look more like python 3's, + # decoding to unicode strings unless the mode says otherwise. + def wrapped_open(name, mode=None, buffering=None): + args = (name,) + kwargs = {} + if buffering is not None: + kwargs["buffering"] = buffering + if mode is not None: + args += (mode,) + if "b" in mode: + return open(*args, **kwargs) + kwargs["encoding"] = system_encoding + return codecs.open(*args, **kwargs) + + wrapped_six_moves_builtins["open"] = wrapped_open + wrapped_six_moves["builtins"] = ReadOnlyNamespace(**wrapped_six_moves_builtins) + wrapped_six["moves"] = ReadOnlyNamespace(**wrapped_six_moves) + + return ReadOnlyNamespace(**wrapped_six) + + def _get_one_import(self, _from, _import, _as, glob): + """Perform the given import, placing the result into the dict glob.""" + if not _from and _import == "__builtin__": + glob[_as or "__builtin__"] = __builtin__ + return + if _from == "__builtin__": + _from = "six.moves.builtins" + # The special `__sandbox__` module gives access to the sandbox + # instance. + if not _from and _import == "__sandbox__": + glob[_as or _import] = self + return + if self._handle_wrapped_import(_from, _import, _as, glob): + return + # If we've gotten this far, we should just do a normal import. + # Until this proves to be a performance problem, just construct an + # import statement and execute it. 
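+        # For example, @imports(_from='mozpack', _import='path', _as='mozpath')
+        # produces the statement "from mozpack import path as mozpath".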
+ import_line = "%simport %s%s" % ( + ("from %s " % _from) if _from else "", + _import, + (" as %s" % _as) if _as else "", + ) + exec_(import_line, {}, glob) + + def _resolve_and_set(self, data, name, value, when=None): + # Don't set anything when --help was on the command line + if self._help: + return + if when and not self._value_for(when): + return + name = self._resolve(name) + if name is None: + return + if not isinstance(name, six.string_types): + raise TypeError("Unexpected type: '%s'" % type(name).__name__) + if name in data: + raise ConfigureError( + "Cannot add '%s' to configuration: Key already " "exists" % name + ) + value = self._resolve(value) + if value is not None: + if self._logger.isEnabledFor(TRACE): + if data is self._config: + self._logger.log(TRACE, "set_config(%s, %r)", name, value) + elif data is self._config.get("DEFINES"): + self._logger.log(TRACE, "set_define(%s, %r)", name, value) + data[name] = value + + def set_config_impl(self, name, value, when=None): + """Implementation of set_config(). + Set the configuration items with the given name to the given value. + Both `name` and `value` can be references to @depends functions, + in which case the result from these functions is used. If the result + of either function is None, the configuration item is not set. + """ + when = self._normalize_when(when, "set_config") + + self._execution_queue.append( + (self._resolve_and_set, (self._config, name, value, when)) + ) + + def set_define_impl(self, name, value, when=None): + """Implementation of set_define(). + Set the define with the given name to the given value. Both `name` and + `value` can be references to @depends functions, in which case the + result from these functions is used. If the result of either function + is None, the define is not set. If the result is False, the define is + explicitly undefined (-U). + """ + when = self._normalize_when(when, "set_define") + + defines = self._config.setdefault("DEFINES", {}) + self._execution_queue.append( + (self._resolve_and_set, (defines, name, value, when)) + ) + + def imply_option_impl(self, option, value, reason=None, when=None): + """Implementation of imply_option(). + Injects additional options as if they had been passed on the command + line. The `option` argument is a string as in option()'s `name` or + `env`. The option must be declared after `imply_option` references it. + The `value` argument indicates the value to pass to the option. + It can be: + - True. In this case `imply_option` injects the positive option + + (--enable-foo/--with-foo). + imply_option('--enable-foo', True) + imply_option('--disable-foo', True) + + are both equivalent to `--enable-foo` on the command line. + + - False. In this case `imply_option` injects the negative option + + (--disable-foo/--without-foo). + imply_option('--enable-foo', False) + imply_option('--disable-foo', False) + + are both equivalent to `--disable-foo` on the command line. + + - None. In this case `imply_option` does nothing. + imply_option('--enable-foo', None) + imply_option('--disable-foo', None) + + are both equivalent to not passing any flag on the command line. + + - a string or a tuple. In this case `imply_option` injects the positive + option with the given value(s). + + imply_option('--enable-foo', 'a') + imply_option('--disable-foo', 'a') + + are both equivalent to `--enable-foo=a` on the command line. 
+          imply_option('--enable-foo', ('a', 'b'))
+          imply_option('--disable-foo', ('a', 'b'))
+
+        are both equivalent to `--enable-foo=a,b` on the command line.
+
+        Because imply_option('--disable-foo', ...) can be misleading, it is
+        recommended to use the positive form ('--enable' or '--with') for
+        `option`.
+
+        The `value` argument can also be (and usually is) a reference to a
+        @depends function, in which case the result of that function will be
+        used as per the described mapping above.
+
+        The `reason` argument indicates what caused the option to be implied.
+        It is necessary when it cannot be inferred from the `value`.
+        """
+
+        when = self._normalize_when(when, "imply_option")
+
+        # Don't do anything when --help was on the command line
+        if self._help:
+            return
+        if not reason and isinstance(value, SandboxDependsFunction):
+            deps = self._depends[value].dependencies
+            possible_reasons = [d for d in deps if d != self._help_option]
+            if len(possible_reasons) == 1:
+                if isinstance(possible_reasons[0], Option):
+                    reason = possible_reasons[0]
+        if not reason and (
+            isinstance(value, (bool, tuple)) or isinstance(value, six.string_types)
+        ):
+            # A reason can be provided automatically when imply_option
+            # is called with an immediate value.
+            _, filename, line, _, _, _ = inspect.stack()[1]
+            reason = "imply_option at %s:%s" % (filename, line)
+
+        if not reason:
+            raise ConfigureError(
+                "Cannot infer what implies '%s'. Please add a `reason` to "
+                "the `imply_option` call." % option
+            )
+
+        prefix, name, values = Option.split_option(option)
+        if values != ():
+            raise ConfigureError("Implied option must not contain an '='")
+
+        self._implied_options.append(
+            ReadOnlyNamespace(
+                option=option,
+                prefix=prefix,
+                name=name,
+                value=value,
+                caller=inspect.stack()[1],
+                reason=reason,
+                when=when,
+            )
+        )
+
+    def _prepare_function(self, func, update_globals=None):
+        """Alter the given function global namespace with the common ground
+        for @depends, and @template.
+        """
+        if not inspect.isfunction(func):
+            raise TypeError("Unexpected type: '%s'" % type(func).__name__)
+        if func in self._prepared_functions:
+            return func
+
+        glob = SandboxedGlobal(
+            (k, v)
+            for k, v in six.iteritems(func.__globals__)
+            if (inspect.isfunction(v) and v not in self._templates)
+            or (inspect.isclass(v) and issubclass(v, Exception))
+        )
+        glob.update(
+            __builtins__=self.BUILTINS,
+            __file__=self._paths[-1] if self._paths else "",
+            __name__=self._paths[-1] if self._paths else "",
+            os=self.OS,
+            log=self.log_impl,
+            namespace=ReadOnlyNamespace,
+        )
+        if update_globals:
+            update_globals(glob)
+
+        # The execution model in the sandbox doesn't guarantee the execution
+        # order will always be the same for a given function, and if it uses
+        # variables from a closure that are changed after the function is
+        # declared, depending when the function is executed, the value of the
+        # variable can differ. For consistency, we force the function to use
+        # the value from the earliest it can be run, which is at declaration.
+        # Note this is not entirely bullet proof (if the value is e.g. a list,
+        # the list contents could have changed), but covers the bases.
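+        # Illustration: if `x` was 1 when the function was prepared and is
+        # rebound to 2 afterwards, the cells rebuilt below still yield 1.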
+ closure = None + if func.__closure__: + + def makecell(content): + def f(): + content + + return f.__closure__[0] + + closure = tuple(makecell(cell.cell_contents) for cell in func.__closure__) + + new_func = self.wraps(func)( + types.FunctionType( + func.__code__, glob, func.__name__, func.__defaults__, closure + ) + ) + + @self.wraps(new_func) + def wrapped(*args, **kwargs): + if func in self._imports: + self._apply_imports(func, glob) + return new_func(*args, **kwargs) + + self._prepared_functions.add(wrapped) + return wrapped diff --git a/python/mozbuild/mozbuild/configure/check_debug_ranges.py b/python/mozbuild/mozbuild/configure/check_debug_ranges.py new file mode 100644 index 0000000000..f82624c14f --- /dev/null +++ b/python/mozbuild/mozbuild/configure/check_debug_ranges.py @@ -0,0 +1,68 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script returns the number of items for the DW_AT_ranges corresponding +# to a given compilation unit. This is used as a helper to find a bug in some +# versions of GNU ld. + +import re +import subprocess +import sys + + +def get_range_for(compilation_unit, debug_info): + """Returns the range offset for a given compilation unit + in a given debug_info.""" + name = ranges = "" + search_cu = False + for nfo in debug_info.splitlines(): + if "DW_TAG_compile_unit" in nfo: + search_cu = True + elif "DW_TAG_" in nfo or not nfo.strip(): + if name == compilation_unit and ranges != "": + return int(ranges, 16) + name = ranges = "" + search_cu = False + if search_cu: + if "DW_AT_name" in nfo: + name = nfo.rsplit(None, 1)[1] + elif "DW_AT_ranges" in nfo: + ranges = nfo.rsplit(None, 1)[1] + return None + + +def get_range_length(range, debug_ranges): + """Returns the number of items in the range starting at the + given offset.""" + length = 0 + for line in debug_ranges.splitlines(): + m = re.match("\s*([0-9a-fA-F]+)\s+([0-9a-fA-F]+)\s+([0-9a-fA-F]+)", line) + if m and int(m.group(1), 16) == range: + length += 1 + return length + + +def main(bin, compilation_unit): + p = subprocess.Popen( + ["objdump", "-W", bin], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + ) + (out, err) = p.communicate() + sections = re.split("\n(Contents of the|The section) ", out) + debug_info = [s for s in sections if s.startswith(".debug_info")] + debug_ranges = [s for s in sections if s.startswith(".debug_ranges")] + if not debug_ranges or not debug_info: + return 0 + + range = get_range_for(compilation_unit, debug_info[0]) + if range is not None: + return get_range_length(range, debug_ranges[0]) + + return -1 + + +if __name__ == "__main__": + print(main(*sys.argv[1:])) diff --git a/python/mozbuild/mozbuild/configure/constants.py b/python/mozbuild/mozbuild/configure/constants.py new file mode 100644 index 0000000000..a36152651d --- /dev/null +++ b/python/mozbuild/mozbuild/configure/constants.py @@ -0,0 +1,131 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from collections import OrderedDict + +from mozbuild.util import EnumString + +CompilerType = EnumString.subclass( + "clang", + "clang-cl", + "gcc", + "msvc", +) + +OS = EnumString.subclass( + "Android", + "DragonFly", + "FreeBSD", + "GNU", + "NetBSD", + "OpenBSD", + "OSX", + "SunOS", + "WINNT", + "WASI", +) + +Kernel = EnumString.subclass( + "Darwin", + "DragonFly", + "FreeBSD", + "kFreeBSD", + "Linux", + "NetBSD", + "OpenBSD", + "SunOS", + "WINNT", + "WASI", +) + +CPU_bitness = { + "aarch64": 64, + "Alpha": 64, + "arm": 32, + "hppa": 32, + "ia64": 64, + "loongarch64": 64, + "m68k": 32, + "mips32": 32, + "mips64": 64, + "ppc": 32, + "ppc64": 64, + "riscv64": 64, + "s390": 32, + "s390x": 64, + "sh4": 32, + "sparc": 32, + "sparc64": 64, + "x86": 32, + "x86_64": 64, + "wasm32": 32, +} + +CPU = EnumString.subclass(*CPU_bitness.keys()) + +Endianness = EnumString.subclass( + "big", + "little", +) + +WindowsBinaryType = EnumString.subclass( + "win32", + "win64", +) + +Abi = EnumString.subclass( + "msvc", + "mingw", +) + +# The order of those checks matter +CPU_preprocessor_checks = OrderedDict( + ( + ("x86", "__i386__ || _M_IX86"), + ("x86_64", "__x86_64__ || _M_X64"), + ("arm", "__arm__ || _M_ARM"), + ("aarch64", "__aarch64__ || _M_ARM64"), + ("ia64", "__ia64__"), + ("s390x", "__s390x__"), + ("s390", "__s390__"), + ("ppc64", "__powerpc64__"), + ("ppc", "__powerpc__"), + ("Alpha", "__alpha__"), + ("hppa", "__hppa__"), + ("sparc64", "__sparc__ && __arch64__"), + ("sparc", "__sparc__"), + ("m68k", "__m68k__"), + ("mips64", "__mips64"), + ("mips32", "__mips__"), + ("riscv64", "__riscv && __riscv_xlen == 64"), + ("loongarch64", "__loongarch64"), + ("sh4", "__sh__"), + ("wasm32", "__wasm32__"), + ) +) + +assert sorted(CPU_preprocessor_checks.keys()) == sorted(CPU.POSSIBLE_VALUES) + +kernel_preprocessor_checks = { + "Darwin": "__APPLE__", + "DragonFly": "__DragonFly__", + "FreeBSD": "__FreeBSD__", + "kFreeBSD": "__FreeBSD_kernel__", + "Linux": "__linux__", + "NetBSD": "__NetBSD__", + "OpenBSD": "__OpenBSD__", + "SunOS": "__sun__", + "WINNT": "_WIN32 || __CYGWIN__", + "WASI": "__wasi__", +} + +assert sorted(kernel_preprocessor_checks.keys()) == sorted(Kernel.POSSIBLE_VALUES) + +OS_preprocessor_checks = { + "Android": "__ANDROID__", +} + +# We intentionally don't include all possible OSes in our checks, because we +# only care about OS mismatches for specific target OSes. +# assert sorted(OS_preprocessor_checks.keys()) == sorted(OS.POSSIBLE_VALUES) diff --git a/python/mozbuild/mozbuild/configure/help.py b/python/mozbuild/mozbuild/configure/help.py new file mode 100644 index 0000000000..bfd5e6ad6d --- /dev/null +++ b/python/mozbuild/mozbuild/configure/help.py @@ -0,0 +1,90 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import re +from collections import defaultdict + +from mozbuild.configure.options import Option + + +class HelpFormatter(object): + def __init__(self, argv0): + self.intro = ["Usage: %s [options]" % os.path.basename(argv0)] + self.options = [] + + def add(self, option): + assert isinstance(option, Option) + if option.possible_origins == ("implied",): + # Don't display help if our option can only be implied. 
+ return + self.options.append(option) + + def format_options_by_category(self, options_by_category): + ret = [] + for category, options in sorted( + options_by_category.items(), key=lambda x: x[0] + ): + ret.append(" " + category + ":") + for option in sorted(options, key=lambda opt: opt.option): + opt = option.option + if option.choices: + opt += "={%s}" % ",".join(option.choices) + help = self.format_help(option) + if len(option.default): + if help: + help += " " + help += "[%s]" % ",".join(option.default) + + if len(opt) > 24 or not help: + ret.append(" %s" % opt) + if help: + ret.append("%s%s" % (" " * 30, help)) + else: + ret.append(" %-24s %s" % (opt, help)) + ret.append("") + return ret + + RE_FORMAT = re.compile(r"{([^|}]*)\|([^|}]*)}") + + # Return formatted help text for --{enable,disable,with,without}-* options. + # + # Format is the following syntax: + # {String for --enable or --with|String for --disable or --without} + # + # For example, '{Enable|Disable} optimizations' will be formatted to + # 'Enable optimizations' if the options's prefix is 'enable' or 'with', + # and formatted to 'Disable optimizations' if the options's prefix is + # 'disable' or 'without'. + def format_help(self, option): + if not option.help: + return "" + + if option.prefix in ("enable", "with"): + replacement = r"\1" + elif option.prefix in ("disable", "without"): + replacement = r"\2" + else: + return option.help + + return self.RE_FORMAT.sub(replacement, option.help) + + def usage(self, out): + options_by_category = defaultdict(list) + env_by_category = defaultdict(list) + for option in self.options: + target = options_by_category if option.name else env_by_category + target[option.category].append(option) + options_formatted = [ + "Options: [defaults in brackets after descriptions]" + ] + self.format_options_by_category(options_by_category) + env_formatted = ["Environment variables:"] + self.format_options_by_category( + env_by_category + ) + print( + "\n\n".join( + "\n".join(t) for t in (self.intro, options_formatted, env_formatted) + ), + file=out, + ) diff --git a/python/mozbuild/mozbuild/configure/lint.py b/python/mozbuild/mozbuild/configure/lint.py new file mode 100644 index 0000000000..7ea379b1ef --- /dev/null +++ b/python/mozbuild/mozbuild/configure/lint.py @@ -0,0 +1,348 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import inspect +import re +import types +from dis import Bytecode +from functools import wraps +from io import StringIO + +from mozbuild.util import memoize + +from . 
import ( + CombinedDependsFunction, + ConfigureError, + ConfigureSandbox, + DependsFunction, + SandboxDependsFunction, + SandboxedGlobal, + TrivialDependsFunction, +) +from .help import HelpFormatter + + +class LintSandbox(ConfigureSandbox): + def __init__(self, environ=None, argv=None, stdout=None, stderr=None): + out = StringIO() + stdout = stdout or out + stderr = stderr or out + environ = environ or {} + argv = argv or [] + self._wrapped = {} + self._has_imports = set() + self._bool_options = [] + self._bool_func_options = [] + self.LOG = "" + super(LintSandbox, self).__init__( + {}, environ=environ, argv=argv, stdout=stdout, stderr=stderr + ) + + def run(self, path=None): + if path: + self.include_file(path) + + for dep in self._depends.values(): + self._check_dependencies(dep) + + def _raise_from(self, exception, obj, line=0): + """ + Raises the given exception as if it were emitted from the given + location. + + The location is determined from the values of obj and line. + - `obj` can be a function or DependsFunction, in which case + `line` corresponds to the line within the function the exception + will be raised from (as an offset from the function's firstlineno). + - `obj` can be a stack frame, in which case `line` is ignored. + """ + + def thrower(e): + raise e + + if isinstance(obj, DependsFunction): + obj, _ = self.unwrap(obj._func) + + if inspect.isfunction(obj): + funcname = obj.__name__ + filename = obj.__code__.co_filename + firstline = obj.__code__.co_firstlineno + line += firstline + elif inspect.isframe(obj): + funcname = obj.f_code.co_name + filename = obj.f_code.co_filename + firstline = obj.f_code.co_firstlineno + line = obj.f_lineno + else: + # Don't know how to handle the given location, still raise the + # exception. + raise exception + + # Create a new function from the above thrower that pretends + # the `def` line is on the first line of the function given as + # argument, and the `raise` line is on the line given as argument. + + offset = line - firstline + # co_lnotab is a string where each pair of consecutive character is + # (chr(byte_increment), chr(line_increment)), mapping bytes in co_code + # to line numbers relative to co_firstlineno. + # If the offset we need to encode is larger than what fits in a 8-bit + # signed integer, we need to split it. + co_lnotab = bytes([0, 127] * (offset // 127) + [0, offset % 127]) + code = thrower.__code__ + codetype_args = [ + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + filename, + funcname, + firstline, + co_lnotab, + ] + if hasattr(code, "co_posonlyargcount"): + # co_posonlyargcount was introduced in Python 3.8. 
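+            # It sits between co_argcount and co_kwonlyargcount in the
+            # CodeType constructor signature, hence the insert at index 1.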
+ codetype_args.insert(1, code.co_posonlyargcount) + + code = types.CodeType(*codetype_args) + thrower = types.FunctionType( + code, + thrower.__globals__, + funcname, + thrower.__defaults__, + thrower.__closure__, + ) + thrower(exception) + + def _check_dependencies(self, obj): + if isinstance(obj, CombinedDependsFunction) or obj in ( + self._always, + self._never, + ): + return + if not inspect.isroutine(obj._func): + return + func, glob = self.unwrap(obj._func) + func_args = inspect.getfullargspec(func) + if func_args.varkw: + e = ConfigureError( + "Keyword arguments are not allowed in @depends functions" + ) + self._raise_from(e, func) + + all_args = list(func_args.args) + if func_args.varargs: + all_args.append(func_args.varargs) + used_args = set() + + for instr in Bytecode(func): + if instr.opname in ("LOAD_FAST", "LOAD_CLOSURE"): + if instr.argval in all_args: + used_args.add(instr.argval) + + for num, arg in enumerate(all_args): + if arg not in used_args: + dep = obj.dependencies[num] + if dep != self._help_option or not self._need_help_dependency(obj): + if isinstance(dep, DependsFunction): + dep = dep.name + else: + dep = dep.option + e = ConfigureError("The dependency on `%s` is unused" % dep) + self._raise_from(e, func) + + def _need_help_dependency(self, obj): + if isinstance(obj, (CombinedDependsFunction, TrivialDependsFunction)): + return False + if isinstance(obj, DependsFunction): + if obj in (self._always, self._never) or not inspect.isroutine(obj._func): + return False + func, glob = self.unwrap(obj._func) + # We allow missing --help dependencies for functions that: + # - don't use @imports + # - don't have a closure + # - don't use global variables + if func in self._has_imports or func.__closure__: + return True + for instr in Bytecode(func): + if instr.opname in ("LOAD_GLOBAL", "STORE_GLOBAL"): + # There is a fake os module when one is not imported, + # and it's allowed for functions without a --help + # dependency. 
+ if instr.argval == "os" and glob.get("os") is self.OS: + continue + if instr.argval in self.BUILTINS: + continue + if instr.argval in "namespace": + continue + return True + return False + + def _missing_help_dependency(self, obj): + if isinstance(obj, DependsFunction) and self._help_option in obj.dependencies: + return False + return self._need_help_dependency(obj) + + @memoize + def _value_for_depends(self, obj): + with_help = self._help_option in obj.dependencies + if with_help: + for arg in obj.dependencies: + if self._missing_help_dependency(arg): + e = ConfigureError( + "Missing '--help' dependency because `%s` depends on " + "'--help' and `%s`" % (obj.name, arg.name) + ) + self._raise_from(e, arg) + elif self._missing_help_dependency(obj): + e = ConfigureError("Missing '--help' dependency") + self._raise_from(e, obj) + return super(LintSandbox, self)._value_for_depends(obj) + + def option_impl(self, *args, **kwargs): + result = super(LintSandbox, self).option_impl(*args, **kwargs) + when = self._conditions.get(result) + if when: + self._value_for(when) + + self._check_option(result, *args, **kwargs) + + return result + + def _check_option(self, option, *args, **kwargs): + if "default" not in kwargs: + return + if len(args) == 0: + return + + self._check_prefix_for_bool_option(*args, **kwargs) + self._check_help_for_option_with_func_default(option, *args, **kwargs) + + def _check_prefix_for_bool_option(self, *args, **kwargs): + name = args[0] + default = kwargs["default"] + + if type(default) != bool: + return + + table = { + True: { + "enable": "disable", + "with": "without", + }, + False: { + "disable": "enable", + "without": "with", + }, + } + for prefix, replacement in table[default].items(): + if name.startswith("--{}-".format(prefix)): + frame = inspect.currentframe() + while frame and frame.f_code.co_name != self.option_impl.__name__: + frame = frame.f_back + e = ConfigureError( + "{} should be used instead of " + "{} with default={}".format( + name.replace( + "--{}-".format(prefix), "--{}-".format(replacement) + ), + name, + default, + ) + ) + self._raise_from(e, frame.f_back if frame else None) + + def _check_help_for_option_with_func_default(self, option, *args, **kwargs): + default = kwargs["default"] + + if not isinstance(default, SandboxDependsFunction): + return + + if not option.prefix: + return + + default = self._resolve(default) + if type(default) is str: + return + + help = kwargs["help"] + match = re.search(HelpFormatter.RE_FORMAT, help) + if match: + return + + if option.prefix in ("enable", "disable"): + rule = "{Enable|Disable}" + else: + rule = "{With|Without}" + + frame = inspect.currentframe() + while frame and frame.f_code.co_name != self.option_impl.__name__: + frame = frame.f_back + e = ConfigureError( + '`help` should contain "{}" because of non-constant default'.format(rule) + ) + self._raise_from(e, frame.f_back if frame else None) + + def unwrap(self, func): + glob = func.__globals__ + while func in self._wrapped: + if isinstance(func.__globals__, SandboxedGlobal): + glob = func.__globals__ + func = self._wrapped[func] + return func, glob + + def wraps(self, func): + def do_wraps(wrapper): + self._wrapped[wrapper] = func + return wraps(func)(wrapper) + + return do_wraps + + def imports_impl(self, _import, _from=None, _as=None): + wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as) + + def decorator(func): + self._has_imports.add(func) + return wrapper(func) + + return decorator + + def _prepare_function(self, func, 
update_globals=None): + wrapped = super(LintSandbox, self)._prepare_function(func, update_globals) + _, glob = self.unwrap(wrapped) + imports = set() + for _from, _import, _as in self._imports.get(func, ()): + if _as: + imports.add(_as) + else: + what = _import.split(".")[0] + imports.add(what) + if _from == "__builtin__" and _import in glob["__builtins__"]: + e = NameError( + "builtin '{}' doesn't need to be imported".format(_import) + ) + self._raise_from(e, func) + for instr in Bytecode(func): + code = func.__code__ + if ( + instr.opname == "LOAD_GLOBAL" + and instr.argval not in glob + and instr.argval not in imports + and instr.argval not in glob["__builtins__"] + and instr.argval not in code.co_varnames[: code.co_argcount] + ): + # Raise the same kind of error as what would happen during + # execution. + e = NameError("global name '{}' is not defined".format(instr.argval)) + if instr.starts_line is None: + self._raise_from(e, func) + else: + self._raise_from(e, func, instr.starts_line - code.co_firstlineno) + + return wrapped diff --git a/python/mozbuild/mozbuild/configure/options.py b/python/mozbuild/mozbuild/configure/options.py new file mode 100644 index 0000000000..cc3b4516ea --- /dev/null +++ b/python/mozbuild/mozbuild/configure/options.py @@ -0,0 +1,614 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import inspect +import os +import sys +from collections import OrderedDict + +import six + +HELP_OPTIONS_CATEGORY = "Help options" +# List of whitelisted option categories. If you want to add a new category, +# simply add it to this list; however, exercise discretion as +# "./configure --help" becomes less useful if there are an excessive number of +# categories. +_ALL_CATEGORIES = (HELP_OPTIONS_CATEGORY,) + + +def _infer_option_category(define_depth): + stack_frame = inspect.stack(0)[3 + define_depth] + try: + path = os.path.relpath(stack_frame[0].f_code.co_filename) + except ValueError: + # If this call fails, it means the relative path couldn't be determined + # (e.g. because this file is on a different drive than the cwd on a + # Windows machine). That's fine, just use the absolute filename. + path = stack_frame[0].f_code.co_filename + return "Options from " + path + + +def istupleofstrings(obj): + return ( + isinstance(obj, tuple) + and len(obj) + and all(isinstance(o, six.string_types) for o in obj) + ) + + +class OptionValue(tuple): + """Represents the value of a configure option. + + This class is not meant to be used directly. Use its subclasses instead. + + The `origin` attribute holds where the option comes from (e.g. 
environment,
+    command line, or default)
+    """
+
+    def __new__(cls, values=(), origin="unknown"):
+        return super(OptionValue, cls).__new__(cls, values)
+
+    def __init__(self, values=(), origin="unknown"):
+        self.origin = origin
+
+    def format(self, option):
+        if option.startswith("--"):
+            prefix, name, values = Option.split_option(option)
+            assert values == ()
+            for prefix_set in (
+                ("disable", "enable"),
+                ("without", "with"),
+            ):
+                if prefix in prefix_set:
+                    prefix = prefix_set[int(bool(self))]
+                    break
+            if prefix:
+                option = "--%s-%s" % (prefix, name)
+            elif self:
+                option = "--%s" % name
+            else:
+                return ""
+            if len(self):
+                return "%s=%s" % (option, ",".join(self))
+            return option
+        elif self and not len(self):
+            return "%s=1" % option
+        return "%s=%s" % (option, ",".join(self))
+
+    def __eq__(self, other):
+        # This is to catch naive comparisons against strings and other
+        # types in moz.configure files, as it is really easy to write
+        # value == 'foo'. We only raise a TypeError for instances that
+        # have content, because value-less instances (like PositiveOptionValue
+        # and NegativeOptionValue) are common and it is trivial to
+        # compare these.
+        if not isinstance(other, tuple) and len(self):
+            raise TypeError(
+                "cannot compare a populated %s against an %s; "
+                "OptionValue instances are tuples - did you mean to "
+                "compare against member elements using [x]?"
+                % (type(other).__name__, type(self).__name__)
+            )
+
+        # Allow explicit tuples to be compared.
+        if type(other) == tuple:
+            return tuple.__eq__(self, other)
+        elif isinstance(other, bool):
+            return bool(self) == other
+        # Else we're likely an OptionValue class.
+        elif type(other) != type(self):
+            return False
+        else:
+            return super(OptionValue, self).__eq__(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __repr__(self):
+        return "%s%s" % (self.__class__.__name__, super(OptionValue, self).__repr__())
+
+    @staticmethod
+    def from_(value):
+        if isinstance(value, OptionValue):
+            return value
+        elif value is True:
+            return PositiveOptionValue()
+        elif value is False or value == ():
+            return NegativeOptionValue()
+        elif isinstance(value, six.string_types):
+            return PositiveOptionValue((value,))
+        elif isinstance(value, tuple):
+            return PositiveOptionValue(value)
+        else:
+            raise TypeError("Unexpected type: '%s'" % type(value).__name__)
+
+
+class PositiveOptionValue(OptionValue):
+    """Represents the value for a positive option (--enable/--with/--foo)
+    in the form of a tuple for when values are given to the option (in the form
+    --option=value[,value2...]).
+    """
+
+    def __nonzero__(self):  # py2
+        return True
+
+    def __bool__(self):  # py3
+        return True
+
+
+class NegativeOptionValue(OptionValue):
+    """Represents the value for a negative option (--disable/--without)
+
+    This is effectively an empty tuple with an `origin` attribute.
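+
+    Illustratively, an instance compares equal to both False and the empty
+    tuple (), per OptionValue.__eq__ above.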
+ """ + + def __new__(cls, origin="unknown"): + return super(NegativeOptionValue, cls).__new__(cls, origin=origin) + + def __init__(self, origin="unknown"): + super(NegativeOptionValue, self).__init__(origin=origin) + + +class InvalidOptionError(Exception): + pass + + +class ConflictingOptionError(InvalidOptionError): + def __init__(self, message, **format_data): + if format_data: + message = message.format(**format_data) + super(ConflictingOptionError, self).__init__(message) + for k, v in six.iteritems(format_data): + setattr(self, k, v) + + +class Option(object): + """Represents a configure option + + A configure option can be a command line flag or an environment variable + or both. + + - `name` is the full command line flag (e.g. --enable-foo). + - `env` is the environment variable name (e.g. ENV) + - `nargs` is the number of arguments the option may take. It can be a + number or the special values '?' (0 or 1), '*' (0 or more), or '+' (1 or + more). + - `default` can be used to give a default value to the option. When the + `name` of the option starts with '--enable-' or '--with-', the implied + default is an empty PositiveOptionValue. When it starts with '--disable-' + or '--without-', the implied default is a NegativeOptionValue. + - `choices` restricts the set of values that can be given to the option. + - `help` is the option description for use in the --help output. + - `possible_origins` is a tuple of strings that are origins accepted for + this option. Example origins are 'mozconfig', 'implied', and 'environment'. + - `category` is a human-readable string used only for categorizing command- + line options when displaying the output of `configure --help`. If not + supplied, the script will attempt to infer an appropriate category based + on the name of the file where the option was defined. If supplied it must + be in the _ALL_CATEGORIES list above. + - `define_depth` should generally only be used by templates that are used + to instantiate an option indirectly. Set this to a positive integer to + force the script to look into a deeper stack frame when inferring the + `category`. 
+ """ + + __slots__ = ( + "id", + "prefix", + "name", + "env", + "nargs", + "default", + "choices", + "help", + "possible_origins", + "category", + "define_depth", + ) + + def __init__( + self, + name=None, + env=None, + nargs=None, + default=None, + possible_origins=None, + choices=None, + category=None, + help=None, + define_depth=0, + ): + if not name and not env: + raise InvalidOptionError( + "At least an option name or an environment variable name must " + "be given" + ) + if name: + if not isinstance(name, six.string_types): + raise InvalidOptionError("Option must be a string") + if not name.startswith("--"): + raise InvalidOptionError("Option must start with `--`") + if "=" in name: + raise InvalidOptionError("Option must not contain an `=`") + if not name.islower(): + raise InvalidOptionError("Option must be all lowercase") + if env: + if not isinstance(env, six.string_types): + raise InvalidOptionError("Environment variable name must be a string") + if not env.isupper(): + raise InvalidOptionError( + "Environment variable name must be all uppercase" + ) + if nargs not in (None, "?", "*", "+") and not ( + isinstance(nargs, int) and nargs >= 0 + ): + raise InvalidOptionError( + "nargs must be a positive integer, '?', '*' or '+'" + ) + if ( + not isinstance(default, six.string_types) + and not isinstance(default, (bool, type(None))) + and not istupleofstrings(default) + ): + raise InvalidOptionError( + "default must be a bool, a string or a tuple of strings" + ) + if choices and not istupleofstrings(choices): + raise InvalidOptionError("choices must be a tuple of strings") + if category and not isinstance(category, six.string_types): + raise InvalidOptionError("Category must be a string") + if category and category not in _ALL_CATEGORIES: + raise InvalidOptionError( + "Category must either be inferred or in the _ALL_CATEGORIES " + "list in options.py: %s" % ", ".join(_ALL_CATEGORIES) + ) + if not isinstance(define_depth, int): + raise InvalidOptionError("DefineDepth must be an integer") + if not help: + raise InvalidOptionError("A help string must be provided") + if possible_origins and not istupleofstrings(possible_origins): + raise InvalidOptionError("possible_origins must be a tuple of strings") + self.possible_origins = possible_origins + + if name: + prefix, name, values = self.split_option(name) + assert values == () + + # --disable and --without options mean the default is enabled. + # --enable and --with options mean the default is disabled. + # However, we allow a default to be given so that the default + # can be affected by other factors. + if prefix: + if default is None: + default = prefix in ("disable", "without") + elif default is False: + prefix = { + "disable": "enable", + "without": "with", + }.get(prefix, prefix) + elif default is True: + prefix = { + "enable": "disable", + "with": "without", + }.get(prefix, prefix) + else: + prefix = "" + + self.prefix = prefix + self.name = name + self.env = env + if default in (None, False): + self.default = NegativeOptionValue(origin="default") + elif isinstance(default, tuple): + self.default = PositiveOptionValue(default, origin="default") + elif default is True: + self.default = PositiveOptionValue(origin="default") + else: + self.default = PositiveOptionValue((default,), origin="default") + if nargs is None: + nargs = 0 + if len(self.default) == 1: + nargs = "?" 
+ elif len(self.default) > 1: + nargs = "*" + elif choices: + nargs = 1 + self.nargs = nargs + has_choices = choices is not None + if isinstance(self.default, PositiveOptionValue): + if has_choices and len(self.default) == 0: + raise InvalidOptionError( + "A `default` must be given along with `choices`" + ) + if not self._validate_nargs(len(self.default)): + raise InvalidOptionError("The given `default` doesn't satisfy `nargs`") + if has_choices and not all(d in choices for d in self.default): + raise InvalidOptionError( + "The `default` value must be one of %s" + % ", ".join("'%s'" % c for c in choices) + ) + elif has_choices: + maxargs = self.maxargs + if len(choices) < maxargs and maxargs != sys.maxsize: + raise InvalidOptionError("Not enough `choices` for `nargs`") + self.choices = choices + self.help = help + self.category = category or _infer_option_category(define_depth) + + @staticmethod + def split_option(option): + """Split a flag or variable into a prefix, a name and values + + Variables come in the form NAME=values (no prefix). + Flags come in the form --name=values or --prefix-name=values + where prefix is one of 'with', 'without', 'enable' or 'disable'. + The '=values' part is optional. Values are separated with commas. + """ + if not isinstance(option, six.string_types): + raise InvalidOptionError("Option must be a string") + + elements = option.split("=", 1) + name = elements[0] + values = tuple(elements[1].split(",")) if len(elements) == 2 else () + if name.startswith("--"): + name = name[2:] + if not name.islower(): + raise InvalidOptionError("Option must be all lowercase") + elements = name.split("-", 1) + prefix = elements[0] + if len(elements) == 2 and prefix in ( + "enable", + "disable", + "with", + "without", + ): + return prefix, elements[1], values + else: + if name.startswith("-"): + raise InvalidOptionError( + "Option must start with two dashes instead of one" + ) + if name.islower(): + raise InvalidOptionError( + 'Environment variable name "%s" must be all uppercase' % name + ) + return "", name, values + + @staticmethod + def _join_option(prefix, name): + # The constraints around name and env in __init__ make it so that + # we can distinguish between flags and environment variables with + # islower/isupper. + if name.isupper(): + assert not prefix + return name + elif prefix: + return "--%s-%s" % (prefix, name) + return "--%s" % name + + @property + def option(self): + if self.prefix or self.name: + return self._join_option(self.prefix, self.name) + else: + return self.env + + @property + def minargs(self): + if isinstance(self.nargs, int): + return self.nargs + return 1 if self.nargs == "+" else 0 + + @property + def maxargs(self): + if isinstance(self.nargs, int): + return self.nargs + return 1 if self.nargs == "?" else sys.maxsize + + def _validate_nargs(self, num): + minargs, maxargs = self.minargs, self.maxargs + return num >= minargs and num <= maxargs + + def get_value(self, option=None, origin="unknown"): + """Given a full command line option (e.g. --enable-foo=bar) or a + variable assignment (FOO=bar), returns the corresponding OptionValue. + + Note: variable assignments can come from either the environment or + from the command line (e.g. `../configure CFLAGS=-O2`) + """ + if not option: + return self.default + + if self.possible_origins and origin not in self.possible_origins: + raise InvalidOptionError( + "%s can not be set by %s. 
Values are accepted from: %s"
+                % (option, origin, ", ".join(self.possible_origins))
+            )
+
+        prefix, name, values = self.split_option(option)
+        option = self._join_option(prefix, name)
+
+        assert name in (self.name, self.env)
+
+        if prefix in ("disable", "without"):
+            if values != ():
+                raise InvalidOptionError("Cannot pass a value to %s" % option)
+            return NegativeOptionValue(origin=origin)
+
+        if name == self.env:
+            if values == ("",):
+                return NegativeOptionValue(origin=origin)
+            if self.nargs in (0, "?", "*") and values == ("1",):
+                return PositiveOptionValue(origin=origin)
+
+        values = PositiveOptionValue(values, origin=origin)
+
+        if not self._validate_nargs(len(values)):
+            raise InvalidOptionError(
+                "%s takes %s value%s"
+                % (
+                    option,
+                    {
+                        "?": "0 or 1",
+                        "*": "0 or more",
+                        "+": "1 or more",
+                    }.get(self.nargs, str(self.nargs)),
+                    "s" if (not isinstance(self.nargs, int) or self.nargs != 1) else "",
+                )
+            )
+
+        if len(values) and self.choices:
+            relative_result = None
+            for val in values:
+                if self.nargs in ("+", "*"):
+                    if val.startswith(("+", "-")):
+                        if relative_result is None:
+                            relative_result = list(self.default)
+                        sign = val[0]
+                        val = val[1:]
+                        if sign == "+":
+                            if val not in relative_result:
+                                relative_result.append(val)
+                        else:
+                            try:
+                                relative_result.remove(val)
+                            except ValueError:
+                                pass
+
+                if val not in self.choices:
+                    raise InvalidOptionError(
+                        "'%s' is not one of %s"
+                        % (val, ", ".join("'%s'" % c for c in self.choices))
+                    )
+
+            if relative_result is not None:
+                values = PositiveOptionValue(relative_result, origin=origin)
+
+        return values
+
+    def __repr__(self):
+        return "<%s [%s]>" % (self.__class__.__name__, self.option)
+
+
+class CommandLineHelper(object):
+    """Helper class to handle the various ways options can be given either
+    on the command line or through the environment.
+
+    For instance, an Option('--foo', env='FOO') can be passed as --foo on the
+    command line, or as FOO=1 in the environment *or* on the command line.
+
+    If multiple variants are given, the command line is preferred over the
+    environment, and if different values are given on the command line, the
+    last one wins. (This mimics the behavior of autoconf, avoiding breakage
+    of existing mozconfigs that use valid options in weird ways.)
+
+    Extra options can be added afterwards through API calls. For those,
+    conflicting values will raise an exception.
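+
+    A sketch of typical use (hypothetical option name):
+
+        helper = CommandLineHelper({'MOZ_FOO': '1'},
+                                   ['configure', '--enable-foo=bar'])
+        option = Option('--enable-foo', env='MOZ_FOO', nargs='?',
+                        help='Enable foo')
+        value, arg = helper.handle(option)
+        # The command line wins: value is PositiveOptionValue(('bar',)).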
+ """ + + def __init__(self, environ=os.environ, argv=sys.argv): + self._environ = dict(environ) + self._args = OrderedDict() + self._extra_args = OrderedDict() + self._origins = {} + self._last = 0 + + assert argv and not argv[0].startswith("--") + for arg in argv[1:]: + self.add(arg, "command-line", self._args) + + def add(self, arg, origin="command-line", args=None): + assert origin != "default" + prefix, name, values = Option.split_option(arg) + if args is None: + args = self._extra_args + if args is self._extra_args and name in self._extra_args: + old_arg = self._extra_args[name][0] + old_prefix, _, old_values = Option.split_option(old_arg) + if prefix != old_prefix or values != old_values: + raise ConflictingOptionError( + "Cannot add '{arg}' to the {origin} set because it " + "conflicts with '{old_arg}' that was added earlier", + arg=arg, + origin=origin, + old_arg=old_arg, + old_origin=self._origins[old_arg], + ) + self._last += 1 + args[name] = arg, self._last + self._origins[arg] = origin + + def _prepare(self, option, args): + arg = None + origin = "command-line" + from_name = args.get(option.name) + from_env = args.get(option.env) + if from_name and from_env: + arg1, pos1 = from_name + arg2, pos2 = from_env + arg, pos = (arg1, pos1) if abs(pos1) > abs(pos2) else (arg2, pos2) + if args is self._extra_args and ( + option.get_value(arg1) != option.get_value(arg2) + ): + origin = self._origins[arg] + old_arg = arg2 if abs(pos1) > abs(pos2) else arg1 + raise ConflictingOptionError( + "Cannot add '{arg}' to the {origin} set because it " + "conflicts with '{old_arg}' that was added earlier", + arg=arg, + origin=origin, + old_arg=old_arg, + old_origin=self._origins[old_arg], + ) + elif from_name or from_env: + arg, pos = from_name if from_name else from_env + elif option.env and args is self._args: + env = self._environ.get(option.env) + if env is not None: + arg = "%s=%s" % (option.env, env) + origin = "environment" + + origin = self._origins.get(arg, origin) + + for k in (option.name, option.env): + try: + del args[k] + except KeyError: + pass + + return arg, origin + + def handle(self, option): + """Return the OptionValue corresponding to the given Option instance, + depending on the command line, environment, and extra arguments, and + the actual option or variable that set it. + Only works once for a given Option. + """ + assert isinstance(option, Option) + + arg, origin = self._prepare(option, self._args) + ret = option.get_value(arg, origin) + + extra_arg, extra_origin = self._prepare(option, self._extra_args) + extra_ret = option.get_value(extra_arg, extra_origin) + + if extra_ret.origin == "default": + return ret, arg + + if ret.origin != "default" and extra_ret != ret: + raise ConflictingOptionError( + "Cannot add '{arg}' to the {origin} set because it conflicts " + "with {old_arg} from the {old_origin} set", + arg=extra_arg, + origin=extra_ret.origin, + old_arg=arg, + old_origin=ret.origin, + ) + + return extra_ret, extra_arg + + def __iter__(self): + for d in (self._args, self._extra_args): + for arg, pos in six.itervalues(d): + yield arg diff --git a/python/mozbuild/mozbuild/configure/util.py b/python/mozbuild/mozbuild/configure/util.py new file mode 100644 index 0000000000..a58dc4d3f4 --- /dev/null +++ b/python/mozbuild/mozbuild/configure/util.py @@ -0,0 +1,235 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import codecs
+import io
+import itertools
+import locale
+import logging
+import os
+import sys
+from collections import deque
+from contextlib import contextmanager
+
+import six
+from looseversion import LooseVersion
+
+
+def getpreferredencoding():
+    # locale._parse_localename makes locale.getpreferredencoding
+    # return None when LC_ALL is C, instead of e.g. 'US-ASCII' or
+    # 'ANSI_X3.4-1968' when it uses nl_langinfo.
+    encoding = None
+    try:
+        encoding = locale.getpreferredencoding()
+    except ValueError:
+        # On English OS X, LC_ALL is UTF-8 (not en-US.UTF-8), and
+        # that throws off locale._parse_localename, which ends up
+        # being used on e.g. homebrew python.
+        if os.environ.get("LC_ALL", "").upper() == "UTF-8":
+            encoding = "utf-8"
+    return encoding
+
+
+class Version(LooseVersion):
+    """A simple subclass of looseversion.LooseVersion.
+    Adds attributes for `major`, `minor`, `patch` for the first three
+    version components so users can easily pull out major/minor
+    versions, like:
+
+        v = Version('1.2b')
+        v.major == 1
+        v.minor == 2
+        v.patch == 0
+    """
+
+    def __init__(self, version):
+        # Can't use super, LooseVersion's base class is not a new-style class.
+        LooseVersion.__init__(self, version)
+        # Take the first three integer components, stopping at the first
+        # non-integer and padding the rest with zeroes.
+        (self.major, self.minor, self.patch) = list(
+            itertools.chain(
+                itertools.takewhile(lambda x: isinstance(x, int), self.version),
+                (0, 0, 0),
+            )
+        )[:3]
+
+
+class ConfigureOutputHandler(logging.Handler):
+    """A logging handler class that sends info messages to stdout and other
+    messages to stderr.
+
+    Messages sent to stdout are not formatted with the attached Formatter.
+    Additionally, if they end with '... ', no newline character is printed,
+    making the next message printed follow the '... '.
+
+    Only messages at log level INFO or above are logged.
+
+    Messages below that level can be kept until an ERROR message is received,
+    at which point the last `maxlen` accumulated messages below INFO are
+    printed out. This feature is only enabled under the `queue_debug` context
+    manager.
+    """
+
+    def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
+        super(ConfigureOutputHandler, self).__init__()
+
+        # Python has this feature where it sets the encoding of pipes to
+        # ASCII, which blatantly fails when trying to print out non-ASCII.
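+        # fix_encoding wraps a py2 non-tty stream in a codecs writer using
+        # the locale's preferred encoding; on py3 the stream is returned
+        # unchanged.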
+ def fix_encoding(fh): + if six.PY3: + return fh + try: + isatty = fh.isatty() + except AttributeError: + isatty = True + + if not isatty: + encoding = getpreferredencoding() + if encoding: + return codecs.getwriter(encoding)(fh) + return fh + + self._stdout = fix_encoding(stdout) + self._stderr = fix_encoding(stderr) if stdout != stderr else self._stdout + try: + fd1 = self._stdout.fileno() + fd2 = self._stderr.fileno() + self._same_output = self._is_same_output(fd1, fd2) + except (AttributeError, io.UnsupportedOperation): + self._same_output = self._stdout == self._stderr + self._stdout_waiting = None + self._debug = deque(maxlen=maxlen + 1) + self._keep_if_debug = self.THROW + self._queue_is_active = False + + @staticmethod + def _is_same_output(fd1, fd2): + if fd1 == fd2: + return True + stat1 = os.fstat(fd1) + stat2 = os.fstat(fd2) + return stat1.st_ino == stat2.st_ino and stat1.st_dev == stat2.st_dev + + # possible values for _stdout_waiting + WAITING = 1 + INTERRUPTED = 2 + + # possible values for _keep_if_debug + THROW = 0 + KEEP = 1 + PRINT = 2 + + def emit(self, record): + try: + if record.levelno == logging.INFO: + stream = self._stdout + msg = six.ensure_text(record.getMessage()) + if self._stdout_waiting == self.INTERRUPTED and self._same_output: + msg = " ... %s" % msg + self._stdout_waiting = msg.endswith("... ") + if msg.endswith("... "): + self._stdout_waiting = self.WAITING + else: + self._stdout_waiting = None + msg = "%s\n" % msg + elif record.levelno < logging.INFO and self._keep_if_debug != self.PRINT: + if self._keep_if_debug == self.KEEP: + self._debug.append(record) + return + else: + if record.levelno >= logging.ERROR and len(self._debug): + self._emit_queue() + + if self._stdout_waiting == self.WAITING and self._same_output: + self._stdout_waiting = self.INTERRUPTED + self._stdout.write("\n") + self._stdout.flush() + stream = self._stderr + msg = "%s\n" % self.format(record) + stream.write(msg) + stream.flush() + except (KeyboardInterrupt, SystemExit, IOError): + raise + except Exception: + self.handleError(record) + + @contextmanager + def queue_debug(self): + if self._queue_is_active: + yield + return + self._queue_is_active = True + self._keep_if_debug = self.KEEP + try: + yield + except Exception: + self._emit_queue() + # The exception will be handled and very probably printed out by + # something upper in the stack. + raise + finally: + self._queue_is_active = False + self._keep_if_debug = self.THROW + self._debug.clear() + + def _emit_queue(self): + self._keep_if_debug = self.PRINT + if len(self._debug) == self._debug.maxlen: + r = self._debug.popleft() + self.emit( + logging.LogRecord( + r.name, + r.levelno, + r.pathname, + r.lineno, + "", + (), + None, + ) + ) + while True: + try: + self.emit(self._debug.popleft()) + except IndexError: + break + self._keep_if_debug = self.KEEP + + +class LineIO(object): + """File-like class that sends each line of the written data to a callback + (without carriage returns). 
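+
+    A small usage sketch:
+
+        received = []
+        with LineIO(received.append) as out:
+            out.write('a\nb')
+            out.write('c\n')
+        # received == ['a', 'bc']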
+ """ + + def __init__(self, callback, errors="strict"): + self._callback = callback + self._buf = "" + self._encoding = getpreferredencoding() + self._errors = errors + + def write(self, buf): + buf = six.ensure_text(buf, encoding=self._encoding or "utf-8") + lines = buf.splitlines() + if not lines: + return + if self._buf: + lines[0] = self._buf + lines[0] + self._buf = "" + if not buf.endswith("\n"): + self._buf = lines.pop() + + for line in lines: + self._callback(line) + + def close(self): + if self._buf: + self._callback(self._buf) + self._buf = "" + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() diff --git a/python/mozbuild/mozbuild/controller/__init__.py b/python/mozbuild/mozbuild/controller/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/controller/building.py b/python/mozbuild/mozbuild/controller/building.py new file mode 100644 index 0000000000..de6c01afe4 --- /dev/null +++ b/python/mozbuild/mozbuild/controller/building.py @@ -0,0 +1,1872 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import errno +import getpass +import io +import json +import logging +import os +import re +import shutil +import subprocess +import sys +import time +from collections import Counter, OrderedDict, namedtuple +from textwrap import TextWrapper + +import six +from mach.site import CommandSiteManager + +try: + import psutil +except Exception: + psutil = None + +import mozfile +import mozpack.path as mozpath +from mach.mixin.logging import LoggingMixin +from mach.util import get_state_dir +from mozsystemmonitor.resourcemonitor import SystemResourceMonitor +from mozterm.widgets import Footer + +from ..backend import get_backend_class +from ..base import MozbuildObject +from ..compilation.warnings import WarningsCollector, WarningsDatabase +from ..testing import install_test_files +from ..util import FileAvoidWrite, mkdir, resolve_target_to_make +from .clobber import Clobberer + +FINDER_SLOW_MESSAGE = """ +=================== +PERFORMANCE WARNING + +The OS X Finder application (file indexing used by Spotlight) used a lot of CPU +during the build - an average of %f%% (100%% is 1 core). This made your build +slower. + +Consider adding ".noindex" to the end of your object directory name to have +Finder ignore it. Or, add an indexing exclusion through the Spotlight System +Preferences. +=================== +""".strip() + + +INSTALL_TESTS_CLOBBER = "".join( + [ + TextWrapper().fill(line) + "\n" + for line in """ +The build system was unable to install tests because the CLOBBER file has \ +been updated. This means if you edited any test files, your changes may not \ +be picked up until a full/clobber build is performed. + +The easiest and fastest way to perform a clobber build is to run: + + $ mach clobber + $ mach build + +If you did not modify any test files, it is safe to ignore this message \ +and proceed with running tests. To do this run: + + $ touch {clobber_file} +""".splitlines() + ] +) + +CLOBBER_REQUESTED_MESSAGE = """ +=================== +The CLOBBER file was updated prior to this build. A clobber build may be +required to succeed, but we weren't expecting it to. + +Please consider filing a bug for this failure if you have reason to believe +this is a clobber bug and not due to local changes. 
+=================== +""".strip() + + +BuildOutputResult = namedtuple( + "BuildOutputResult", ("warning", "state_changed", "message") +) + + +class TierStatus(object): + """Represents the state and progress of tier traversal. + + The build system is organized into linear phases called tiers. Each tier + executes in the order it was defined, 1 at a time. + """ + + def __init__(self, resources): + """Accepts a SystemResourceMonitor to record results against.""" + self.tiers = OrderedDict() + self.tier_status = OrderedDict() + self.resources = resources + + def set_tiers(self, tiers): + """Record the set of known tiers.""" + for tier in tiers: + self.tiers[tier] = dict( + begin_time=None, + finish_time=None, + duration=None, + ) + self.tier_status[tier] = None + + def begin_tier(self, tier): + """Record that execution of a tier has begun.""" + self.tier_status[tier] = "active" + t = self.tiers[tier] + t["begin_time"] = time.monotonic() + self.resources.begin_phase(tier) + + def finish_tier(self, tier): + """Record that execution of a tier has finished.""" + self.tier_status[tier] = "finished" + t = self.tiers[tier] + t["finish_time"] = time.monotonic() + t["duration"] = self.resources.finish_phase(tier) + + def tiered_resource_usage(self): + """Obtains an object containing resource usage for tiers. + + The returned object is suitable for serialization. + """ + o = [] + + for tier, state in self.tiers.items(): + t_entry = dict( + name=tier, + start=state["begin_time"], + end=state["finish_time"], + duration=state["duration"], + ) + + self.add_resources_to_dict(t_entry, phase=tier) + + o.append(t_entry) + + return o + + def add_resources_to_dict(self, entry, start=None, end=None, phase=None): + """Helper function to append resource information to a dict.""" + cpu_percent = self.resources.aggregate_cpu_percent( + start=start, end=end, phase=phase, per_cpu=False + ) + cpu_times = self.resources.aggregate_cpu_times( + start=start, end=end, phase=phase, per_cpu=False + ) + io = self.resources.aggregate_io(start=start, end=end, phase=phase) + + if cpu_percent is None: + return entry + + entry["cpu_percent"] = cpu_percent + entry["cpu_times"] = list(cpu_times) + entry["io"] = list(io) + + return entry + + def add_resource_fields_to_dict(self, d): + for usage in self.resources.range_usage(): + cpu_times = self.resources.aggregate_cpu_times(per_cpu=False) + + d["cpu_times_fields"] = list(cpu_times._fields) + d["io_fields"] = list(usage.io._fields) + d["virt_fields"] = list(usage.virt._fields) + d["swap_fields"] = list(usage.swap._fields) + + return d + + +class BuildMonitor(MozbuildObject): + """Monitors the output of the build.""" + + def init(self, warnings_path): + """Create a new monitor. + + warnings_path is a path of a warnings database to use. + """ + self._warnings_path = warnings_path + self.resources = SystemResourceMonitor(poll_interval=1.0) + self._resources_started = False + + self.tiers = TierStatus(self.resources) + + self.warnings_database = WarningsDatabase() + if os.path.exists(warnings_path): + try: + self.warnings_database.load_from_file(warnings_path) + except ValueError: + os.remove(warnings_path) + + # Contains warnings unique to this invocation. Not populated with old + # warnings. 
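+        # (The on_warning callback below inserts each new warning into both
+        # this database and the persistent one above.)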
+ self.instance_warnings = WarningsDatabase() + + def on_warning(warning): + # Skip `errors` + if warning["type"] == "error": + return + + filename = warning["filename"] + + if not os.path.exists(filename): + raise Exception("Could not find file containing warning: %s" % filename) + + self.warnings_database.insert(warning) + # Make a copy so mutations don't impact other database. + self.instance_warnings.insert(warning.copy()) + + self._warnings_collector = WarningsCollector(on_warning, objdir=self.topobjdir) + self._build_tasks = [] + + self.build_objects = [] + self.build_dirs = set() + + def start(self): + """Record the start of the build.""" + self.start_time = time.monotonic() + self._finder_start_cpu = self._get_finder_cpu_usage() + + def start_resource_recording(self): + # This should be merged into start() once bug 892342 lands. + self.resources.start() + self._resources_started = True + + def on_line(self, line): + """Consume a line of output from the build system. + + This will parse the line for state and determine whether more action is + needed. + + Returns a BuildOutputResult instance. + + In this named tuple, warning will be an object describing a new parsed + warning. Otherwise it will be None. + + state_changed indicates whether the build system changed state with + this line. If the build system changed state, the caller may want to + query this instance for the current state in order to update UI, etc. + + message is either None, or the content of a message to be + displayed to the user. + """ + message = None + + if line.startswith("BUILDSTATUS"): + args = line.split()[1:] + + action = args.pop(0) + update_needed = True + + if action == "TIERS": + self.tiers.set_tiers(args) + update_needed = False + elif action == "TIER_START": + tier = args[0] + self.tiers.begin_tier(tier) + elif action == "TIER_FINISH": + (tier,) = args + self.tiers.finish_tier(tier) + elif action == "OBJECT_FILE": + self.build_objects.append(args[0]) + update_needed = False + elif action == "BUILD_VERBOSE": + build_dir = args[0] + if build_dir not in self.build_dirs: + self.build_dirs.add(build_dir) + message = build_dir + update_needed = False + else: + raise Exception("Unknown build status: %s" % action) + + return BuildOutputResult(None, update_needed, message) + elif line.startswith("BUILDTASK"): + _, data = line.split(maxsplit=1) + # Check that we can parse the JSON. Skip this line if we can't; + # we'll be missing data, but that's not a huge deal. 
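+            # BUILDTASK payloads are kept as raw JSON strings and written to
+            # build_tasks.json by finish() outside automation.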
+            try:
+                json.loads(data)
+                self._build_tasks.append(data)
+            except json.decoder.JSONDecodeError:
+                pass
+            return BuildOutputResult(None, False, None)
+
+        warning = None
+
+        try:
+            warning = self._warnings_collector.process_line(line)
+            message = line
+        except Exception:
+            pass
+
+        return BuildOutputResult(warning, False, message)
+
+    def stop_resource_recording(self):
+        if self._resources_started:
+            self.resources.stop()
+
+        self._resources_started = False
+
+    def finish(self, record_usage=True):
+        """Record the end of the build."""
+        self.stop_resource_recording()
+        self.end_time = time.monotonic()
+        self._finder_end_cpu = self._get_finder_cpu_usage()
+        self.elapsed = self.end_time - self.start_time
+
+        self.warnings_database.prune()
+        self.warnings_database.save_to_file(self._warnings_path)
+
+        if "MOZ_AUTOMATION" not in os.environ:
+            build_tasks_path = self._get_state_filename("build_tasks.json")
+            with io.open(build_tasks_path, "w", encoding="utf-8", newline="\n") as fh:
+                fh.write("[")
+                first = True
+                for task in self._build_tasks:
+                    # We've already verified all of these are valid JSON, so we
+                    # can write the data out to the file directly.
+                    fh.write("%s\n %s" % ("," if not first else "", task))
+                    first = False
+                fh.write("\n]\n")
+
+        # Record usage.
+        if not record_usage:
+            return
+
+        try:
+            usage = self.get_resource_usage()
+            if not usage:
+                return
+
+            self.log_resource_usage(usage)
+            # When running on automation, we store the resource usage data in
+            # the upload path, alongside, for convenience, a copy of the HTML
+            # viewer.
+            if "MOZ_AUTOMATION" in os.environ and "UPLOAD_PATH" in os.environ:
+                build_resources_path = os.path.join(
+                    os.environ["UPLOAD_PATH"], "build_resources.json"
+                )
+                shutil.copy(
+                    os.path.join(
+                        self.topsrcdir,
+                        "python",
+                        "mozbuild",
+                        "mozbuild",
+                        "resources",
+                        "html-build-viewer",
+                        "build_resources.html",
+                    ),
+                    os.environ["UPLOAD_PATH"],
+                )
+            else:
+                build_resources_path = self._get_state_filename("build_resources.json")
+            with io.open(
+                build_resources_path, "w", encoding="utf-8", newline="\n"
+            ) as fh:
+                to_write = six.ensure_text(
+                    json.dumps(self.resources.as_dict(), indent=2)
+                )
+                fh.write(to_write)
+        except Exception as e:
+            self.log(
+                logging.WARNING,
+                "build_resources_error",
+                {"msg": str(e)},
+                "Exception when writing resource usage file: {msg}",
+            )
+
+    def _get_finder_cpu_usage(self):
+        """Obtain the CPU usage of the Finder app on OS X.
+
+        This is used to detect high CPU usage.
+        """
+        if not sys.platform.startswith("darwin"):
+            return None
+
+        if not psutil:
+            return None
+
+        for proc in psutil.process_iter():
+            # psutil 2.0 turned these attributes into methods.
+            if proc.name() != "Finder":
+                continue
+
+            if proc.username() != getpass.getuser():
+                continue
+
+            # Try to isolate system finder as opposed to other "Finder"
+            # processes.
+            if not proc.exe().endswith("CoreServices/Finder.app/Contents/MacOS/Finder"):
+                continue
+
+            return proc.cpu_times()
+
+        return None
+
+    def have_high_finder_usage(self):
+        """Determine whether there was high Finder CPU usage during the build.
+
+        Returns True if there was high Finder CPU usage, False if there wasn't,
+        or None if there is nothing to report.
+        """
+        if not self._finder_start_cpu:
+            return None, None
+
+        # We only measure if the measured range is sufficiently long.
+        if self.elapsed < 15:
+            return None, None
+
+        if not self._finder_end_cpu:
+            return None, None
+
+        start = self._finder_start_cpu
+        end = self._finder_end_cpu
+
+        start_total = start.user + start.system
+        end_total = end.user + end.system
+
+        cpu_seconds = end_total - start_total
+
+        # If Finder used more than 25% of 1 core during the build, report
+        # a warning.
+        finder_percent = cpu_seconds / self.elapsed * 100
+
+        return finder_percent > 25, finder_percent
+
+    def have_excessive_swapping(self):
+        """Determine whether there was excessive swapping during the build.
+
+        Returns a tuple of (excessive, swap_in, swap_out). All values are None
+        if no swap information is available.
+        """
+        if not self.have_resource_usage:
+            return None, None, None
+
+        swap_in = sum(m.swap.sin for m in self.resources.measurements)
+        swap_out = sum(m.swap.sout for m in self.resources.measurements)
+
+        # The threshold of 512 MB has been arbitrarily chosen.
+        #
+        # Choosing a proper value that is ideal for everyone is hard. We will
+        # likely iterate on the logic until people are generally satisfied.
+        # If a value is too low, the eventual warning produced does not carry
+        # much meaning. If the threshold is too high, people may not see the
+        # warning and the warning will thus be ineffective.
+        excessive = swap_in > 512 * 1048576 or swap_out > 512 * 1048576
+        return excessive, swap_in, swap_out
+
+    @property
+    def have_resource_usage(self):
+        """Whether resource usage is available."""
+        return self.resources.start_time is not None
+
+    def get_resource_usage(self):
+        """Produce a data structure containing the low-level resource usage information.
+
+        This data structure can e.g. be serialized into JSON and saved for
+        subsequent analysis.
+
+        If no resource usage is available, None is returned.
+        """
+        if not self.have_resource_usage:
+            return None
+
+        cpu_percent = self.resources.aggregate_cpu_percent(phase=None, per_cpu=False)
+        cpu_times = self.resources.aggregate_cpu_times(phase=None, per_cpu=False)
+        io = self.resources.aggregate_io(phase=None)
+
+        o = dict(
+            version=3,
+            argv=sys.argv,
+            start=self.start_time,
+            end=self.end_time,
+            duration=self.end_time - self.start_time,
+            resources=[],
+            cpu_percent=cpu_percent,
+            cpu_times=cpu_times,
+            io=io,
+            objects=self.build_objects,
+        )
+
+        o["tiers"] = self.tiers.tiered_resource_usage()
+
+        self.tiers.add_resource_fields_to_dict(o)
+
+        for usage in self.resources.range_usage():
+            cpu_percent = self.resources.aggregate_cpu_percent(
+                usage.start, usage.end, per_cpu=False
+            )
+            cpu_times = self.resources.aggregate_cpu_times(
+                usage.start, usage.end, per_cpu=False
+            )
+
+            entry = dict(
+                start=usage.start,
+                end=usage.end,
+                virt=list(usage.virt),
+                swap=list(usage.swap),
+            )
+
+            self.tiers.add_resources_to_dict(entry, start=usage.start, end=usage.end)
+
+            o["resources"].append(entry)
+
+        # If the imports for this file ran before the in-tree virtualenv
+        # was bootstrapped (for instance, for a clobber build in automation),
+        # psutil might not be available.
+        #
+        # Treat psutil as optional to avoid an outright failure to log
+        # resources.
+        # TODO: it would be nice to collect data on the storage device as well
+        # in this case.
+ o["system"] = {} + if psutil: + o["system"].update( + dict( + logical_cpu_count=psutil.cpu_count(), + physical_cpu_count=psutil.cpu_count(logical=False), + swap_total=psutil.swap_memory()[0], + vmem_total=psutil.virtual_memory()[0], + ) + ) + + return o + + def log_resource_usage(self, usage): + """Summarize the resource usage of this build in a log message.""" + + if not usage: + return + + params = dict( + duration=self.end_time - self.start_time, + cpu_percent=usage["cpu_percent"], + io_read_bytes=usage["io"].read_bytes, + io_write_bytes=usage["io"].write_bytes, + io_read_time=usage["io"].read_time, + io_write_time=usage["io"].write_time, + ) + + message = ( + "Overall system resources - Wall time: {duration:.0f}s; " + "CPU: {cpu_percent:.0f}%; " + "Read bytes: {io_read_bytes}; Write bytes: {io_write_bytes}; " + "Read time: {io_read_time}; Write time: {io_write_time}" + ) + + self.log(logging.WARNING, "resource_usage", params, message) + + excessive, sin, sout = self.have_excessive_swapping() + if excessive is not None and (sin or sout): + sin /= 1048576 + sout /= 1048576 + self.log( + logging.WARNING, + "swap_activity", + {"sin": sin, "sout": sout}, + "Swap in/out (MB): {sin}/{sout}", + ) + + def ccache_stats(self, ccache=None): + ccache_stats = None + + if ccache is None: + ccache = mozfile.which("ccache") + if ccache: + # With CCache v3.7+ we can use --print-stats + has_machine_format = CCacheStats.check_version_3_7_or_newer(ccache) + try: + output = subprocess.check_output( + [ccache, "--print-stats" if has_machine_format else "-s"], + universal_newlines=True, + ) + ccache_stats = CCacheStats(output, has_machine_format) + except ValueError as e: + self.log(logging.WARNING, "ccache", {"msg": str(e)}, "{msg}") + return ccache_stats + + +class TerminalLoggingHandler(logging.Handler): + """Custom logging handler that works with terminal window dressing. + + This class should probably live elsewhere, like the mach core. Consider + this a proving ground for its usefulness. + """ + + def __init__(self): + logging.Handler.__init__(self) + + self.fh = sys.stdout + self.footer = None + + def flush(self): + self.acquire() + + try: + self.fh.flush() + finally: + self.release() + + def emit(self, record): + msg = self.format(record) + + self.acquire() + + try: + if self.footer: + self.footer.clear() + + self.fh.write(msg) + self.fh.write("\n") + + if self.footer: + self.footer.draw() + + # If we don't flush, the footer may not get drawn. + self.fh.flush() + finally: + self.release() + + +class BuildProgressFooter(Footer): + """Handles display of a build progress indicator in a terminal. + + When mach builds inside a blessed-supported terminal, it will render + progress information collected from a BuildMonitor. This class converts the + state of BuildMonitor into terminal output. 
+ """ + + def __init__(self, terminal, monitor): + Footer.__init__(self, terminal) + self.tiers = six.viewitems(monitor.tiers.tier_status) + + def draw(self): + """Draws this footer in the terminal.""" + + if not self.tiers: + return + + # The drawn terminal looks something like: + # TIER: static export libs tools + + parts = [("bold", "TIER:")] + append = parts.append + for tier, status in self.tiers: + if status is None: + append(tier) + elif status == "finished": + append(("green", tier)) + else: + append(("underline_yellow", tier)) + + self.write(parts) + + +class OutputManager(LoggingMixin): + """Handles writing job output to a terminal or log.""" + + def __init__(self, log_manager, footer): + self.populate_logger() + + self.footer = None + terminal = log_manager.terminal + + # TODO convert terminal footer to config file setting. + if not terminal: + return + if os.environ.get("INSIDE_EMACS", None): + return + + if os.environ.get("MACH_NO_TERMINAL_FOOTER", None): + footer = None + + self.t = terminal + self.footer = footer + + self._handler = TerminalLoggingHandler() + self._handler.setFormatter(log_manager.terminal_formatter) + self._handler.footer = self.footer + + old = log_manager.replace_terminal_handler(self._handler) + self._handler.level = old.level + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + if self.footer: + self.footer.clear() + # Prevents the footer from being redrawn if logging occurs. + self._handler.footer = None + + def write_line(self, line): + if self.footer: + self.footer.clear() + + print(line) + + if self.footer: + self.footer.draw() + + def refresh(self): + if not self.footer: + return + + self.footer.clear() + self.footer.draw() + + +class BuildOutputManager(OutputManager): + """Handles writing build output to a terminal, to logs, etc.""" + + def __init__(self, log_manager, monitor, footer): + self.monitor = monitor + OutputManager.__init__(self, log_manager, footer) + + def __exit__(self, exc_type, exc_value, traceback): + OutputManager.__exit__(self, exc_type, exc_value, traceback) + + # Ensure the resource monitor is stopped because leaving it running + # could result in the process hanging on exit because the resource + # collection child process hasn't been told to stop. 
+ self.monitor.stop_resource_recording() + + def on_line(self, line): + warning, state_changed, message = self.monitor.on_line(line) + + if message: + self.log(logging.INFO, "build_output", {"line": message}, "{line}") + elif state_changed: + have_handler = hasattr(self, "handler") + if have_handler: + self.handler.acquire() + try: + self.refresh() + finally: + if have_handler: + self.handler.release() + + +class StaticAnalysisFooter(Footer): + """Handles display of a static analysis progress indicator in a terminal.""" + + def __init__(self, terminal, monitor): + Footer.__init__(self, terminal) + self.monitor = monitor + + def draw(self): + """Draws this footer in the terminal.""" + + monitor = self.monitor + total = monitor.num_files + processed = monitor.num_files_processed + percent = "(%.2f%%)" % (processed * 100.0 / total) + parts = [ + ("bright_black", "Processing"), + ("yellow", str(processed)), + ("bright_black", "of"), + ("yellow", str(total)), + ("bright_black", "files"), + ("green", percent), + ] + if monitor.current_file: + parts.append(("bold", monitor.current_file)) + + self.write(parts) + + +class StaticAnalysisOutputManager(OutputManager): + """Handles writing static analysis output to a terminal or file.""" + + def __init__(self, log_manager, monitor, footer): + self.monitor = monitor + self.raw = "" + OutputManager.__init__(self, log_manager, footer) + + def on_line(self, line): + warning, relevant = self.monitor.on_line(line) + if relevant: + self.raw += line + "\n" + + if warning: + self.log( + logging.INFO, + "compiler_warning", + warning, + "Warning: {flag} in {filename}: {message}", + ) + + if relevant: + self.log(logging.INFO, "build_output", {"line": line}, "{line}") + else: + have_handler = hasattr(self, "handler") + if have_handler: + self.handler.acquire() + try: + self.refresh() + finally: + if have_handler: + self.handler.release() + + def write(self, path, output_format): + assert output_format in ("text", "json"), "Invalid output format {}".format( + output_format + ) + path = os.path.realpath(path) + + if output_format == "json": + self.monitor._warnings_database.save_to_file(path) + + else: + with io.open(path, "w", encoding="utf-8", newline="\n") as f: + f.write(self.raw) + + self.log( + logging.INFO, + "write_output", + {"path": path, "format": output_format}, + "Wrote {format} output in {path}", + ) + + +class CCacheStats(object): + """Holds statistics from ccache. + + Instances can be subtracted from each other to obtain differences. + print() or str() the object to show a ``ccache -s`` like output + of the captured stats. + + """ + + STATS_KEYS = [ + # (key, description) + # Refer to stats.c in ccache project for all the descriptions. 
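+        # A description may be a tuple of labels when ccache versions
+        # disagree on wording; str.startswith() accepts the whole tuple.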
+ ("stats_zeroed", ("stats zeroed", "stats zero time")), + ("stats_updated", "stats updated"), + ("cache_hit_direct", "cache hit (direct)"), + ("cache_hit_preprocessed", "cache hit (preprocessed)"), + ("cache_hit_rate", "cache hit rate"), + ("cache_miss", "cache miss"), + ("link", "called for link"), + ("preprocessing", "called for preprocessing"), + ("multiple", "multiple source files"), + ("stdout", "compiler produced stdout"), + ("no_output", "compiler produced no output"), + ("empty_output", "compiler produced empty output"), + ("failed", "compile failed"), + ("error", "ccache internal error"), + ("preprocessor_error", "preprocessor error"), + ("cant_use_pch", "can't use precompiled header"), + ("compiler_missing", "couldn't find the compiler"), + ("cache_file_missing", "cache file missing"), + ("bad_args", "bad compiler arguments"), + ("unsupported_lang", "unsupported source language"), + ("compiler_check_failed", "compiler check failed"), + ("autoconf", "autoconf compile/link"), + ("unsupported_code_directive", "unsupported code directive"), + ("unsupported_compiler_option", "unsupported compiler option"), + ("out_stdout", "output to stdout"), + ("out_device", "output to a non-regular file"), + ("no_input", "no input file"), + ("bad_extra_file", "error hashing extra file"), + ("num_cleanups", "cleanups performed"), + ("cache_files", "files in cache"), + ("cache_size", "cache size"), + ("cache_max_size", "max cache size"), + ] + + SKIP_LINES = ( + "cache directory", + "primary config", + "secondary config", + ) + + STATS_KEYS_3_7_PLUS = { + "stats_zeroed_timestamp": "stats_zeroed", + "stats_updated_timestamp": "stats_updated", + "direct_cache_hit": "cache_hit_direct", + "preprocessed_cache_hit": "cache_hit_preprocessed", + # "cache_hit_rate" is not provided + "cache_miss": "cache_miss", + "called_for_link": "link", + "called_for_preprocessing": "preprocessing", + "multiple_source_files": "multiple", + "compiler_produced_stdout": "stdout", + "compiler_produced_no_output": "no_output", + "compiler_produced_empty_output": "empty_output", + "compile_failed": "failed", + "internal_error": "error", + "preprocessor_error": "preprocessor_error", + "could_not_use_precompiled_header": "cant_use_pch", + "could_not_find_compiler": "compiler_missing", + "missing_cache_file": "cache_file_missing", + "bad_compiler_arguments": "bad_args", + "unsupported_source_language": "unsupported_lang", + "compiler_check_failed": "compiler_check_failed", + "autoconf_test": "autoconf", + "unsupported_code_directive": "unsupported_code_directive", + "unsupported_compiler_option": "unsupported_compiler_option", + "output_to_stdout": "out_stdout", + "output_to_a_non_file": "out_device", + "no_input_file": "no_input", + "error_hashing_extra_file": "bad_extra_file", + "cleanups_performed": "num_cleanups", + "files_in_cache": "cache_files", + "cache_size_kibibyte": "cache_size", + # "cache_max_size" is obsolete and not printed anymore + } + + ABSOLUTE_KEYS = {"cache_files", "cache_size", "cache_max_size"} + FORMAT_KEYS = {"cache_size", "cache_max_size"} + + GiB = 1024 ** 3 + MiB = 1024 ** 2 + KiB = 1024 + + def __init__(self, output=None, has_machine_format=False): + """Construct an instance from the output of ccache -s.""" + self._values = {} + + if not output: + return + + if has_machine_format: + self._parse_machine_format(output) + else: + self._parse_human_format(output) + + def _parse_machine_format(self, output): + for line in output.splitlines(): + line = line.strip() + key, _, value = line.partition("\t") + 
stat_key = self.STATS_KEYS_3_7_PLUS.get(key) + if stat_key: + value = int(value) + if key.endswith("_kibibyte"): + value *= 1024 + self._values[stat_key] = value + + (direct, preprocessed, miss) = self.hit_rates() + self._values["cache_hit_rate"] = (direct + preprocessed) * 100 + + def _parse_human_format(self, output): + for line in output.splitlines(): + line = line.strip() + if line: + self._parse_line(line) + + def _parse_line(self, line): + line = six.ensure_text(line) + for stat_key, stat_description in self.STATS_KEYS: + if line.startswith(stat_description): + raw_value = self._strip_prefix(line, stat_description) + self._values[stat_key] = self._parse_value(raw_value) + break + else: + if not line.startswith(self.SKIP_LINES): + raise ValueError("Failed to parse ccache stats output: %s" % line) + + @staticmethod + def _strip_prefix(line, prefix): + if isinstance(prefix, tuple): + for p in prefix: + line = CCacheStats._strip_prefix(line, p) + return line + return line[len(prefix) :].strip() if line.startswith(prefix) else line + + @staticmethod + def _parse_value(raw_value): + try: + # ccache calls strftime with '%c' (src/stats.c) + ts = time.strptime(raw_value, "%c") + return int(time.mktime(ts)) + except ValueError: + if raw_value == "never": + return 0 + pass + + value = raw_value.split() + unit = "" + if len(value) == 1: + numeric = value[0] + elif len(value) == 2: + numeric, unit = value + else: + raise ValueError("Failed to parse ccache stats value: %s" % raw_value) + + if "." in numeric: + numeric = float(numeric) + else: + numeric = int(numeric) + + if unit in ("GB", "Gbytes"): + unit = CCacheStats.GiB + elif unit in ("MB", "Mbytes"): + unit = CCacheStats.MiB + elif unit in ("KB", "Kbytes"): + unit = CCacheStats.KiB + else: + unit = 1 + + return int(numeric * unit) + + def hit_rate_message(self): + return ( + "ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%};" + " miss rate: {:.1%}".format(*self.hit_rates()) + ) + + def hit_rates(self): + direct = self._values["cache_hit_direct"] + preprocessed = self._values["cache_hit_preprocessed"] + miss = self._values["cache_miss"] + total = float(direct + preprocessed + miss) + + if total > 0: + direct /= total + preprocessed /= total + miss /= total + + return (direct, preprocessed, miss) + + def __sub__(self, other): + result = CCacheStats() + + for k, prefix in self.STATS_KEYS: + if k not in self._values and k not in other._values: + continue + + our_value = self._values.get(k, 0) + other_value = other._values.get(k, 0) + + if k in self.ABSOLUTE_KEYS: + result._values[k] = our_value + else: + result._values[k] = our_value - other_value + + return result + + def __str__(self): + LEFT_ALIGN = 34 + lines = [] + + for stat_key, stat_description in self.STATS_KEYS: + if stat_key not in self._values: + continue + + value = self._values[stat_key] + + if stat_key in self.FORMAT_KEYS: + value = "%15s" % self._format_value(value) + else: + value = "%8u" % value + + if isinstance(stat_description, tuple): + stat_description = stat_description[0] + + lines.append("%s%s" % (stat_description.ljust(LEFT_ALIGN), value)) + + return "\n".join(lines) + + def __nonzero__(self): + relative_values = [ + v for k, v in self._values.items() if k not in self.ABSOLUTE_KEYS + ] + return all(v >= 0 for v in relative_values) and any( + v > 0 for v in relative_values + ) + + def __bool__(self): + return self.__nonzero__() + + @staticmethod + def _format_value(v): + if v > CCacheStats.GiB: + return "%.1f Gbytes" % (float(v) / CCacheStats.GiB) + 
elif v > CCacheStats.MiB: + return "%.1f Mbytes" % (float(v) / CCacheStats.MiB) + else: + return "%.1f Kbytes" % (float(v) / CCacheStats.KiB) + + @staticmethod + def check_version_3_7_or_newer(ccache): + output_version = subprocess.check_output( + [ccache, "--version"], universal_newlines=True + ) + return CCacheStats._is_version_3_7_or_newer(output_version) + + @staticmethod + def _is_version_3_7_or_newer(output): + if "ccache version" not in output: + return False + + major = 0 + minor = 0 + + for line in output.splitlines(): + version = re.search(r"ccache version (\d+).(\d+).*", line) + if version: + major = int(version.group(1)) + minor = int(version.group(2)) + break + + return ((major << 8) + minor) >= ((3 << 8) + 7) + + +class BuildDriver(MozbuildObject): + """Provides a high-level API for build actions.""" + + def __init__(self, *args, **kwargs): + MozbuildObject.__init__(self, *args, virtualenv_name="build", **kwargs) + self.metrics = None + self.mach_context = None + + def build( + self, + metrics, + what=None, + jobs=0, + job_size=0, + directory=None, + verbose=False, + keep_going=False, + mach_context=None, + append_env=None, + virtualenv_topobjdir=None, + ): + """Invoke the build backend. + + ``what`` defines the thing to build. If not defined, the default + target is used. + """ + self.metrics = metrics + self.mach_context = mach_context + warnings_path = self._get_state_filename("warnings.json") + monitor = self._spawn(BuildMonitor) + monitor.init(warnings_path) + footer = BuildProgressFooter(self.log_manager.terminal, monitor) + + # Disable indexing in objdir because it is not necessary and can slow + # down builds. + mkdir(self.topobjdir, not_indexed=True) + + with BuildOutputManager(self.log_manager, monitor, footer) as output: + monitor.start() + + if directory is not None and not what: + print("Can only use -C/--directory with an explicit target " "name.") + return 1 + + if directory is not None: + directory = mozpath.normsep(directory) + if directory.startswith("/"): + directory = directory[1:] + + monitor.start_resource_recording() + + if self._check_clobber(self.mozconfig, os.environ): + return 1 + + self.mach_context.command_attrs["clobber"] = False + self.metrics.mozbuild.clobber.set(False) + config = None + try: + config = self.config_environment + except Exception: + # If we don't already have a config environment this is either + # a fresh objdir or $OBJDIR/config.status has been removed for + # some reason, which indicates a clobber of sorts. + self.mach_context.command_attrs["clobber"] = True + self.metrics.mozbuild.clobber.set(True) + + # Record whether a clobber was requested so we can print + # a special message later if the build fails. + clobber_requested = False + + # Write out any changes to the current mozconfig in case + # they should invalidate configure. + self._write_mozconfig_json() + + previous_backend = None + if config is not None: + previous_backend = config.substs.get("BUILD_BACKENDS", [None])[0] + + config_rc = None + # Even if we have a config object, it may be out of date + # if something that influences its result has changed. 
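+            # build_out_of_date() compares config.status against the inputs
+            # listed in config_status_deps.in; staleness triggers the
+            # re-configure below.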
+ if config is None or self.build_out_of_date( + mozpath.join(self.topobjdir, "config.status"), + mozpath.join(self.topobjdir, "config_status_deps.in"), + ): + if previous_backend and "Make" not in previous_backend: + clobber_requested = self._clobber_configure() + + if config is None: + print(" Config object not found by mach.") + + config_rc = self.configure( + metrics, + buildstatus_messages=True, + line_handler=output.on_line, + append_env=append_env, + virtualenv_topobjdir=virtualenv_topobjdir, + ) + + if config_rc != 0: + return config_rc + + config = self.reload_config_environment() + + if config.substs.get("MOZ_USING_CCACHE"): + ccache = config.substs.get("CCACHE") + ccache_start = monitor.ccache_stats(ccache) + else: + ccache_start = None + + # Collect glean metrics + substs = config.substs + mozbuild_metrics = metrics.mozbuild + mozbuild_metrics.compiler.set(substs.get("CC_TYPE", None)) + + def get_substs_flag(name): + return bool(substs.get(name, None)) + + mozbuild_metrics.artifact.set(get_substs_flag("MOZ_ARTIFACT_BUILDS")) + mozbuild_metrics.debug.set(get_substs_flag("MOZ_DEBUG")) + mozbuild_metrics.opt.set(get_substs_flag("MOZ_OPTIMIZE")) + mozbuild_metrics.ccache.set(get_substs_flag("CCACHE")) + using_sccache = get_substs_flag("MOZ_USING_SCCACHE") + mozbuild_metrics.sccache.set(using_sccache) + mozbuild_metrics.icecream.set(get_substs_flag("CXX_IS_ICECREAM")) + mozbuild_metrics.project.set(substs.get("MOZ_BUILD_APP", "")) + + all_backends = config.substs.get("BUILD_BACKENDS", [None]) + active_backend = all_backends[0] + + status = None + + if not config_rc and any( + [ + self.backend_out_of_date( + mozpath.join(self.topobjdir, "backend.%sBackend" % backend) + ) + for backend in all_backends + ] + ): + print("Build configuration changed. Regenerating backend.") + args = [ + config.substs["PYTHON3"], + mozpath.join(self.topobjdir, "config.status"), + ] + self.run_process(args, cwd=self.topobjdir, pass_thru=True) + + if jobs == 0: + for param in self.mozconfig.get("make_extra") or []: + key, value = param.split("=", 1) + if key == "MOZ_PARALLEL_BUILD": + jobs = int(value) + + if "Make" not in active_backend: + backend_cls = get_backend_class(active_backend)(config) + status = backend_cls.build(self, output, jobs, verbose, what) + + if status and clobber_requested: + for line in CLOBBER_REQUESTED_MESSAGE.splitlines(): + self.log( + logging.WARNING, "clobber", {"msg": line.rstrip()}, "{msg}" + ) + + if what and status is None: + # Collect target pairs. + target_pairs = [] + for target in what: + path_arg = self._wrap_path_argument(target) + + if directory is not None: + make_dir = os.path.join(self.topobjdir, directory) + make_target = target + else: + make_dir, make_target = resolve_target_to_make( + self.topobjdir, path_arg.relpath() + ) + + if make_dir is None and make_target is None: + return 1 + + if config.is_artifact_build and target.startswith("installers-"): + # See https://bugzilla.mozilla.org/show_bug.cgi?id=1387485 + print( + "Localized Builds are not supported with Artifact Builds enabled.\n" + "You should disable Artifact Builds (Use --disable-compile-environment " + "in your mozconfig instead) then re-build to proceed." + ) + return 1 + + # See bug 886162 - we don't want to "accidentally" build + # the entire tree (if that's really the intent, it's + # unlikely they would have specified a directory.) + if not make_dir and not make_target: + print( + "The specified directory doesn't contain a " + "Makefile and the first parent with one is the " + "root of the tree. 
Please specify a directory " + "with a Makefile or run |mach build| if you " + "want to build the entire tree." + ) + return 1 + + target_pairs.append((make_dir, make_target)) + + # Build target pairs. + for make_dir, make_target in target_pairs: + # We don't display build status messages during partial + # tree builds because they aren't reliable there. This + # could potentially be fixed if the build monitor were more + # intelligent about encountering undefined state. + no_build_status = "1" if make_dir is not None else "" + tgt_env = dict(append_env or {}) + tgt_env["NO_BUILDSTATUS_MESSAGES"] = no_build_status + status = self._run_make( + directory=make_dir, + target=make_target, + line_handler=output.on_line, + log=False, + print_directory=False, + ensure_exit_code=False, + num_jobs=jobs, + job_size=job_size, + silent=not verbose, + append_env=tgt_env, + keep_going=keep_going, + ) + + if status != 0: + break + + elif status is None: + # If the backend doesn't specify a build() method, then just + # call client.mk directly. + status = self._run_client_mk( + line_handler=output.on_line, + jobs=jobs, + job_size=job_size, + verbose=verbose, + keep_going=keep_going, + append_env=append_env, + ) + + self.log( + logging.WARNING, + "warning_summary", + {"count": len(monitor.warnings_database)}, + "{count} compiler warnings present.", + ) + + # Try to run the active build backend's post-build step, if possible. + try: + active_backend = config.substs.get("BUILD_BACKENDS", [None])[0] + if active_backend: + backend_cls = get_backend_class(active_backend)(config) + new_status = backend_cls.post_build( + self, output, jobs, verbose, status + ) + status = new_status + except Exception as ex: + self.log( + logging.DEBUG, + "post_build", + {"ex": str(ex)}, + "Unable to run active build backend's post-build step; " + + "failing the build due to exception: {ex}.", + ) + if not status: + # If the underlying build provided a failing status, pass + # it through; otherwise, fail. + status = 1 + + record_usage = status == 0 + + # On automation, only record usage for plain `mach build` + if "MOZ_AUTOMATION" in os.environ and what: + record_usage = False + + monitor.finish(record_usage=record_usage) + + if status == 0: + usage = monitor.get_resource_usage() + if usage: + self.mach_context.command_attrs["usage"] = usage + + # Print the collected compiler warnings. This is redundant with + # inline output from the compiler itself. However, unlike inline + # output, this list is sorted and grouped by file, making it + # easier to triage output. + # + # Only do this if we had a successful build. If the build failed, + # there are more important things in the log to look for than + # whatever code we warned about. + if not status: + # Suppress warnings for 3rd party projects in local builds + # until we suppress them for real. + # TODO remove entries/feature once we stop generating warnings + # in these directories. 
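
As a reference sketch, separate from the patch itself: the suppression below boils down to normalizing the third-party and generated path prefixes and matching each warning against them with a tuple-based str.startswith(). The warning dicts here are hypothetical stand-ins for entries in the warnings database.

    from collections import Counter

    def filter_warnings(warnings, suppress_dirs):
        # Normalize prefixes (no trailing slash) so startswith() matches
        # entries shaped like those in ThirdPartyPaths.txt / Generated.txt.
        prefixes = tuple(p.rstrip("/") for p in suppress_dirs)
        kept, suppressed = [], Counter()
        for w in warnings:
            path = w["filename"].replace("\\", "/")
            if prefixes and path.startswith(prefixes):
                suppressed["third-party code"] += 1
                continue
            kept.append(w)
        return kept, suppressed

    demo = [
        {"filename": "third_party/rust/foo.rs"},
        {"filename": "dom/base/nsDocument.cpp"},
    ]
    kept, suppressed = filter_warnings(demo, ["third_party/"])
    assert len(kept) == 1 and suppressed["third-party code"] == 1
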
+ pathToThirdparty = os.path.join( + self.topsrcdir, "tools", "rewriting", "ThirdPartyPaths.txt" + ) + + pathToGenerated = os.path.join( + self.topsrcdir, "tools", "rewriting", "Generated.txt" + ) + + if os.path.exists(pathToThirdparty): + with io.open( + pathToThirdparty, encoding="utf-8", newline="\n" + ) as f, io.open(pathToGenerated, encoding="utf-8", newline="\n") as g: + # Normalize the path (no trailing /) + LOCAL_SUPPRESS_DIRS = tuple( + [line.strip("\n/") for line in f] + + [line.strip("\n/") for line in g] + ) + else: + # For application based on gecko like thunderbird + LOCAL_SUPPRESS_DIRS = () + + suppressed_by_dir = Counter() + + THIRD_PARTY_CODE = "third-party code" + suppressed = set( + w.replace("-Wno-error=", "-W") + for w in substs.get("WARNINGS_CFLAGS", []) + + substs.get("WARNINGS_CXXFLAGS", []) + if w.startswith("-Wno-error=") + ) + warnings = [] + for warning in sorted(monitor.instance_warnings): + path = mozpath.normsep(warning["filename"]) + if path.startswith(self.topsrcdir): + path = path[len(self.topsrcdir) + 1 :] + + warning["normpath"] = path + + if "MOZ_AUTOMATION" not in os.environ: + if path.startswith(LOCAL_SUPPRESS_DIRS): + suppressed_by_dir[THIRD_PARTY_CODE] += 1 + continue + + if warning["flag"] in suppressed: + suppressed_by_dir[os.path.dirname(path)] += 1 + continue + + warnings.append(warning) + + if THIRD_PARTY_CODE in suppressed_by_dir: + suppressed_third_party_code = [ + (THIRD_PARTY_CODE, suppressed_by_dir.pop(THIRD_PARTY_CODE)) + ] + else: + suppressed_third_party_code = [] + for d, count in suppressed_third_party_code + sorted( + suppressed_by_dir.items() + ): + self.log( + logging.WARNING, + "suppressed_warning", + {"dir": d, "count": count}, + "(suppressed {count} warnings in {dir})", + ) + + for warning in warnings: + if warning["column"] is not None: + self.log( + logging.WARNING, + "compiler_warning", + warning, + "warning: {normpath}:{line}:{column} [{flag}] " "{message}", + ) + else: + self.log( + logging.WARNING, + "compiler_warning", + warning, + "warning: {normpath}:{line} [{flag}] {message}", + ) + + high_finder, finder_percent = monitor.have_high_finder_usage() + if high_finder: + print(FINDER_SLOW_MESSAGE % finder_percent) + + if config.substs.get("MOZ_USING_CCACHE"): + ccache_end = monitor.ccache_stats(ccache) + else: + ccache_end = None + + ccache_diff = None + if ccache_start and ccache_end: + ccache_diff = ccache_end - ccache_start + if ccache_diff: + self.log( + logging.INFO, + "ccache", + {"msg": ccache_diff.hit_rate_message()}, + "{msg}", + ) + + notify_minimum_time = 300 + try: + notify_minimum_time = int(os.environ.get("MACH_NOTIFY_MINTIME", "300")) + except ValueError: + # Just stick with the default + pass + + if monitor.elapsed > notify_minimum_time: + # Display a notification when the build completes. + self.notify("Build complete" if not status else "Build failed") + + if status: + if what and any( + [target for target in what if target not in ("faster", "binaries")] + ): + print( + "Hey! Builds initiated with `mach build " + "$A_SPECIFIC_TARGET` may not always work, even if the " + "code being built is correct. Consider doing a bare " + "`mach build` instead." 
+ ) + return status + + if monitor.have_resource_usage: + excessive, swap_in, swap_out = monitor.have_excessive_swapping() + # if excessive: + # print(EXCESSIVE_SWAP_MESSAGE) + + print("To view resource usage of the build, run |mach " "resource-usage|.") + + long_build = monitor.elapsed > 1200 + + if long_build: + output.on_line( + "We know it took a while, but your build finally finished successfully!" + ) + if not using_sccache: + output.on_line( + "If you are building Firefox often, SCCache can save you a lot " + "of time. You can learn more here: " + "https://firefox-source-docs.mozilla.org/setup/" + "configuring_build_options.html#sccache" + ) + else: + output.on_line("Your build was successful!") + + # Only for full builds because incremental builders likely don't + # need to be burdened with this. + if not what: + try: + # Fennec doesn't have useful output from just building. We should + # arguably make the build action useful for Fennec. Another day... + if self.substs["MOZ_BUILD_APP"] != "mobile/android": + print("To take your build for a test drive, run: |mach run|") + app = self.substs["MOZ_BUILD_APP"] + if app in ("browser", "mobile/android"): + print( + "For more information on what to do now, see " + "https://firefox-source-docs.mozilla.org/setup/contributing_code.html" # noqa + ) + except Exception: + # Ignore Exceptions in case we can't find config.status (such + # as when doing OSX Universal builds) + pass + + return status + + def configure( + self, + metrics, + options=None, + buildstatus_messages=False, + line_handler=None, + append_env=None, + virtualenv_topobjdir=None, + ): + # Disable indexing in objdir because it is not necessary and can slow + # down builds. + self.metrics = metrics + mkdir(self.topobjdir, not_indexed=True) + self._write_mozconfig_json() + + def on_line(line): + self.log(logging.INFO, "build_output", {"line": line}, "{line}") + + line_handler = line_handler or on_line + + append_env = dict(append_env or {}) + + # Back when client.mk was used, `mk_add_options "export ..."` lines + # from the mozconfig would spill into the configure environment, so + # add that for backwards compatibility. 
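
A standalone sketch, separate from the patch, of the `export NAME=VALUE` parsing the loop below performs; the input lines are hypothetical stand-ins for the mozconfig's `make_extra` entries.

    def extract_exports(make_extra_lines):
        env = {}
        for line in make_extra_lines:
            if line.startswith("export "):
                name, eq, value = line[len("export "):].partition("=")
                if eq == "=":  # skip bare `export NAME` lines with no value
                    env[name] = value
        return env

    assert extract_exports(["export CC=clang", "MOZ_OBJDIR=obj"]) == {"CC": "clang"}

str.partition() splits on the first `=` only, so values that themselves contain `=` survive intact.
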
+ for line in self.mozconfig["make_extra"] or []: + if line.startswith("export "): + k, eq, v = line[len("export ") :].partition("=") + if eq == "=": + append_env[k] = v + + virtualenv_topobjdir = virtualenv_topobjdir or self.topobjdir + build_site = CommandSiteManager.from_environment( + self.topsrcdir, + lambda: get_state_dir(specific_to_topsrcdir=True, topsrcdir=self.topsrcdir), + "build", + os.path.join(virtualenv_topobjdir, "_virtualenvs"), + ) + build_site.ensure() + + command = [build_site.python_path, os.path.join(self.topsrcdir, "configure.py")] + if options: + command.extend(options) + + if buildstatus_messages: + line_handler("BUILDSTATUS TIERS configure") + line_handler("BUILDSTATUS TIER_START configure") + + env = os.environ.copy() + env.update(append_env) + + with subprocess.Popen( + command, + cwd=self.topobjdir, + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) as process: + for line in process.stdout: + line_handler(line.rstrip()) + status = process.wait() + if buildstatus_messages: + line_handler("BUILDSTATUS TIER_FINISH configure") + if status: + print('*** Fix above errors and then restart with "./mach build"') + else: + print("Configure complete!") + print("Be sure to run |mach build| to pick up any changes") + + return status + + def install_tests(self): + """Install test files.""" + + if self.is_clobber_needed(): + print( + INSTALL_TESTS_CLOBBER.format( + clobber_file=os.path.join(self.topobjdir, "CLOBBER") + ) + ) + sys.exit(1) + + install_test_files(mozpath.normpath(self.topsrcdir), self.topobjdir, "_tests") + + def _clobber_configure(self): + # This is an optimistic treatment of the CLOBBER file for when we have + # some trust in the build system: an update to the CLOBBER file is + # interpreted to mean that configure will fail during an incremental + # build, which is handled by removing intermediate configure artifacts + # and subsections of the objdir related to python and testing before + # proceeding. + clobberer = Clobberer(self.topsrcdir, self.topobjdir) + clobber_output = io.StringIO() + res = clobberer.maybe_do_clobber(os.getcwd(), False, clobber_output) + required, performed, message = res + assert not performed + if not required: + return False + + def remove_objdir_path(path): + path = mozpath.join(self.topobjdir, path) + self.log( + logging.WARNING, + "clobber", + {"path": path}, + "CLOBBER file has been updated, removing {path}.", + ) + mozfile.remove(path) + + # Remove files we think could cause "configure" clobber bugs. + for f in ("old-configure.vars", "config.cache", "configure.pkl"): + remove_objdir_path(f) + remove_objdir_path(mozpath.join("js", "src", f)) + + rm_dirs = [ + # Stale paths in our virtualenv may cause build-backend + # to fail. + "_virtualenvs", + # Some tests may accumulate state in the objdir that may + # become invalid after srcdir changes. + "_tests", + ] + + for d in rm_dirs: + remove_objdir_path(d) + + os.utime(mozpath.join(self.topobjdir, "CLOBBER"), None) + return True + + def _write_mozconfig_json(self): + mozconfig_json = os.path.join(self.topobjdir, ".mozconfig.json") + with FileAvoidWrite(mozconfig_json) as fh: + to_write = six.ensure_text( + json.dumps( + { + "topsrcdir": self.topsrcdir, + "topobjdir": self.topobjdir, + "mozconfig": self.mozconfig, + }, + sort_keys=True, + indent=2, + ) + ) + # json.dumps in python2 inserts some trailing whitespace while + # json.dumps in python3 does not, which defeats the FileAvoidWrite + # mechanism. 
Strip the trailing whitespace to avoid rewriting this + # file unnecessarily. + to_write = "\n".join([line.rstrip() for line in to_write.splitlines()]) + fh.write(to_write) + + def _run_client_mk( + self, + target=None, + line_handler=None, + jobs=0, + job_size=0, + verbose=None, + keep_going=False, + append_env=None, + ): + append_env = dict(append_env or {}) + append_env["TOPSRCDIR"] = self.topsrcdir + + append_env["CONFIG_GUESS"] = self.resolve_config_guess() + + mozconfig = self.mozconfig + + mozconfig_make_lines = [] + for arg in mozconfig["make_extra"] or []: + mozconfig_make_lines.append(arg) + + if mozconfig["make_flags"]: + mozconfig_make_lines.append( + "MOZ_MAKE_FLAGS=%s" % " ".join(mozconfig["make_flags"]) + ) + objdir = mozpath.normsep(self.topobjdir) + mozconfig_make_lines.append("MOZ_OBJDIR=%s" % objdir) + mozconfig_make_lines.append("OBJDIR=%s" % objdir) + + if mozconfig["path"]: + mozconfig_make_lines.append( + "FOUND_MOZCONFIG=%s" % mozpath.normsep(mozconfig["path"]) + ) + mozconfig_make_lines.append("export FOUND_MOZCONFIG") + + # The .mozconfig.mk file only contains exported variables and lines with + # UPLOAD_EXTRA_FILES. + mozconfig_filtered_lines = [ + line + for line in mozconfig_make_lines + # Bug 1418122 investigate why UPLOAD_EXTRA_FILES is special and + # remove it. + if line.startswith("export ") or "UPLOAD_EXTRA_FILES" in line + ] + + mozconfig_client_mk = os.path.join(self.topobjdir, ".mozconfig-client-mk") + with FileAvoidWrite(mozconfig_client_mk) as fh: + fh.write("\n".join(mozconfig_make_lines)) + + mozconfig_mk = os.path.join(self.topobjdir, ".mozconfig.mk") + with FileAvoidWrite(mozconfig_mk) as fh: + fh.write("\n".join(mozconfig_filtered_lines)) + + # Copy the original mozconfig to the objdir. + mozconfig_objdir = os.path.join(self.topobjdir, ".mozconfig") + if mozconfig["path"]: + with open(mozconfig["path"], "r") as ifh: + with FileAvoidWrite(mozconfig_objdir) as ofh: + ofh.write(ifh.read()) + else: + try: + os.unlink(mozconfig_objdir) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + if mozconfig_make_lines: + self.log( + logging.WARNING, + "mozconfig_content", + { + "path": mozconfig["path"], + "content": "\n ".join(mozconfig_make_lines), + }, + "Adding make options from {path}\n {content}", + ) + + append_env["OBJDIR"] = mozpath.normsep(self.topobjdir) + + return self._run_make( + srcdir=True, + filename="client.mk", + ensure_exit_code=False, + print_directory=False, + target=target, + line_handler=line_handler, + log=False, + num_jobs=jobs, + job_size=job_size, + silent=not verbose, + keep_going=keep_going, + append_env=append_env, + ) + + def _check_clobber(self, mozconfig, env): + """Run `Clobberer.maybe_do_clobber`, log the result and return a status bool. + + Wraps the clobbering logic in `Clobberer.maybe_do_clobber` to provide logging + and handling of the `AUTOCLOBBER` mozconfig option. + + Return a bool indicating whether the clobber reached an error state. For example, + return `True` if the clobber was required but not completed, and return `False` if + the clobber was not required and not completed. + """ + auto_clobber = any( + [ + env.get("AUTOCLOBBER", False), + (mozconfig["env"] or {}).get("added", {}).get("AUTOCLOBBER", False), + "AUTOCLOBBER=1" in (mozconfig["make_extra"] or []), + ] + ) + from mozbuild.base import BuildEnvironmentNotFoundException + + substs = dict() + try: + substs = self.substs + except BuildEnvironmentNotFoundException: + # We'll just use an empty substs if there is no config. 
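
The AUTOCLOBBER detection above consults three sources. Reduced to a standalone helper over plain dicts shaped like os.environ and the loaded mozconfig (hypothetical inputs, separate from the patch):

    def autoclobber_enabled(env, mozconfig):
        return any(
            [
                env.get("AUTOCLOBBER", False),
                (mozconfig.get("env") or {}).get("added", {}).get("AUTOCLOBBER", False),
                "AUTOCLOBBER=1" in (mozconfig.get("make_extra") or []),
            ]
        )

    assert autoclobber_enabled({}, {"make_extra": ["AUTOCLOBBER=1"]})
    assert not autoclobber_enabled({"AUTOCLOBBER": ""}, {})

Note that an empty-string environment value is falsy, so `AUTOCLOBBER=` alone does not enable auto-clobbering.
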
+ pass + clobberer = Clobberer(self.topsrcdir, self.topobjdir, substs) + clobber_output = six.StringIO() + res = clobberer.maybe_do_clobber(os.getcwd(), auto_clobber, clobber_output) + clobber_output.seek(0) + for line in clobber_output.readlines(): + self.log(logging.WARNING, "clobber", {"msg": line.rstrip()}, "{msg}") + + clobber_required, clobber_performed, clobber_message = res + if clobber_required and not clobber_performed: + for line in clobber_message.splitlines(): + self.log(logging.WARNING, "clobber", {"msg": line.rstrip()}, "{msg}") + return True + + if clobber_performed and env.get("TINDERBOX_OUTPUT"): + self.log( + logging.WARNING, + "clobber", + {"msg": "TinderboxPrint: auto clobber"}, + "{msg}", + ) + + return False diff --git a/python/mozbuild/mozbuild/controller/clobber.py b/python/mozbuild/mozbuild/controller/clobber.py new file mode 100644 index 0000000000..3deba54d75 --- /dev/null +++ b/python/mozbuild/mozbuild/controller/clobber.py @@ -0,0 +1,249 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +r"""This module contains code for managing clobbering of the tree.""" + +import errno +import os +import subprocess +import sys +from textwrap import TextWrapper + +from mozfile.mozfile import remove as mozfileremove + +CLOBBER_MESSAGE = "".join( + [ + TextWrapper().fill(line) + "\n" + for line in """ +The CLOBBER file has been updated, indicating that an incremental build since \ +your last build will probably not work. A full/clobber build is required. + +The reason for the clobber is: + +{clobber_reason} + +Clobbering can be performed automatically. However, we didn't automatically \ +clobber this time because: + +{no_reason} + +The easiest and fastest way to clobber is to run: + + $ mach clobber + +If you know this clobber doesn't apply to you or you're feeling lucky -- \ +Well, are ya? -- you can ignore this clobber requirement by running: + + $ touch {clobber_file} +""".splitlines() + ] +) + + +class Clobberer(object): + def __init__(self, topsrcdir, topobjdir, substs=None): + """Create a new object to manage clobbering the tree. + + It is bound to a top source directory and to a specific object + directory. + """ + assert os.path.isabs(topsrcdir) + assert os.path.isabs(topobjdir) + + self.topsrcdir = os.path.normpath(topsrcdir) + self.topobjdir = os.path.normpath(topobjdir) + self.src_clobber = os.path.join(topsrcdir, "CLOBBER") + self.obj_clobber = os.path.join(topobjdir, "CLOBBER") + if substs: + self.substs = substs + else: + self.substs = dict() + + # Try looking for mozilla/CLOBBER, for comm-central + if not os.path.isfile(self.src_clobber): + comm_clobber = os.path.join(topsrcdir, "mozilla", "CLOBBER") + if os.path.isfile(comm_clobber): + self.src_clobber = comm_clobber + + def clobber_needed(self): + """Returns a bool indicating whether a tree clobber is required.""" + + # No object directory clobber file means we're good. + if not os.path.exists(self.obj_clobber): + return False + + # No source directory clobber means we're running from a source package + # that doesn't use clobbering. + if not os.path.exists(self.src_clobber): + return False + + # Object directory clobber older than current is fine. + if os.path.getmtime(self.src_clobber) <= os.path.getmtime(self.obj_clobber): + + return False + + return True + + def clobber_cause(self): + """Obtain the cause why a clobber is required. 
+ + This reads the cause from the CLOBBER file. + + This returns a list of lines describing why the clobber was required. + Each line is stripped of leading and trailing whitespace. + """ + with open(self.src_clobber, "rt") as fh: + lines = [l.strip() for l in fh.readlines()] + return [l for l in lines if l and not l.startswith("#")] + + def have_winrm(self): + # `winrm -h` should print 'winrm version ...' and exit 1 + try: + p = subprocess.Popen( + ["winrm.exe", "-h"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + return p.wait() == 1 and p.stdout.read().startswith("winrm") + except Exception: + return False + + def collect_subdirs(self, root, exclude): + """Gathers a list of subdirectories excluding specified items.""" + paths = [] + try: + for p in os.listdir(root): + if p not in exclude: + paths.append(os.path.join(root, p)) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + return paths + + def delete_dirs(self, root, paths_to_delete): + """Deletes the given subdirectories in an optimal way.""" + procs = [] + for p in sorted(paths_to_delete): + path = os.path.join(root, p) + if ( + sys.platform.startswith("win") + and self.have_winrm() + and os.path.isdir(path) + ): + procs.append(subprocess.Popen(["winrm", "-rf", path])) + else: + # We use mozfile because it is faster than shutil.rmtree(). + mozfileremove(path) + + for p in procs: + p.wait() + + def remove_objdir(self, full=True): + """Remove the object directory. + + ``full`` controls whether to fully delete the objdir. If False, + some directories (e.g. Visual Studio Project Files) will not be + deleted. + """ + # Determine where cargo build artifacts are stored + RUST_TARGET_VARS = ("RUST_HOST_TARGET", "RUST_TARGET") + rust_targets = set( + [self.substs[x] for x in RUST_TARGET_VARS if x in self.substs] + ) + rust_build_kind = "release" + if self.substs.get("MOZ_DEBUG_RUST"): + rust_build_kind = "debug" + + # Top-level files and directories to not clobber by default. + no_clobber = {".mozbuild", "msvc", "_virtualenvs"} + + # Hold off on clobbering cargo build artifacts + no_clobber |= rust_targets + + if full: + paths = [self.topobjdir] + else: + paths = self.collect_subdirs(self.topobjdir, no_clobber) + + self.delete_dirs(self.topobjdir, paths) + + # Now handle cargo's build artifacts and skip removing the incremental + # compilation cache. + for target in rust_targets: + cargo_path = os.path.join(self.topobjdir, target, rust_build_kind) + paths = self.collect_subdirs( + cargo_path, + { + "incremental", + }, + ) + self.delete_dirs(cargo_path, paths) + + def maybe_do_clobber(self, cwd, allow_auto=False, fh=sys.stderr): + """Perform a clobber if it is required. Maybe. + + This is the API the build system invokes to determine if a clobber + is needed and to automatically perform that clobber if we can. + + This returns a tuple of (bool, bool, str). The elements are: + + - Whether a clobber was/is required. + - Whether a clobber was performed. + - The reason why the clobber failed or could not be performed. This + will be None if no clobber is required or if we clobbered without + error. + """ + assert cwd + cwd = os.path.normpath(cwd) + + if not self.clobber_needed(): + print("Clobber not needed.", file=fh) + return False, False, None + + # So a clobber is needed. We only perform a clobber if we are + # allowed to perform an automatic clobber (off by default) and if the + # current directory is not under the object directory. 
The latter is
+        # because operating systems, filesystems, and shells can throw fits
+        # if the current working directory is deleted from under you. While it
+        # can work in some scenarios, we take the conservative approach and
+        # never try.
+        if not allow_auto:
+            return (
+                True,
+                False,
+                self._message(
+                    "Automatic clobbering is not enabled\n"
+                    ' (add "mk_add_options AUTOCLOBBER=1" to your '
+                    "mozconfig)."
+                ),
+            )
+
+        if cwd.startswith(self.topobjdir) and cwd != self.topobjdir:
+            return (
+                True,
+                False,
+                self._message(
+                    "Cannot clobber while the shell is inside the object directory."
+                ),
+            )
+
+        print("Automatically clobbering %s" % self.topobjdir, file=fh)
+        try:
+            self.remove_objdir(False)
+            print("Successfully completed auto clobber.", file=fh)
+            return True, True, None
+        except IOError as error:
+            return (
+                True,
+                False,
+                self._message("Error when automatically clobbering: " + str(error)),
+            )
+
+    def _message(self, reason):
+        lines = [" " + line for line in self.clobber_cause()]
+
+        return CLOBBER_MESSAGE.format(
+            clobber_reason="\n".join(lines),
+            no_reason=" " + reason,
+            clobber_file=self.obj_clobber,
+        )
diff --git a/python/mozbuild/mozbuild/doctor.py b/python/mozbuild/mozbuild/doctor.py
new file mode 100644
index 0000000000..649b50200d
--- /dev/null
+++ b/python/mozbuild/mozbuild/doctor.py
@@ -0,0 +1,605 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import enum
+import locale
+import os
+import socket
+import subprocess
+import sys
+from pathlib import Path
+from typing import Callable, List, Optional, Union
+
+import attr
+import mozpack.path as mozpath
+import mozversioncontrol
+import psutil
+import requests
+from packaging.version import Version
+
+# Minimum recommended logical processors in system.
+PROCESSORS_THRESHOLD = 4
+
+# Minimum recommended total system memory, in gigabytes.
+MEMORY_THRESHOLD = 7.4
+
+# Minimum recommended free space on each disk, in gigabytes.
+FREESPACE_THRESHOLD = 10
+
+# Latest MozillaBuild version.
+LATEST_MOZILLABUILD_VERSION = Version("4.0")
+
+DISABLE_LASTACCESS_WIN = """
+Disable the last access time feature?
+This improves the speed of file and
+directory access by deferring Last Access Time modification on disk by up to an
+hour. Backup programs that rely on this feature may be affected.
+https://technet.microsoft.com/en-us/library/cc785435.aspx
+"""
+
+COMPILED_LANGUAGE_FILE_EXTENSIONS = [
+    ".cc",
+    ".cxx",
+    ".c",
+    ".cpp",
+    ".h",
+    ".hpp",
+    ".rs",
+    ".rlib",
+    ".mk",
+]
+
+
+def get_mount_point(path: str) -> str:
+    """Return the mount point for a given path."""
+    while path != "/" and not os.path.ismount(path):
+        path = mozpath.abspath(mozpath.join(path, os.pardir))
+    return path
+
+
+class CheckStatus(enum.Enum):
+    # Check is okay.
+    OK = enum.auto()
+    # We found an issue.
+    WARNING = enum.auto()
+    # We found an issue that will break build/configure/etc.
+    FATAL = enum.auto()
+    # The check was skipped.
+    SKIPPED = enum.auto()
+
+
+@attr.s
+class DoctorCheck:
+    # Name of the check.
+    name = attr.ib()
+    # Lines to display on screen.
+    display_text = attr.ib()
+    # `CheckStatus` for this given check.
+    status = attr.ib()
+    # Function to be called to fix the issues, if applicable.
+    fix = attr.ib(default=None)
+
+
+CHECKS = {}
+
+
+def check(func: Callable):
+    """Decorator that registers a function as a doctor check.
+
+    The function should return a `DoctorCheck` or be an iterator of
+    checks.
+    """
+    CHECKS[func.__name__] = func
+    return func
+
+
+@check
+def dns(**kwargs) -> DoctorCheck:
+    """Check DNS is queryable."""
+    try:
+        socket.getaddrinfo("mozilla.org", 80)
+        return DoctorCheck(
+            name="dns",
+            status=CheckStatus.OK,
+            display_text=["DNS query for mozilla.org completed successfully."],
+        )
+
+    except socket.gaierror:
+        return DoctorCheck(
+            name="dns",
+            status=CheckStatus.FATAL,
+            display_text=["Could not query DNS for mozilla.org."],
+        )
+
+
+@check
+def internet(**kwargs) -> DoctorCheck:
+    """Check the internet is reachable via HTTPS."""
+    try:
+        resp = requests.get("https://mozilla.org")
+        resp.raise_for_status()
+
+        return DoctorCheck(
+            name="internet",
+            status=CheckStatus.OK,
+            display_text=["Internet is reachable."],
+        )
+
+    except Exception:
+        return DoctorCheck(
+            name="internet",
+            status=CheckStatus.FATAL,
+            display_text=["Could not reach a known website via HTTPS."],
+        )
+
+
+@check
+def ssh(**kwargs) -> DoctorCheck:
+    """Check the status of `ssh hg.mozilla.org` for common errors."""
+    try:
+        # We expect this command to return exit code 1 even when we hit
+        # the successful code path, since we don't specify a `pash` command.
+        proc = subprocess.run(
+            ["ssh", "hg.mozilla.org"],
+            encoding="utf-8",
+            stderr=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+        )
+
+        # Command output from a successful `pash` run.
+        if "has privileges to access Mercurial over" in proc.stdout:
+            return DoctorCheck(
+                name="ssh",
+                status=CheckStatus.OK,
+                display_text=["SSH is properly configured for access to hg."],
+            )
+
+        if "Permission denied" in proc.stdout:
+            # Parse proc.stdout for the username, which looks like:
+            # `<username>@hg.mozilla.org: Permission denied (reason)`
+            login_string = proc.stdout.split()[0]
+            username, _host = login_string.split("@hg.mozilla.org")
+
+            # `<username>` should be an email address.
+            if "@" not in username:
+                return DoctorCheck(
+                    name="ssh",
+                    status=CheckStatus.FATAL,
+                    display_text=[
+                        "SSH username `{}` is not an email address.".format(username),
+                        "hg.mozilla.org logins should be in the form `user@domain.com`.",
+                    ],
+                )
+
+            return DoctorCheck(
+                name="ssh",
+                status=CheckStatus.WARNING,
+                display_text=[
+                    "SSH username `{}` does not have permission to push to "
+                    "hg.mozilla.org.".format(username)
+                ],
+            )
+
+        if "Mercurial access is currently disabled on your account" in proc.stdout:
+            return DoctorCheck(
+                name="ssh",
+                status=CheckStatus.FATAL,
+                display_text=[
+                    "You previously had push access to hgmo, but due to inactivity",
+                    "your access was revoked. 
Please file a bug in Bugzilla under", + "`Infrastructure & Operations :: Infrastructure: LDAP` to request", + "access.", + ], + ) + + return DoctorCheck( + name="ssh", + status=CheckStatus.WARNING, + display_text=[ + "Unexpected output from `ssh hg.mozilla.org`:", + proc.stdout, + ], + ) + + except subprocess.CalledProcessError: + return DoctorCheck( + name="ssh", + status=CheckStatus.WARNING, + display_text=["Could not run `ssh hg.mozilla.org`."], + ) + + +@check +def cpu(**kwargs) -> DoctorCheck: + """Check the host machine has the recommended processing power to develop Firefox.""" + cpu_count = psutil.cpu_count() + if cpu_count < PROCESSORS_THRESHOLD: + status = CheckStatus.WARNING + desc = "%d logical processors detected, <%d" % (cpu_count, PROCESSORS_THRESHOLD) + else: + status = CheckStatus.OK + desc = "%d logical processors detected, >=%d" % ( + cpu_count, + PROCESSORS_THRESHOLD, + ) + + return DoctorCheck(name="cpu", display_text=[desc], status=status) + + +@check +def memory(**kwargs) -> DoctorCheck: + """Check the host machine has the recommended memory to develop Firefox.""" + memory = psutil.virtual_memory().total + # Convert to gigabytes. + memory_GB = memory / 1024 ** 3.0 + if memory_GB < MEMORY_THRESHOLD: + status = CheckStatus.WARNING + desc = "%.1fGB of physical memory, <%.1fGB" % (memory_GB, MEMORY_THRESHOLD) + else: + status = CheckStatus.OK + desc = "%.1fGB of physical memory, >%.1fGB" % (memory_GB, MEMORY_THRESHOLD) + + return DoctorCheck(name="memory", display_text=[desc], status=status) + + +@check +def storage_freespace(topsrcdir: str, topobjdir: str, **kwargs) -> List[DoctorCheck]: + """Check the host machine has the recommended disk space to develop Firefox.""" + topsrcdir_mount = get_mount_point(topsrcdir) + topobjdir_mount = get_mount_point(topobjdir) + + mounts = [ + ("topsrcdir", topsrcdir, topsrcdir_mount), + ("topobjdir", topobjdir, topobjdir_mount), + ] + + mountpoint_line = topsrcdir_mount != topobjdir_mount + checks = [] + + for purpose, path, mount in mounts: + if not mountpoint_line: + mountpoint_line = True + continue + + desc = ["%s = %s" % (purpose, path)] + + try: + usage = psutil.disk_usage(mount) + freespace, size = usage.free, usage.total + freespace_GB = freespace / 1024 ** 3 + size_GB = size / 1024 ** 3 + if freespace_GB < FREESPACE_THRESHOLD: + status = CheckStatus.WARNING + desc.append( + "mountpoint = %s\n%dGB of %dGB free, <%dGB" + % (mount, freespace_GB, size_GB, FREESPACE_THRESHOLD) + ) + else: + status = CheckStatus.OK + desc.append( + "mountpoint = %s\n%dGB of %dGB free, >=%dGB" + % (mount, freespace_GB, size_GB, FREESPACE_THRESHOLD) + ) + + except OSError: + status = CheckStatus.FATAL + desc.append("path invalid") + + checks.append( + DoctorCheck(name="%s mount check" % mount, status=status, display_text=desc) + ) + + return checks + + +def fix_lastaccess_win(): + """Run `fsutil` to fix lastaccess behaviour.""" + try: + print("Disabling filesystem lastaccess") + + command = ["fsutil", "behavior", "set", "disablelastaccess", "1"] + subprocess.check_output(command) + + print("Filesystem lastaccess disabled.") + + except subprocess.CalledProcessError: + print("Could not disable filesystem lastaccess.") + + +@check +def fs_lastaccess( + topsrcdir: str, topobjdir: str, **kwargs +) -> Union[DoctorCheck, List[DoctorCheck]]: + """Check for the `lastaccess` behaviour on the filsystem, which can slow + down filesystem operations.""" + if sys.platform.startswith("win"): + # See 'fsutil behavior': + # 
https://technet.microsoft.com/en-us/library/cc785435.aspx + try: + command = ["fsutil", "behavior", "query", "disablelastaccess"] + fsutil_output = subprocess.check_output(command, encoding="utf-8") + disablelastaccess = int(fsutil_output.partition("=")[2][1]) + except subprocess.CalledProcessError: + return DoctorCheck( + name="lastaccess", + status=CheckStatus.WARNING, + display_text=["unable to check lastaccess behavior"], + ) + + if disablelastaccess in {1, 3}: + return DoctorCheck( + name="lastaccess", + status=CheckStatus.OK, + display_text=["lastaccess disabled systemwide"], + ) + elif disablelastaccess in {0, 2}: + return DoctorCheck( + name="lastaccess", + status=CheckStatus.WARNING, + display_text=["lastaccess enabled"], + fix=fix_lastaccess_win, + ) + + # `disablelastaccess` should be a value between 0-3. + return DoctorCheck( + name="lastaccess", + status=CheckStatus.WARNING, + display_text=["Could not parse `fsutil` for lastaccess behavior."], + ) + + elif any( + sys.platform.startswith(prefix) for prefix in ["freebsd", "linux", "openbsd"] + ): + topsrcdir_mount = get_mount_point(topsrcdir) + topobjdir_mount = get_mount_point(topobjdir) + mounts = [ + ("topsrcdir", topsrcdir, topsrcdir_mount), + ("topobjdir", topobjdir, topobjdir_mount), + ] + + common_mountpoint = topsrcdir_mount == topobjdir_mount + + mount_checks = [] + for _purpose, _path, mount in mounts: + mount_checks.append(check_mount_lastaccess(mount)) + if common_mountpoint: + break + + return mount_checks + + # Return "SKIPPED" if this test is not relevant. + return DoctorCheck( + name="lastaccess", + display_text=["lastaccess not relevant for this platform."], + status=CheckStatus.SKIPPED, + ) + + +def check_mount_lastaccess(mount: str) -> DoctorCheck: + """Check `lastaccess` behaviour for a Linux mount.""" + partitions = psutil.disk_partitions(all=True) + atime_opts = {"atime", "noatime", "relatime", "norelatime"} + option = "" + fstype = "" + for partition in partitions: + if partition.mountpoint == mount: + mount_opts = set(partition.opts.split(",")) + intersection = list(atime_opts & mount_opts) + fstype = partition.fstype + if len(intersection) == 1: + option = intersection[0] + break + + if fstype == "tmpfs": + status = CheckStatus.OK + desc = "%s is a tmpfs so noatime/reltime is not needed" % (mount) + elif not option: + status = CheckStatus.WARNING + if sys.platform.startswith("linux"): + option = "noatime/relatime" + else: + option = "noatime" + desc = "%s has no explicit %s mount option" % (mount, option) + elif option == "atime" or option == "norelatime": + status = CheckStatus.WARNING + desc = "%s has %s mount option" % (mount, option) + elif option == "noatime" or option == "relatime": + status = CheckStatus.OK + desc = "%s has %s mount option" % (mount, option) + + return DoctorCheck( + name="%s mount lastaccess" % mount, status=status, display_text=[desc] + ) + + +@check +def mozillabuild(**kwargs) -> DoctorCheck: + """Check that MozillaBuild is the latest version.""" + if not sys.platform.startswith("win"): + return DoctorCheck( + name="mozillabuild", + status=CheckStatus.SKIPPED, + display_text=["Non-Windows platform, MozillaBuild not relevant"], + ) + + MOZILLABUILD = mozpath.normpath(os.environ.get("MOZILLABUILD", "")) + if not MOZILLABUILD or not os.path.exists(MOZILLABUILD): + return DoctorCheck( + name="mozillabuild", + status=CheckStatus.WARNING, + display_text=["Not running under MozillaBuild."], + ) + + try: + with open(mozpath.join(MOZILLABUILD, "VERSION"), "r") as fh: + local_version = 
fh.readline()
+
+        if not local_version:
+            return DoctorCheck(
+                name="mozillabuild",
+                status=CheckStatus.WARNING,
+                display_text=["Could not get local MozillaBuild version."],
+            )
+
+        if Version(local_version) < LATEST_MOZILLABUILD_VERSION:
+            status = CheckStatus.WARNING
+            desc = "MozillaBuild %s in use, <%s" % (
+                local_version,
+                LATEST_MOZILLABUILD_VERSION,
+            )
+
+        else:
+            status = CheckStatus.OK
+            desc = "MozillaBuild %s in use" % local_version
+
+    except (IOError, ValueError):
+        status = CheckStatus.FATAL
+        desc = "MozillaBuild version not found"
+
+    return DoctorCheck(name="mozillabuild", status=status, display_text=[desc])
+
+
+@check
+def bad_locale_utf8(**kwargs) -> DoctorCheck:
+    """Check to detect the invalid locale `UTF-8` on pre-3.8 Python."""
+    if sys.version_info >= (3, 8):
+        return DoctorCheck(
+            name="utf8 locale",
+            status=CheckStatus.SKIPPED,
+            display_text=["Python version has fixed utf-8 locale bug."],
+        )
+
+    try:
+        # This line will attempt to get and parse the locale.
+        locale.getdefaultlocale()
+
+        return DoctorCheck(
+            name="utf8 locale",
+            status=CheckStatus.OK,
+            display_text=["Python's locale is set to a valid value."],
+        )
+    except ValueError:
+        return DoctorCheck(
+            name="utf8 locale",
+            status=CheckStatus.FATAL,
+            display_text=[
+                "Your Python is using an invalid value for its locale.",
+                "Either update Python to version 3.8+, or set the following variables in ",
+                "your environment:",
+                " export LC_ALL=en_US.UTF-8",
+                " export LANG=en_US.UTF-8",
+            ],
+        )
+
+
+@check
+def artifact_build(
+    topsrcdir: str, configure_args: Optional[List[str]], **kwargs
+) -> DoctorCheck:
+    """Check that, if Artifact Builds are enabled, no source files
+    that would not be compiled have been changed."""
+
+    if configure_args is None or "--enable-artifact-builds" not in configure_args:
+        return DoctorCheck(
+            name="artifact_build",
+            status=CheckStatus.SKIPPED,
+            display_text=[
+                "Artifact Builds are not enabled. No need to proceed checking for changed files."
+            ],
+        )
+
+    repo = mozversioncontrol.get_repository_object(topsrcdir)
+    changed_files = [
+        Path(file)
+        for file in set(repo.get_outgoing_files()) | set(repo.get_changed_files())
+    ]
+
+    compiled_language_files_changed = ""
+    for file in changed_files:
+        if (
+            file.suffix in COMPILED_LANGUAGE_FILE_EXTENSIONS
+            or file.stem.lower() == "makefile"
+            and not file.suffix == ".py"
+        ):
+            compiled_language_files_changed += ' - "' + str(file) + '"\n'
+
+    if compiled_language_files_changed:
+        return DoctorCheck(
+            name="artifact_build",
+            status=CheckStatus.FATAL,
+            display_text=[
+                "Artifact Builds are enabled, but the following files from compiled languages "
+                f"have been modified: \n{compiled_language_files_changed}\nThese files will "
+                "not be compiled, and your changes will not be realized in the build output."
+                "\n\nIf you want these changes to be realized, you should re-run `./mach "
+                'bootstrap` and select a build that does not state "Artifact Mode".'
+                "\nFor additional information on Artifact Builds see: "
+                "https://firefox-source-docs.mozilla.org/contributing/build/"
+                "artifact_builds.html"
+            ],
+        )
+
+    return DoctorCheck(
+        name="artifact_build",
+        status=CheckStatus.OK,
+        display_text=["No Artifact Build conflicts found."],
+    )
+
+
+def run_doctor(fix: bool = False, verbose: bool = False, **kwargs) -> int:
+    """Run the doctor checks.
+
+    If `fix` is `True`, run fixing functions for issues that can be resolved
+    automatically.
+
+    By default, only print output from checks that result in a warning or
+    fatal issue. 
`verbose` will cause all output to be printed to the screen. + """ + issues_found = False + + fixes = [] + for _name, check_func in CHECKS.items(): + results = check_func(**kwargs) + + if isinstance(results, DoctorCheck): + results = [results] + + for result in results: + if result.status == CheckStatus.SKIPPED and not verbose: + continue + + if result.status != CheckStatus.OK: + # If we ever have a non-OK status, we shouldn't print + # the "No issues detected" line. + issues_found = True + + if result.status != CheckStatus.OK or verbose: + print("\n".join(result.display_text)) + + if result.fix: + fixes.append(result.fix) + + if not issues_found: + print("No issues detected.") + return 0 + + # If we can fix something but the user didn't ask us to, advise + # them to run with `--fix`. + if not fix: + if fixes: + print( + "Some of the issues found can be fixed; run " + "`./mach doctor --fix` to fix them." + ) + return 1 + + # Attempt to run the fix functions. + fixer_fail = 0 + for fixer in fixes: + try: + fixer() + except Exception: + fixer_fail = 1 + pass + + return fixer_fail diff --git a/python/mozbuild/mozbuild/dotproperties.py b/python/mozbuild/mozbuild/dotproperties.py new file mode 100644 index 0000000000..9b615cc43f --- /dev/null +++ b/python/mozbuild/mozbuild/dotproperties.py @@ -0,0 +1,86 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +# This file contains utility functions for reading .properties files + +import codecs +import re +import sys + +import six + +if sys.version_info[0] == 3: + str_type = str +else: + str_type = basestring + + +class DotProperties: + r"""A thin representation of a key=value .properties file.""" + + def __init__(self, file=None): + self._properties = {} + if file: + self.update(file) + + def update(self, file): + """Updates properties from a file name or file-like object. + + Ignores empty lines and comment lines.""" + + if isinstance(file, str_type): + f = codecs.open(file, "r", "utf-8") + else: + f = file + + for l in f.readlines(): + line = l.strip() + if not line or line.startswith("#"): + continue + (k, v) = re.split("\s*=\s*", line, 1) + self._properties[k] = v + + def get(self, key, default=None): + return self._properties.get(key, default) + + def get_list(self, prefix): + """Turns {'list.0':'foo', 'list.1':'bar'} into ['foo', 'bar']. + + Returns [] to indicate an empty or missing list.""" + + if not prefix.endswith("."): + prefix = prefix + "." + indexes = [] + for k, v in six.iteritems(self._properties): + if not k.startswith(prefix): + continue + key = k[len(prefix) :] + if "." in key: + # We have something like list.sublist.0. + continue + indexes.append(int(key)) + return [self._properties[prefix + str(index)] for index in sorted(indexes)] + + def get_dict(self, prefix, required_keys=[]): + """Turns {'foo.title':'title', ...} into {'title':'title', ...}. + + If ``|required_keys|`` is present, it must be an iterable of required key + names. If a required key is not present, ValueError is thrown. + + Returns {} to indicate an empty or missing dict.""" + + if not prefix.endswith("."): + prefix = prefix + "." + + D = dict( + (k[len(prefix) :], v) + for k, v in six.iteritems(self._properties) + if k.startswith(prefix) and "." 
not in k[len(prefix) :] + ) + + for required_key in required_keys: + if required_key not in D: + raise ValueError("Required key %s not present" % required_key) + + return D diff --git a/python/mozbuild/mozbuild/faster_daemon.py b/python/mozbuild/mozbuild/faster_daemon.py new file mode 100644 index 0000000000..13fb07a79c --- /dev/null +++ b/python/mozbuild/mozbuild/faster_daemon.py @@ -0,0 +1,328 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Use pywatchman to watch source directories and perform partial +``mach build faster`` builds. +""" + +import datetime +import sys +import time + +import mozpack.path as mozpath + +# Watchman integration cribbed entirely from +# https://github.com/facebook/watchman/blob/19aebfebb0b5b0b5174b3914a879370ffc5dac37/python/bin/watchman-wait +import pywatchman +from mozpack.copier import FileCopier +from mozpack.manifests import InstallManifest + +import mozbuild.util +from mozbuild.backend import get_backend_class + + +def print_line(prefix, m, now=None): + now = now or datetime.datetime.utcnow() + print("[%s %sZ] %s" % (prefix, now.isoformat(), m)) + + +def print_copy_result(elapsed, destdir, result, verbose=True): + COMPLETE = ( + "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; " + "Added/updated {updated}; " + "Removed {rm_files} files and {rm_dirs} directories." + ) + + print_line( + "watch", + COMPLETE.format( + elapsed=elapsed, + dest=destdir, + existing=result.existing_files_count, + updated=result.updated_files_count, + rm_files=result.removed_files_count, + rm_dirs=result.removed_directories_count, + ), + ) + + +class FasterBuildException(Exception): + def __init__(self, message, cause): + Exception.__init__(self, message) + self.cause = cause + + +class FasterBuildChange(object): + def __init__(self): + self.unrecognized = set() + self.input_to_outputs = {} + self.output_to_inputs = {} + + +class Daemon(object): + def __init__(self, config_environment): + self.config_environment = config_environment + self._client = None + + @property + def defines(self): + defines = dict(self.config_environment.acdefines) + # These additions work around warts in the build system: see + # http://searchfox.org/mozilla-central/rev/ad093e98f42338effe2e2513e26c3a311dd96422/config/faster/rules.mk#92-93 + defines.update( + { + "AB_CD": "en-US", + } + ) + return defines + + @mozbuild.util.memoized_property + def file_copier(self): + # TODO: invalidate the file copier when the build system + # itself changes, i.e., the underlying unified manifest + # changes. 
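
The @memoized_property above means the copier is computed once per Daemon instance and then reused for every change notification. For reference, the stdlib-only equivalent of that caching behaviour (Python 3.8+), shown on a hypothetical class:

    from functools import cached_property

    class Example:
        @cached_property
        def expensive(self):
            print("computed once")  # printed on first access only
            return 42

    e = Example()
    assert e.expensive == 42
    assert e.expensive == 42  # second access returns the cached value
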
+ file_copier = FileCopier() + + unified_manifest = InstallManifest( + mozpath.join( + self.config_environment.topobjdir, "faster", "unified_install_dist_bin" + ) + ) + + unified_manifest.populate_registry(file_copier, defines_override=self.defines) + + return file_copier + + def subscribe_to_topsrcdir(self): + self.subscribe_to_dir("topsrcdir", self.config_environment.topsrcdir) + + def subscribe_to_dir(self, name, dir_to_watch): + query = { + "empty_on_fresh_instance": True, + "expression": [ + "allof", + ["type", "f"], + [ + "not", + [ + "anyof", + ["dirname", ".hg"], + ["name", ".hg", "wholename"], + ["dirname", ".git"], + ["name", ".git", "wholename"], + ], + ], + ], + "fields": ["name"], + } + watch = self.client.query("watch-project", dir_to_watch) + if "warning" in watch: + print("WARNING: ", watch["warning"], file=sys.stderr) + + root = watch["watch"] + if "relative_path" in watch: + query["relative_root"] = watch["relative_path"] + + # Get the initial clock value so that we only get updates. + # Wait 30s to allow for slow Windows IO. See + # https://facebook.github.io/watchman/docs/cmd/clock.html. + query["since"] = self.client.query("clock", root, {"sync_timeout": 30000})[ + "clock" + ] + + return self.client.query("subscribe", root, name, query) + + def changed_files(self): + # In theory we can parse just the result variable here, but + # the client object will accumulate all subscription results + # over time, so we ask it to remove and return those values. + files = set() + + data = self.client.getSubscription("topsrcdir") + if data: + for dat in data: + files |= set( + [ + mozpath.normpath( + mozpath.join(self.config_environment.topsrcdir, f) + ) + for f in dat.get("files", []) + ] + ) + + return files + + def incremental_copy(self, copier, force=False, verbose=True): + # Just like the 'repackage' target in browser/app/Makefile.in. + if "cocoa" == self.config_environment.substs["MOZ_WIDGET_TOOLKIT"]: + bundledir = mozpath.join( + self.config_environment.topobjdir, + "dist", + self.config_environment.substs["MOZ_MACBUNDLE_NAME"], + "Contents", + "Resources", + ) + start = time.monotonic() + result = copier.copy( + bundledir, + skip_if_older=not force, + remove_unaccounted=False, + remove_all_directory_symlinks=False, + remove_empty_directories=False, + ) + print_copy_result( + time.monotonic() - start, bundledir, result, verbose=verbose + ) + + destdir = mozpath.join(self.config_environment.topobjdir, "dist", "bin") + start = time.monotonic() + result = copier.copy( + destdir, + skip_if_older=not force, + remove_unaccounted=False, + remove_all_directory_symlinks=False, + remove_empty_directories=False, + ) + print_copy_result(time.monotonic() - start, destdir, result, verbose=verbose) + + def input_changes(self, verbose=True): + """ + Return an iterator of `FasterBuildChange` instances as inputs + to the faster build system change. + """ + + # TODO: provide the debug diagnostics we want: this print is + # not immediately before the watch. + if verbose: + print_line("watch", "Connecting to watchman") + # TODO: figure out why a large timeout is required for the + # client, and a robust strategy for retrying timed out + # requests. + self.client = pywatchman.client(timeout=5.0) + + try: + if verbose: + print_line("watch", "Checking watchman capabilities") + # TODO: restrict these capabilities to the minimal set. 
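
For orientation, a minimal sketch of the watch-project/clock/subscribe/receive cycle this class builds on, using only pywatchman calls that already appear in this file. It assumes a running watchman daemon and an existing directory, and omits the `relative_path` handling the real code performs.

    import pywatchman

    def wait_for_change(path, name="demo"):
        client = pywatchman.client(timeout=5.0)
        try:
            watch = client.query("watch-project", path)
            root = watch["watch"]
            query = {"expression": ["type", "f"], "fields": ["name"]}
            # Record the current clock so only future changes are reported.
            query["since"] = client.query("clock", root, {"sync_timeout": 30000})[
                "clock"
            ]
            client.query("subscribe", root, name, query)
            client.receive()  # blocks until watchman pushes a notification
            return client.getSubscription(name)
        finally:
            client.close()
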
+ self.client.capabilityCheck( + required=[ + "clock-sync-timeout", + "cmd-watch-project", + "term-dirname", + "wildmatch", + ] + ) + + if verbose: + print_line( + "watch", + "Subscribing to {}".format(self.config_environment.topsrcdir), + ) + self.subscribe_to_topsrcdir() + if verbose: + print_line( + "watch", "Watching {}".format(self.config_environment.topsrcdir) + ) + + input_to_outputs = self.file_copier.input_to_outputs_tree() + for input, outputs in input_to_outputs.items(): + if not outputs: + raise Exception( + "Refusing to watch input ({}) with no outputs".format(input) + ) + + while True: + try: + self.client.receive() + + changed = self.changed_files() + if not changed: + continue + + result = FasterBuildChange() + + for change in changed: + if change in input_to_outputs: + result.input_to_outputs[change] = set( + input_to_outputs[change] + ) + else: + result.unrecognized.add(change) + + for input, outputs in result.input_to_outputs.items(): + for output in outputs: + if output not in result.output_to_inputs: + result.output_to_inputs[output] = set() + result.output_to_inputs[output].add(input) + + yield result + + except pywatchman.SocketTimeout: + # Let's check to see if we're still functional. + self.client.query("version") + + except pywatchman.CommandError as e: + # Abstract away pywatchman errors. + raise FasterBuildException( + e, + "Command error using pywatchman to watch {}".format( + self.config_environment.topsrcdir + ), + ) + + except pywatchman.SocketTimeout as e: + # Abstract away pywatchman errors. + raise FasterBuildException( + e, + "Socket timeout using pywatchman to watch {}".format( + self.config_environment.topsrcdir + ), + ) + + finally: + self.client.close() + + def output_changes(self, verbose=True): + """ + Return an iterator of `FasterBuildChange` instances as outputs + from the faster build system are updated. + """ + for change in self.input_changes(verbose=verbose): + now = datetime.datetime.utcnow() + + for unrecognized in sorted(change.unrecognized): + print_line("watch", "! {}".format(unrecognized), now=now) + + all_outputs = set() + for input in sorted(change.input_to_outputs): + outputs = change.input_to_outputs[input] + + print_line("watch", "< {}".format(input), now=now) + for output in sorted(outputs): + print_line("watch", "> {}".format(output), now=now) + all_outputs |= outputs + + if all_outputs: + partial_copier = FileCopier() + for output in all_outputs: + partial_copier.add(output, self.file_copier[output]) + + self.incremental_copy(partial_copier, force=True, verbose=verbose) + yield change + + def watch(self, verbose=True): + try: + active_backend = self.config_environment.substs.get( + "BUILD_BACKENDS", [None] + )[0] + if active_backend: + backend_cls = get_backend_class(active_backend)(self.config_environment) + except Exception: + backend_cls = None + + for change in self.output_changes(verbose=verbose): + # Try to run the active build backend's post-build step, if possible. 
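
Earlier in input_changes(), every changed input is mapped to its outputs and that mapping is then inverted. The inversion, reduced to a standalone helper over plain dicts of sets (separate from the patch):

    def invert_multimap(input_to_outputs):
        output_to_inputs = {}
        for inp, outputs in input_to_outputs.items():
            for out in outputs:
                output_to_inputs.setdefault(out, set()).add(inp)
        return output_to_inputs

    assert invert_multimap({"a.jsm": {"dist/a.jsm", "omni/a.jsm"}}) == {
        "dist/a.jsm": {"a.jsm"},
        "omni/a.jsm": {"a.jsm"},
    }
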
+ if backend_cls: + backend_cls.post_build(self.config_environment, None, 1, False, 0) diff --git a/python/mozbuild/mozbuild/frontend/__init__.py b/python/mozbuild/mozbuild/frontend/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/frontend/context.py b/python/mozbuild/mozbuild/frontend/context.py new file mode 100644 index 0000000000..1e241c5656 --- /dev/null +++ b/python/mozbuild/mozbuild/frontend/context.py @@ -0,0 +1,3144 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +###################################################################### +# DO NOT UPDATE THIS FILE WITHOUT SIGN-OFF FROM A BUILD MODULE PEER. # +###################################################################### + +r"""This module contains the data structure (context) holding the configuration +from a moz.build. The data emitted by the frontend derives from those contexts. + +It also defines the set of variables and functions available in moz.build. +If you are looking for the absolute authority on what moz.build files can +contain, you've come to the right place. +""" + +import itertools +import operator +import os +from collections import Counter, OrderedDict +from types import FunctionType + +import mozpack.path as mozpath +import six + +from mozbuild.util import ( + HierarchicalStringList, + ImmutableStrictOrderingOnAppendList, + KeyedDefaultDict, + List, + ReadOnlyKeyedDefaultDict, + StrictOrderingOnAppendList, + StrictOrderingOnAppendListWithAction, + StrictOrderingOnAppendListWithFlagsFactory, + TypedList, + TypedNamedTuple, + memoize, + memoized_property, +) + +from .. import schedules +from ..testing import read_manifestparser_manifest, read_reftest_manifest + + +class ContextDerivedValue(object): + """Classes deriving from this one receive a special treatment in a + Context. See Context documentation. + """ + + __slots__ = () + + +class Context(KeyedDefaultDict): + """Represents a moz.build configuration context. + + Instances of this class are filled by the execution of sandboxes. + At the core, a Context is a dict, with a defined set of possible keys we'll + call variables. Each variable is associated with a type. + + When reading a value for a given key, we first try to read the existing + value. If a value is not found and it is defined in the allowed variables + set, we return a new instance of the class for that variable. We don't + assign default instances until they are accessed because this makes + debugging the end-result much simpler. Instead of a data structure with + lots of empty/default values, you have a data structure with only the + values that were read or touched. + + Instances of variables classes are created by invoking ``class_name()``, + except when class_name derives from ``ContextDerivedValue`` or + ``SubContext``, in which case ``class_name(instance_of_the_context)`` or + ``class_name(self)`` is invoked. A value is added to those calls when + instances are created during assignment (setitem). + + allowed_variables is a dict of the variables that can be set and read in + this context instance. Keys in this dict are the strings representing keys + in this context which are valid. 
Values are tuples of stored type, + assigned type, default value, a docstring describing the purpose of the + variable, and a tier indicator (see comment above the VARIABLES declaration + in this module). + + config is the ConfigEnvironment for this context. + """ + + def __init__(self, allowed_variables={}, config=None, finder=None): + self._allowed_variables = allowed_variables + self.main_path = None + self.current_path = None + # There aren't going to be enough paths for the performance of scanning + # a list to be a problem. + self._all_paths = [] + self.config = config + self._sandbox = None + self._finder = finder + KeyedDefaultDict.__init__(self, self._factory) + + def push_source(self, path): + """Adds the given path as source of the data from this context and make + it the current path for the context.""" + assert os.path.isabs(path) + if not self.main_path: + self.main_path = path + else: + # Callers shouldn't push after main_path has been popped. + assert self.current_path + self.current_path = path + # The same file can be pushed twice, so don't remove any previous + # occurrence. + self._all_paths.append(path) + + def pop_source(self): + """Get back to the previous current path for the context.""" + assert self.main_path + assert self.current_path + last = self._all_paths.pop() + # Keep the popped path in the list of all paths, but before the main + # path so that it's not popped again. + self._all_paths.insert(0, last) + if last == self.main_path: + self.current_path = None + else: + self.current_path = self._all_paths[-1] + return last + + def add_source(self, path): + """Adds the given path as source of the data from this context.""" + assert os.path.isabs(path) + if not self.main_path: + self.main_path = self.current_path = path + # Insert at the beginning of the list so that it's always before the + # main path. + if path not in self._all_paths: + self._all_paths.insert(0, path) + + @property + def error_is_fatal(self): + """Returns True if the error function should be fatal.""" + return self.config and getattr(self.config, "error_is_fatal", True) + + @property + def all_paths(self): + """Returns all paths ever added to the context.""" + return set(self._all_paths) + + @property + def source_stack(self): + """Returns the current stack of pushed sources.""" + if not self.current_path: + return [] + return self._all_paths[self._all_paths.index(self.main_path) :] + + @memoized_property + def objdir(self): + return mozpath.join(self.config.topobjdir, self.relobjdir).rstrip("/") + + @memoize + def _srcdir(self, path): + return mozpath.join(self.config.topsrcdir, self._relsrcdir(path)).rstrip("/") + + @property + def srcdir(self): + return self._srcdir(self.current_path or self.main_path) + + @memoize + def _relsrcdir(self, path): + return mozpath.relpath(mozpath.dirname(path), self.config.topsrcdir) + + @property + def relsrcdir(self): + assert self.main_path + return self._relsrcdir(self.current_path or self.main_path) + + @memoized_property + def relobjdir(self): + assert self.main_path + return mozpath.relpath(mozpath.dirname(self.main_path), self.config.topsrcdir) + + def _factory(self, key): + """Function called when requesting a missing key.""" + defaults = self._allowed_variables.get(key) + if not defaults: + raise KeyError("global_ns", "get_unknown", key) + + # If the default is specifically a lambda (or, rather, any function + # --but not a class that can be called), then it is actually a rule to + # generate the default that should be used. 
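
The lazy-default behaviour described above, in miniature: unknown keys raise, known keys are materialized on first read from their declared constructor. This is a hypothetical sketch; the real Context also validates assignments and special-cases ContextDerivedValue.

    class LazyContext(dict):
        def __init__(self, allowed):
            super().__init__()
            self._allowed = allowed  # variable name -> zero-arg constructor

        def __missing__(self, key):
            if key not in self._allowed:
                raise KeyError(key)
            value = self[key] = self._allowed[key]()
            return value

    ctx = LazyContext({"SOURCES": list})
    ctx["SOURCES"].append("foo.cpp")  # default list created on first access
    assert ctx["SOURCES"] == ["foo.cpp"]
    try:
        ctx["UNKNOWN"]
    except KeyError:
        pass  # unknown variables are rejected, as in Context._factory
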
+ default = defaults[0] + if issubclass(default, ContextDerivedValue): + return default(self) + else: + return default() + + def _validate(self, key, value, is_template=False): + """Validates whether the key is allowed and if the value's type + matches. + """ + stored_type, input_type, docs = self._allowed_variables.get( + key, (None, None, None) + ) + + if stored_type is None or not is_template and key in TEMPLATE_VARIABLES: + raise KeyError("global_ns", "set_unknown", key, value) + + # If the incoming value is not the type we store, we try to convert + # it to that type. This relies on proper coercion rules existing. This + # is the responsibility of whoever defined the symbols: a type should + # not be in the allowed set if the constructor function for the stored + # type does not accept an instance of that type. + if not isinstance(value, (stored_type, input_type)): + raise ValueError("global_ns", "set_type", key, value, input_type) + + return stored_type + + def __setitem__(self, key, value): + stored_type = self._validate(key, value) + + if not isinstance(value, stored_type): + if issubclass(stored_type, ContextDerivedValue): + value = stored_type(self, value) + else: + value = stored_type(value) + + return KeyedDefaultDict.__setitem__(self, key, value) + + def update(self, iterable={}, **kwargs): + """Like dict.update(), but using the context's setitem. + + This function is transactional: if setitem fails for one of the values, + the context is not updated at all.""" + if isinstance(iterable, dict): + iterable = iterable.items() + + update = {} + for key, value in itertools.chain(iterable, kwargs.items()): + stored_type = self._validate(key, value) + # Don't create an instance of stored_type if coercion is needed, + # until all values are validated. + update[key] = (value, stored_type) + for key, (value, stored_type) in update.items(): + if not isinstance(value, stored_type): + update[key] = stored_type(value) + else: + update[key] = value + KeyedDefaultDict.update(self, update) + + +class TemplateContext(Context): + def __init__(self, template=None, allowed_variables={}, config=None): + self.template = template + super(TemplateContext, self).__init__(allowed_variables, config) + + def _validate(self, key, value): + return Context._validate(self, key, value, True) + + +class SubContext(Context, ContextDerivedValue): + """A Context derived from another Context. + + Sub-contexts are intended to be used as context managers. + + Sub-contexts inherit paths and other relevant state from the parent + context. + """ + + def __init__(self, parent): + assert isinstance(parent, Context) + + Context.__init__(self, allowed_variables=self.VARIABLES, config=parent.config) + + # Copy state from parent. 
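+        # Specifically: replay the parent's stack of pushed sources onto this
+        # sub-context, and share its sandbox handle (a callable reference,
+        # which __enter__/__exit__ below invoke as self._sandbox()).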
+ for p in parent.source_stack: + self.push_source(p) + self._sandbox = parent._sandbox + + def __enter__(self): + if not self._sandbox or self._sandbox() is None: + raise Exception("a sandbox is required") + + self._sandbox().push_subcontext(self) + + def __exit__(self, exc_type, exc_value, traceback): + self._sandbox().pop_subcontext(self) + + +class InitializedDefines(ContextDerivedValue, OrderedDict): + def __init__(self, context, value=None): + OrderedDict.__init__(self) + for define in context.config.substs.get("MOZ_DEBUG_DEFINES", ()): + self[define] = 1 + if value: + if not isinstance(value, OrderedDict): + raise ValueError("Can only initialize with another OrderedDict") + self.update(value) + + def update(self, *other, **kwargs): + # Since iteration over non-ordered dicts is non-deterministic, this dict + # will be populated in an unpredictable order unless the argument to + # update() is also ordered. (It's important that we maintain this + # invariant so we can be sure that running `./mach build-backend` twice + # in a row without updating any files in the workspace generates exactly + # the same output.) + if kwargs: + raise ValueError("Cannot call update() with kwargs") + if other: + if not isinstance(other[0], OrderedDict): + raise ValueError("Can only call update() with another OrderedDict") + return super(InitializedDefines, self).update(*other, **kwargs) + raise ValueError("No arguments passed to update()") + + +class BaseCompileFlags(ContextDerivedValue, dict): + def __init__(self, context): + self._context = context + + klass_name = self.__class__.__name__ + for k, v, build_vars in self.flag_variables: + if not isinstance(k, six.text_type): + raise ValueError("Flag %s for %s is not a string" % (k, klass_name)) + if not isinstance(build_vars, tuple): + raise ValueError( + "Build variables `%s` for %s in %s is not a tuple" + % (build_vars, k, klass_name) + ) + + self._known_keys = set(k for k, v, _ in self.flag_variables) + + # Providing defaults here doesn't play well with multiple templates + # modifying COMPILE_FLAGS from the same moz.build, because the merge + # done after the template runs can't tell which values coming from + # a template were set and which were provided as defaults. 
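+        # Put differently: a plain moz.build file (template None) and the Gyp
+        # template are seeded with the per-variable defaults from
+        # flag_variables below; any other template starts from an empty dict,
+        # so the post-template merge only sees values the template set.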
+ template_name = getattr(context, "template", None) + if template_name in (None, "Gyp"): + dict.__init__( + self, + ( + (k, v if v is None else TypedList(six.text_type)(v)) + for k, v, _ in self.flag_variables + ), + ) + else: + dict.__init__(self) + + +class HostCompileFlags(BaseCompileFlags): + def __init__(self, context): + self._context = context + main_src_dir = mozpath.dirname(context.main_path) + + self.flag_variables = ( + ( + "HOST_CXXFLAGS", + context.config.substs.get("HOST_CXXFLAGS"), + ("HOST_CXXFLAGS", "HOST_CXX_LDFLAGS"), + ), + ( + "HOST_CFLAGS", + context.config.substs.get("HOST_CFLAGS"), + ("HOST_CFLAGS", "HOST_C_LDFLAGS"), + ), + ( + "HOST_OPTIMIZE", + self._optimize_flags(), + ("HOST_CFLAGS", "HOST_CXXFLAGS", "HOST_C_LDFLAGS", "HOST_CXX_LDFLAGS"), + ), + ("RTL", None, ("HOST_CFLAGS", "HOST_C_LDFLAGS")), + ("HOST_DEFINES", None, ("HOST_CFLAGS", "HOST_CXXFLAGS")), + ("MOZBUILD_HOST_CFLAGS", [], ("HOST_CFLAGS", "HOST_C_LDFLAGS")), + ("MOZBUILD_HOST_CXXFLAGS", [], ("HOST_CXXFLAGS", "HOST_CXX_LDFLAGS")), + ( + "BASE_INCLUDES", + ["-I%s" % main_src_dir, "-I%s" % context.objdir], + ("HOST_CFLAGS", "HOST_CXXFLAGS"), + ), + ("LOCAL_INCLUDES", None, ("HOST_CFLAGS", "HOST_CXXFLAGS")), + ( + "EXTRA_INCLUDES", + ["-I%s/dist/include" % context.config.topobjdir], + ("HOST_CFLAGS", "HOST_CXXFLAGS"), + ), + ( + "WARNINGS_CFLAGS", + context.config.substs.get("WARNINGS_HOST_CFLAGS"), + ("HOST_CFLAGS",), + ), + ( + "WARNINGS_CXXFLAGS", + context.config.substs.get("WARNINGS_HOST_CXXFLAGS"), + ("HOST_CXXFLAGS",), + ), + ) + BaseCompileFlags.__init__(self, context) + + def _optimize_flags(self): + optimize_flags = [] + if self._context.config.substs.get("CROSS_COMPILE"): + optimize_flags += self._context.config.substs.get("HOST_OPTIMIZE_FLAGS") + elif self._context.config.substs.get("MOZ_OPTIMIZE"): + optimize_flags += self._context.config.substs.get("MOZ_OPTIMIZE_FLAGS") + return optimize_flags + + +class AsmFlags(BaseCompileFlags): + def __init__(self, context): + self._context = context + self.flag_variables = ( + ("DEFINES", None, ("SFLAGS",)), + ("LIBRARY_DEFINES", None, ("SFLAGS",)), + ("OS", context.config.substs.get("ASFLAGS"), ("ASFLAGS", "SFLAGS")), + ("DEBUG", self._debug_flags(), ("ASFLAGS", "SFLAGS")), + ("LOCAL_INCLUDES", None, ("SFLAGS",)), + ("MOZBUILD", None, ("ASFLAGS", "SFLAGS")), + ) + BaseCompileFlags.__init__(self, context) + + def _debug_flags(self): + debug_flags = [] + if self._context.config.substs.get( + "MOZ_DEBUG" + ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"): + if self._context.get("USE_NASM"): + if self._context.config.substs.get("OS_ARCH") == "WINNT": + debug_flags += ["-F", "cv8"] + elif self._context.config.substs.get("OS_ARCH") != "Darwin": + debug_flags += ["-F", "dwarf"] + elif ( + self._context.config.substs.get("OS_ARCH") == "WINNT" + and self._context.config.substs.get("CPU_ARCH") == "aarch64" + ): + # armasm64 accepts a paucity of options compared to ml/ml64. 
+ pass + else: + debug_flags += self._context.config.substs.get( + "MOZ_DEBUG_FLAGS", "" + ).split() + return debug_flags + + +class LinkFlags(BaseCompileFlags): + def __init__(self, context): + self._context = context + + self.flag_variables = ( + ("OS", self._os_ldflags(), ("LDFLAGS",)), + ( + "MOZ_HARDENING_LDFLAGS", + context.config.substs.get("MOZ_HARDENING_LDFLAGS"), + ("LDFLAGS",), + ), + ("DEFFILE", None, ("LDFLAGS",)), + ("MOZBUILD", None, ("LDFLAGS",)), + ( + "FIX_LINK_PATHS", + context.config.substs.get("MOZ_FIX_LINK_PATHS"), + ("LDFLAGS",), + ), + ( + "OPTIMIZE", + ( + context.config.substs.get("MOZ_OPTIMIZE_LDFLAGS", []) + if context.config.substs.get("MOZ_OPTIMIZE") + else [] + ), + ("LDFLAGS",), + ), + ( + "CETCOMPAT", + ( + context.config.substs.get("MOZ_CETCOMPAT_LDFLAGS") + if context.config.substs.get("NIGHTLY_BUILD") + else [] + ), + ("LDFLAGS",), + ), + ) + BaseCompileFlags.__init__(self, context) + + def _os_ldflags(self): + flags = self._context.config.substs.get("OS_LDFLAGS", [])[:] + + if self._context.config.substs.get( + "MOZ_DEBUG" + ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"): + flags += self._context.config.substs.get("MOZ_DEBUG_LDFLAGS", []) + + # TODO: This is pretty convoluted, and isn't really a per-context thing, + # configure would be a better place to aggregate these. + if all( + [ + self._context.config.substs.get("OS_ARCH") == "WINNT", + not self._context.config.substs.get("GNU_CC"), + not self._context.config.substs.get("MOZ_DEBUG"), + ] + ): + + if self._context.config.substs.get("MOZ_OPTIMIZE"): + flags.append("-OPT:REF,ICF") + + return flags + + +class TargetCompileFlags(BaseCompileFlags): + """Base class that encapsulates some common logic between CompileFlags and + WasmCompileFlags. + """ + + def _debug_flags(self): + if self._context.config.substs.get( + "MOZ_DEBUG" + ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"): + return self._context.config.substs.get("MOZ_DEBUG_FLAGS", "").split() + return [] + + def _warnings_as_errors(self): + warnings_as_errors = self._context.config.substs.get("WARNINGS_AS_ERRORS") + if warnings_as_errors: + return [warnings_as_errors] + + def _optimize_flags(self): + if not self._context.config.substs.get("MOZ_OPTIMIZE"): + return [] + optimize_flags = None + if self._context.config.substs.get("MOZ_PGO"): + optimize_flags = self._context.config.substs.get("MOZ_PGO_OPTIMIZE_FLAGS") + if not optimize_flags: + # If MOZ_PGO_OPTIMIZE_FLAGS is empty we fall back to + # MOZ_OPTIMIZE_FLAGS. Presently this occurs on Windows. + optimize_flags = self._context.config.substs.get("MOZ_OPTIMIZE_FLAGS") + return optimize_flags + + def __setitem__(self, key, value): + if key not in self._known_keys: + raise ValueError( + "Invalid value. `%s` is not a compile flags " "category." % key + ) + if key in self and self[key] is None: + raise ValueError( + "`%s` may not be set in COMPILE_FLAGS from moz.build, this " + "value is resolved from the emitter." % key + ) + if not ( + isinstance(value, list) + and all(isinstance(v, six.string_types) for v in value) + ): + raise ValueError( + "A list of strings must be provided as a value for a compile " + "flags category." 
+ ) + dict.__setitem__(self, key, value) + + +class CompileFlags(TargetCompileFlags): + def __init__(self, context): + main_src_dir = mozpath.dirname(context.main_path) + self._context = context + + self.flag_variables = ( + ("STL", context.config.substs.get("STL_FLAGS"), ("CXXFLAGS",)), + ( + "VISIBILITY", + context.config.substs.get("VISIBILITY_FLAGS"), + ("CXXFLAGS", "CFLAGS"), + ), + ( + "MOZ_HARDENING_CFLAGS", + context.config.substs.get("MOZ_HARDENING_CFLAGS"), + ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ("DEFINES", None, ("CXXFLAGS", "CFLAGS")), + ("LIBRARY_DEFINES", None, ("CXXFLAGS", "CFLAGS")), + ( + "BASE_INCLUDES", + ["-I%s" % main_src_dir, "-I%s" % context.objdir], + ("CXXFLAGS", "CFLAGS"), + ), + ("LOCAL_INCLUDES", None, ("CXXFLAGS", "CFLAGS")), + ( + "EXTRA_INCLUDES", + ["-I%s/dist/include" % context.config.topobjdir], + ("CXXFLAGS", "CFLAGS"), + ), + ( + "OS_INCLUDES", + list( + itertools.chain( + *( + context.config.substs.get(v, []) + for v in ( + "NSPR_CFLAGS", + "NSS_CFLAGS", + "MOZ_JPEG_CFLAGS", + "MOZ_PNG_CFLAGS", + "MOZ_ZLIB_CFLAGS", + "MOZ_PIXMAN_CFLAGS", + "MOZ_ICU_CFLAGS", + ) + ) + ) + ), + ("CXXFLAGS", "CFLAGS"), + ), + ("RTL", None, ("CXXFLAGS", "CFLAGS")), + ( + "OS_COMPILE_CFLAGS", + context.config.substs.get("OS_COMPILE_CFLAGS"), + ("CFLAGS",), + ), + ( + "OS_COMPILE_CXXFLAGS", + context.config.substs.get("OS_COMPILE_CXXFLAGS"), + ("CXXFLAGS",), + ), + ( + "OS_CPPFLAGS", + context.config.substs.get("OS_CPPFLAGS"), + ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ( + "OS_CFLAGS", + context.config.substs.get("OS_CFLAGS"), + ("CFLAGS", "C_LDFLAGS"), + ), + ( + "OS_CXXFLAGS", + context.config.substs.get("OS_CXXFLAGS"), + ("CXXFLAGS", "CXX_LDFLAGS"), + ), + ( + "DEBUG", + self._debug_flags(), + ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ( + "CLANG_PLUGIN", + context.config.substs.get("CLANG_PLUGIN_FLAGS"), + ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ( + "OPTIMIZE", + self._optimize_flags(), + ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ( + "FRAMEPTR", + context.config.substs.get("MOZ_FRAMEPTR_FLAGS"), + ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ( + "WARNINGS_AS_ERRORS", + self._warnings_as_errors(), + ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"), + ), + ( + "WARNINGS_CFLAGS", + context.config.substs.get("WARNINGS_CFLAGS"), + ("CFLAGS",), + ), + ( + "WARNINGS_CXXFLAGS", + context.config.substs.get("WARNINGS_CXXFLAGS"), + ("CXXFLAGS",), + ), + ("MOZBUILD_CFLAGS", None, ("CFLAGS",)), + ("MOZBUILD_CXXFLAGS", None, ("CXXFLAGS",)), + ( + "COVERAGE", + context.config.substs.get("COVERAGE_CFLAGS"), + ("CXXFLAGS", "CFLAGS"), + ), + ( + "PASS_MANAGER", + context.config.substs.get("MOZ_PASS_MANAGER_FLAGS"), + ("CXXFLAGS", "CFLAGS"), + ), + ( + "FILE_PREFIX_MAP", + context.config.substs.get("MOZ_FILE_PREFIX_MAP_FLAGS"), + ("CXXFLAGS", "CFLAGS"), + ), + ( + # See bug 414641 + "NO_STRICT_ALIASING", + ["-fno-strict-aliasing"], + ("CXXFLAGS", "CFLAGS"), + ), + ( + # Disable floating-point contraction by default. 
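+                # (With clang-cl, the flag is forwarded to the underlying cc1
+                # via -Xclang, presumably because the driver does not accept
+                # -ffp-contract directly; other compilers take the flag as-is.)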
+ "FP_CONTRACT", + ( + ["-Xclang"] + if context.config.substs.get("CC_TYPE") == "clang-cl" + else [] + ) + + ["-ffp-contract=off"], + ("CXXFLAGS", "CFLAGS"), + ), + ) + + TargetCompileFlags.__init__(self, context) + + +class WasmFlags(TargetCompileFlags): + def __init__(self, context): + main_src_dir = mozpath.dirname(context.main_path) + self._context = context + + self.flag_variables = ( + ("LIBRARY_DEFINES", None, ("WASM_CXXFLAGS", "WASM_CFLAGS")), + ( + "BASE_INCLUDES", + ["-I%s" % main_src_dir, "-I%s" % context.objdir], + ("WASM_CXXFLAGS", "WASM_CFLAGS"), + ), + ("LOCAL_INCLUDES", None, ("WASM_CXXFLAGS", "WASM_CFLAGS")), + ( + "EXTRA_INCLUDES", + ["-I%s/dist/include" % context.config.topobjdir], + ("WASM_CXXFLAGS", "WASM_CFLAGS"), + ), + ( + "OS_INCLUDES", + list( + itertools.chain( + *( + context.config.substs.get(v, []) + for v in ( + "NSPR_CFLAGS", + "NSS_CFLAGS", + "MOZ_JPEG_CFLAGS", + "MOZ_PNG_CFLAGS", + "MOZ_ZLIB_CFLAGS", + "MOZ_PIXMAN_CFLAGS", + ) + ) + ) + ), + ("WASM_CXXFLAGS", "WASM_CFLAGS"), + ), + ("DEBUG", self._debug_flags(), ("WASM_CFLAGS", "WASM_CXXFLAGS")), + ( + "CLANG_PLUGIN", + context.config.substs.get("CLANG_PLUGIN_FLAGS"), + ("WASM_CFLAGS", "WASM_CXXFLAGS"), + ), + ("OPTIMIZE", self._optimize_flags(), ("WASM_CFLAGS", "WASM_CXXFLAGS")), + ( + "WARNINGS_AS_ERRORS", + self._warnings_as_errors(), + ("WASM_CXXFLAGS", "WASM_CFLAGS"), + ), + ("MOZBUILD_CFLAGS", None, ("WASM_CFLAGS",)), + ("MOZBUILD_CXXFLAGS", None, ("WASM_CXXFLAGS",)), + ("WASM_CFLAGS", context.config.substs.get("WASM_CFLAGS"), ("WASM_CFLAGS",)), + ( + "WASM_CXXFLAGS", + context.config.substs.get("WASM_CXXFLAGS"), + ("WASM_CXXFLAGS",), + ), + ("WASM_DEFINES", None, ("WASM_CFLAGS", "WASM_CXXFLAGS")), + ("MOZBUILD_WASM_CFLAGS", None, ("WASM_CFLAGS",)), + ("MOZBUILD_WASM_CXXFLAGS", None, ("WASM_CXXFLAGS",)), + ( + "NEWPM", + context.config.substs.get("MOZ_NEW_PASS_MANAGER_FLAGS"), + ("WASM_CFLAGS", "WASM_CXXFLAGS"), + ), + ( + "FILE_PREFIX_MAP", + context.config.substs.get("MOZ_FILE_PREFIX_MAP_FLAGS"), + ("WASM_CFLAGS", "WASM_CXXFLAGS"), + ), + ("STL", context.config.substs.get("STL_FLAGS"), ("WASM_CXXFLAGS",)), + ) + + TargetCompileFlags.__init__(self, context) + + def _debug_flags(self): + substs = self._context.config.substs + if substs.get("MOZ_DEBUG") or substs.get("MOZ_DEBUG_SYMBOLS"): + return ["-g"] + return [] + + def _optimize_flags(self): + if not self._context.config.substs.get("MOZ_OPTIMIZE"): + return [] + + # We don't want `MOZ_{PGO_,}OPTIMIZE_FLAGS here because they may contain + # optimization flags that aren't suitable for wasm (e.g. -freorder-blocks). + # Just optimize for size in all cases; we may want to make this + # configurable. + return ["-Os"] + + +class FinalTargetValue(ContextDerivedValue, six.text_type): + def __new__(cls, context, value=""): + if not value: + value = "dist/" + if context["XPI_NAME"]: + value += "xpi-stage/" + context["XPI_NAME"] + else: + value += "bin" + if context["DIST_SUBDIR"]: + value += "/" + context["DIST_SUBDIR"] + return six.text_type.__new__(cls, value) + + +def Enum(*values): + assert len(values) + default = values[0] + + class EnumClass(object): + def __new__(cls, value=None): + if value is None: + return default + if value in values: + return value + raise ValueError( + "Invalid value. Allowed values are: %s" + % ", ".join(repr(v) for v in values) + ) + + return EnumClass + + +class PathMeta(type): + """Meta class for the Path family of classes. 
+ + It handles calling __new__ with the right arguments in cases where a Path + is instantiated with another instance of Path instead of having received a + context. + + It also makes Path(context, value) instantiate one of the + subclasses depending on the value, allowing callers to do + standard type checking (isinstance(path, ObjDirPath)) instead + of checking the value itself (path.startswith('!')). + """ + + def __call__(cls, context, value=None): + if isinstance(context, Path): + assert value is None + value = context + context = context.context + else: + assert isinstance(context, Context) + if isinstance(value, Path): + context = value.context + if not issubclass(cls, (SourcePath, ObjDirPath, AbsolutePath)): + if value.startswith("!"): + cls = ObjDirPath + elif value.startswith("%"): + cls = AbsolutePath + else: + cls = SourcePath + return super(PathMeta, cls).__call__(context, value) + + +class Path(six.with_metaclass(PathMeta, ContextDerivedValue, six.text_type)): + """Stores and resolves a source path relative to a given context + + This class is used as a backing type for some of the sandbox variables. + It expresses paths relative to a context. Supported paths are: + - '/topsrcdir/relative/paths' + - 'srcdir/relative/paths' + - '!/topobjdir/relative/paths' + - '!objdir/relative/paths' + - '%/filesystem/absolute/paths' + """ + + def __new__(cls, context, value=None): + self = super(Path, cls).__new__(cls, value) + self.context = context + self.srcdir = context.srcdir + return self + + def join(self, *p): + """ContextDerived equivalent of `mozpath.join(self, *p)`, returning a + new Path instance. + """ + return Path(self.context, mozpath.join(self, *p)) + + def __cmp__(self, other): + # We expect this function to never be called to avoid issues in the + # switch from Python 2 to 3. + raise AssertionError() + + def _cmp(self, other, op): + if isinstance(other, Path) and self.srcdir != other.srcdir: + return op(self.full_path, other.full_path) + return op(six.text_type(self), other) + + def __eq__(self, other): + return self._cmp(other, operator.eq) + + def __ne__(self, other): + return self._cmp(other, operator.ne) + + def __lt__(self, other): + return self._cmp(other, operator.lt) + + def __gt__(self, other): + return self._cmp(other, operator.gt) + + def __le__(self, other): + return self._cmp(other, operator.le) + + def __ge__(self, other): + return self._cmp(other, operator.ge) + + def __repr__(self): + return "<%s (%s)%s>" % (self.__class__.__name__, self.srcdir, self) + + def __hash__(self): + return hash(self.full_path) + + @memoized_property + def target_basename(self): + return mozpath.basename(self.full_path) + + +class SourcePath(Path): + """Like Path, but limited to paths in the source directory.""" + + def __new__(cls, context, value=None): + if value.startswith("!"): + raise ValueError(f'Object directory paths are not allowed\nPath: "{value}"') + if value.startswith("%"): + raise ValueError( + f'Filesystem absolute paths are not allowed\nPath: "{value}"' + ) + self = super(SourcePath, cls).__new__(cls, context, value) + + if value.startswith("/"): + path = None + if not path or not os.path.exists(path): + path = mozpath.join(context.config.topsrcdir, value[1:]) + else: + path = mozpath.join(self.srcdir, value) + self.full_path = mozpath.normpath(path) + return self + + @memoized_property + def translated(self): + """Returns the corresponding path in the objdir. 
+
+        Ideally, we wouldn't need this function, but because source paths
+        under topsrcdir and the external source dir both end up mixed in the
+        objdir (aka pseudo-rework), it is needed.
+        """
+        return ObjDirPath(self.context, "!%s" % self).full_path
+
+
+class RenamedSourcePath(SourcePath):
+    """Like SourcePath, but with a different base name when installed.
+
+    The constructor takes a tuple of (source, target_basename).
+
+    This class is not meant to be exposed to moz.build sandboxes as of now,
+    and is not supported by the RecursiveMake backend.
+    """
+
+    def __new__(cls, context, value):
+        assert isinstance(value, tuple)
+        source, target_basename = value
+        self = super(RenamedSourcePath, cls).__new__(cls, context, source)
+        self._target_basename = target_basename
+        return self
+
+    @property
+    def target_basename(self):
+        return self._target_basename
+
+
+class ObjDirPath(Path):
+    """Like Path, but limited to paths in the object directory."""
+
+    def __new__(cls, context, value=None):
+        if not value.startswith("!"):
+            raise ValueError("Object directory paths must start with ! prefix")
+        self = super(ObjDirPath, cls).__new__(cls, context, value)
+
+        if value.startswith("!/"):
+            path = mozpath.join(context.config.topobjdir, value[2:])
+        else:
+            path = mozpath.join(context.objdir, value[1:])
+        self.full_path = mozpath.normpath(path)
+        return self
+
+
+class AbsolutePath(Path):
+    """Like Path, but allows arbitrary paths outside the source and object directories."""
+
+    def __new__(cls, context, value=None):
+        if not value.startswith("%"):
+            raise ValueError("Absolute paths must start with % prefix")
+        if not os.path.isabs(value[1:]):
+            raise ValueError("Path '%s' is not absolute" % value[1:])
+        self = super(AbsolutePath, cls).__new__(cls, context, value)
+        self.full_path = mozpath.normpath(value[1:])
+        return self
+
+
+@memoize
+def ContextDerivedTypedList(klass, base_class=List):
+    """Specialized TypedList for use with ContextDerivedValue types."""
+    assert issubclass(klass, ContextDerivedValue)
+
+    class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
+        def __init__(self, context, iterable=[], **kwargs):
+            self.context = context
+            super(_TypedList, self).__init__(iterable, **kwargs)
+
+        def normalize(self, e):
+            if not isinstance(e, klass):
+                e = klass(self.context, e)
+            return e
+
+    return _TypedList
+
+
+@memoize
+def ContextDerivedTypedListWithItems(type, base_class=List):
+    """Specialized TypedList for use with ContextDerivedValue types."""
+
+    class _TypedListWithItems(ContextDerivedTypedList(type, base_class)):
+        def __getitem__(self, name):
+            name = self.normalize(name)
+            return super(_TypedListWithItems, self).__getitem__(name)
+
+    return _TypedListWithItems
+
+
+@memoize
+def ContextDerivedTypedRecord(*fields):
+    """Factory for objects with certain properties and dynamic
+    type checks.
+
+    This API is extremely similar to the TypedNamedTuple API,
+    except that properties may be mutated. This supports syntax like:
+
+    .. code-block:: python
+
+        VARIABLE_NAME.property += [
+            'item1',
+            'item2',
+        ]
+    """
+
+    class _TypedRecord(ContextDerivedValue):
+        __slots__ = tuple([name for name, _ in fields])
+
+        def __init__(self, context):
+            for fname, ftype in self._fields.items():
+                if issubclass(ftype, ContextDerivedValue):
+                    setattr(self, fname, self._fields[fname](context))
+                else:
+                    setattr(self, fname, self._fields[fname]())
+
+        def __setattr__(self, name, value):
+            if name in self._fields and not isinstance(value, self._fields[name]):
+                value = self._fields[name](value)
+            object.__setattr__(self, name, value)
+
+    _TypedRecord._fields = dict(fields)
+    return _TypedRecord
+
+
+class Schedules(object):
+    """Similar to a ContextDerivedTypedRecord, but with different behavior
+    for the properties:
+
+    * VAR.inclusive can only be appended to (+=), and can only contain values
+      from mozbuild.schedules.INCLUSIVE_COMPONENTS
+
+    * VAR.exclusive can only be assigned to (no +=), and can only contain
+      values from mozbuild.schedules.ALL_COMPONENTS
+    """
+
+    __slots__ = ("_exclusive", "_inclusive")
+
+    def __init__(self, inclusive=None, exclusive=None):
+        if inclusive is None:
+            self._inclusive = TypedList(Enum(*schedules.INCLUSIVE_COMPONENTS))()
+        else:
+            self._inclusive = inclusive
+        if exclusive is None:
+            self._exclusive = ImmutableStrictOrderingOnAppendList(
+                schedules.EXCLUSIVE_COMPONENTS
+            )
+        else:
+            self._exclusive = exclusive
+
+    # inclusive is mutable but cannot be assigned to (+= only)
+    @property
+    def inclusive(self):
+        return self._inclusive
+
+    @inclusive.setter
+    def inclusive(self, value):
+        if value is not self._inclusive:
+            raise AttributeError("Cannot assign to this value - use += instead")
+        unexpected = [v for v in value if v not in schedules.INCLUSIVE_COMPONENTS]
+        if unexpected:
+            raise Exception(
+                "unexpected inclusive component(s) " + ", ".join(unexpected)
+            )
+
+    # exclusive is immutable but can be set (= only)
+    @property
+    def exclusive(self):
+        return self._exclusive
+
+    @exclusive.setter
+    def exclusive(self, value):
+        if not isinstance(value, (tuple, list)):
+            raise Exception("expected a tuple or list")
+        unexpected = [v for v in value if v not in schedules.ALL_COMPONENTS]
+        if unexpected:
+            raise Exception(
+                "unexpected exclusive component(s) " + ", ".join(unexpected)
+            )
+        self._exclusive = ImmutableStrictOrderingOnAppendList(sorted(value))
+
+    # components provides a synthetic summary of all components
+    @property
+    def components(self):
+        return list(sorted(set(self._inclusive) | set(self._exclusive)))
+
+    # The `Files` context uses | to combine SCHEDULES from multiple levels; at this
+    # point the immutability is no longer needed so we use plain lists
+    def __or__(self, other):
+        inclusive = self._inclusive + other._inclusive
+        if other._exclusive == self._exclusive:
+            exclusive = self._exclusive
+        elif self._exclusive == schedules.EXCLUSIVE_COMPONENTS:
+            exclusive = other._exclusive
+        elif other._exclusive == schedules.EXCLUSIVE_COMPONENTS:
+            exclusive = self._exclusive
+        else:
+            # in a case where two SCHEDULES.exclusive set different values, take
+            # the later one; this acts the way we expect assignment to work.
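+            # Illustrative example: folding exclusive == ['android'] (self)
+            # into exclusive == ['windows'] (other) yields ['windows'], just
+            # as a later plain assignment would win.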
+            exclusive = other._exclusive
+        return Schedules(inclusive=inclusive, exclusive=exclusive)
+
+
+@memoize
+def ContextDerivedTypedHierarchicalStringList(type):
+    """Specialized HierarchicalStringList for use with ContextDerivedValue
+    types."""
+
+    class _TypedListWithItems(ContextDerivedValue, HierarchicalStringList):
+        __slots__ = ("_strings", "_children", "_context")
+
+        def __init__(self, context):
+            self._strings = ContextDerivedTypedList(type, StrictOrderingOnAppendList)(
+                context
+            )
+            self._children = {}
+            self._context = context
+
+        def _get_exportvariable(self, name):
+            child = self._children.get(name)
+            if not child:
+                child = self._children[name] = _TypedListWithItems(self._context)
+            return child
+
+    return _TypedListWithItems
+
+
+def OrderedPathListWithAction(action):
+    """Returns a class which behaves as a StrictOrderingOnAppendList, but
+    invokes the given callable with each input and a context as it is
+    read, storing a tuple including the result and the original item.
+
+    This is used to extend moz.build reading to make more data available in
+    filesystem-reading mode.
+    """
+
+    class _OrderedListWithAction(
+        ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendListWithAction)
+    ):
+        def __init__(self, context, *args):
+            def _action(item):
+                return item, action(context, item)
+
+            super(_OrderedListWithAction, self).__init__(context, action=_action, *args)
+
+    return _OrderedListWithAction
+
+
+ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
+ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)
+
+BugzillaComponent = TypedNamedTuple(
+    "BugzillaComponent", [("product", six.text_type), ("component", six.text_type)]
+)
+SchedulingComponents = ContextDerivedTypedRecord(
+    ("inclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
+    ("exclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
+)
+
+GeneratedFilesList = StrictOrderingOnAppendListWithFlagsFactory(
+    {"script": six.text_type, "inputs": list, "force": bool, "flags": list}
+)
+
+
+class Files(SubContext):
+    """Metadata attached to files.
+
+    It is common to want to annotate files with metadata, such as which
+    Bugzilla component tracks issues with certain files. This sub-context is
+    where we stick that metadata.
+
+    The argument to this sub-context is a file matching pattern that is applied
+    against the host file's directory. If the pattern matches a file whose info
+    is currently being sought, the metadata attached to this instance will be
+    applied to that file.
+
+    Patterns are collections of filename characters with ``/`` used as the
+    directory separator (UNIX-style paths) and ``*`` and ``**`` used to denote
+    wildcard matching.
+
+    Patterns without the ``*`` character are literal matches and will match at
+    most one entity.
+
+    Patterns with ``*`` or ``**`` are wildcard matches. ``*`` matches files
+    at least within a single directory. ``**`` matches files across several
+    directories.
+
+    ``foo.html``
+       Will match only the ``foo.html`` file in the current directory.
+    ``*.jsm``
+       Will match all ``.jsm`` files in the current directory.
+    ``**/*.cpp``
+       Will match all ``.cpp`` files in this and all child directories.
+    ``foo/*.css``
+       Will match all ``.css`` files in the ``foo/`` directory.
+    ``bar/*``
+       Will match all files in the ``bar/`` directory and all of its
+       child directories.
+    ``bar/**``
+       This is equivalent to ``bar/*`` above.
+    ``bar/**/foo``
+       Will match all ``foo`` files in the ``bar/`` directory and all of its
+       child directories.
+
+    The difference in behavior between ``*`` and ``**`` is only evident if
+    a pattern follows the ``*`` or ``**``. A pattern ending with ``*`` is
+    greedy. ``**`` is needed when you need an additional pattern after the
+    wildcard. e.g. ``**/foo``.
+    """
+
+    VARIABLES = {
+        "BUG_COMPONENT": (
+            BugzillaComponent,
+            tuple,
+            """The bug component that tracks changes to these files.
+
+            Values are a 2-tuple of unicode describing the Bugzilla product and
+            component. e.g. ``('Firefox Build System', 'General')``.
+            """,
+        ),
+        "FINAL": (
+            bool,
+            bool,
+            """Mark variable assignments as finalized.
+
+            During normal processing, values from newer Files contexts
+            overwrite previously set values. Last write wins. This behavior is
+            not always desired. ``FINAL`` provides a mechanism to prevent
+            further updates to a variable.
+
+            When ``FINAL`` is set, the value of all variables defined in this
+            context are marked as frozen and all subsequent writes to them
+            are ignored during metadata reading.
+
+            See :ref:`mozbuild_files_metadata_finalizing` for more info.
+            """,
+        ),
+        "SCHEDULES": (
+            Schedules,
+            list,
+            """Maps source files to the CI tasks that should be scheduled when
+            they change. The tasks are grouped by named components, and those
+            names appear again in the taskgraph configuration
+            (``$topsrcdir/taskgraph/``).
+
+            Some components are "inclusive", meaning that changes to most files
+            do not schedule them, aside from those described in a Files
+            subcontext. For example, py-lint tasks need not be scheduled for
+            most changes, but should be scheduled when any Python file changes.
+            Such components are named by appending to ``SCHEDULES.inclusive``:
+
+            with Files('**.py'):
+                SCHEDULES.inclusive += ['py-lint']
+
+            Other components are "exclusive", meaning that changes to most
+            files schedule them, but some files affect only one or two
+            components. For example, most files schedule builds and tests of
+            Firefox for Android, OS X, Windows, and Linux, but files under
+            ``mobile/android/`` affect Android builds and tests exclusively, so
+            builds for other operating systems are not needed. Test suites
+            provide another example: most files schedule reftests, but changes
+            to reftest scripts need only schedule reftests and no other suites.
+
+            Exclusive components are named by setting ``SCHEDULES.exclusive``:
+
+            with Files('mobile/android/**'):
+                SCHEDULES.exclusive = ['android']
+            """,
+        ),
+    }
+
+    def __init__(self, parent, *patterns):
+        super(Files, self).__init__(parent)
+        self.patterns = patterns
+        self.finalized = set()
+
+    def __iadd__(self, other):
+        assert isinstance(other, Files)
+
+        for k, v in other.items():
+            if k == "SCHEDULES" and "SCHEDULES" in self:
+                self["SCHEDULES"] = self["SCHEDULES"] | v
+                continue
+
+            # Ignore updates to finalized flags.
+            if k in self.finalized:
+                continue
+
+            # Only finalize variables defined in this instance.
+            if k == "FINAL":
+                self.finalized |= set(other) - {"FINAL"}
+                continue
+
+            self[k] = v
+
+        return self
+
+    def asdict(self):
+        """Return this instance as a dict with built-in data structures.
+
+        Call this to obtain an object suitable for serializing.
+        """
+        d = {}
+        if "BUG_COMPONENT" in self:
+            bc = self["BUG_COMPONENT"]
+            d["bug_component"] = (bc.product, bc.component)
+
+        return d
+
+    @staticmethod
+    def aggregate(files):
+        """Given a mapping of path to Files, obtain aggregate results.
+ + Consumers may want to extract useful information from a collection of + Files describing paths. e.g. given the files info data for N paths, + recommend a single bug component based on the most frequent one. This + function provides logic for deriving aggregate knowledge from a + collection of path File metadata. + + Note: the intent of this function is to operate on the result of + :py:func:`mozbuild.frontend.reader.BuildReader.files_info`. The + :py:func:`mozbuild.frontend.context.Files` instances passed in are + thus the "collapsed" (``__iadd__``ed) results of all ``Files`` from all + moz.build files relevant to a specific path, not individual ``Files`` + instances from a single moz.build file. + """ + d = {} + + bug_components = Counter() + + for f in files.values(): + bug_component = f.get("BUG_COMPONENT") + if bug_component: + bug_components[bug_component] += 1 + + d["bug_component_counts"] = [] + for c, count in bug_components.most_common(): + component = (c.product, c.component) + d["bug_component_counts"].append((c, count)) + + if "recommended_bug_component" not in d: + d["recommended_bug_component"] = component + recommended_count = count + elif count == recommended_count: + # Don't recommend a component if it doesn't have a clear lead. + d["recommended_bug_component"] = None + + # In case no bug components. + d.setdefault("recommended_bug_component", None) + + return d + + +# This defines functions that create sub-contexts. +# +# Values are classes that are SubContexts. The class name will be turned into +# a function that when called emits an instance of that class. +# +# Arbitrary arguments can be passed to the class constructor. The first +# argument is always the parent context. It is up to each class to perform +# argument validation. +SUBCONTEXTS = [Files] + +for cls in SUBCONTEXTS: + if not issubclass(cls, SubContext): + raise ValueError("SUBCONTEXTS entry not a SubContext class: %s" % cls) + + if not hasattr(cls, "VARIABLES"): + raise ValueError("SUBCONTEXTS entry does not have VARIABLES: %s" % cls) + +SUBCONTEXTS = {cls.__name__: cls for cls in SUBCONTEXTS} + + +# This defines the set of mutable global variables. +# +# Each variable is a tuple of: +# +# (storage_type, input_types, docs) + +VARIABLES = { + "SOURCES": ( + ContextDerivedTypedListWithItems( + Path, + StrictOrderingOnAppendListWithFlagsFactory({"no_pgo": bool, "flags": List}), + ), + list, + """Source code files. + + This variable contains a list of source code files to compile. + Accepts assembler, C, C++, Objective C/C++. + """, + ), + "FILES_PER_UNIFIED_FILE": ( + int, + int, + """The number of source files to compile into each unified source file. + + """, + ), + "IS_RUST_LIBRARY": ( + bool, + bool, + """Whether the current library defined by this moz.build is built by Rust. + + The library defined by this moz.build should have a build definition in + a Cargo.toml file that exists in this moz.build's directory. + """, + ), + "IS_GKRUST": ( + bool, + bool, + """Whether the current library defined by this moz.build is gkrust. + + Indicates whether the current library contains rust for libxul. + """, + ), + "RUST_LIBRARY_FEATURES": ( + List, + list, + """Cargo features to activate for this library. + + This variable should not be used directly; you should be using the + RustLibrary template instead. + """, + ), + "HOST_RUST_LIBRARY_FEATURES": ( + List, + list, + """Cargo features to activate for this host library. 
+
+        This variable should not be used directly; you should be using the
+        HostRustLibrary template instead.
+        """,
+    ),
+    "RUST_TESTS": (
+        TypedList(six.text_type),
+        list,
+        """Names of Rust tests to build and run via `cargo test`.
+        """,
+    ),
+    "RUST_TEST_FEATURES": (
+        TypedList(six.text_type),
+        list,
+        """Cargo features to activate for RUST_TESTS.
+        """,
+    ),
+    "UNIFIED_SOURCES": (
+        ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+        list,
+        """Source code files that can be compiled together.
+
+        This variable contains a list of source code files to compile
+        that can be concatenated together and built as a single source
+        file. This can help make the build faster and reduce the debug info
+        size.
+        """,
+    ),
+    "GENERATED_FILES": (
+        GeneratedFilesList,
+        list,
+        """Generic generated files.
+
+        Unless you have a reason not to, use the GeneratedFile template rather
+        than referencing GENERATED_FILES directly. The GeneratedFile template
+        has all the same arguments as the attributes listed below (``script``,
+        ``inputs``, ``flags``, ``force``), plus an additional ``entry_point``
+        argument to specify a particular function to run in the given script.
+
+        This variable contains a list of files for the build system to
+        generate at export time. The generation method may be declared
+        with optional ``script``, ``inputs``, ``flags``, and ``force``
+        attributes on individual entries.
+        If the optional ``script`` attribute is not present on an entry, it
+        is assumed that rules for generating the file are present in
+        the associated Makefile.in.
+
+        Example::
+
+            GENERATED_FILES += ['bar.c', 'baz.c', 'foo.c']
+            bar = GENERATED_FILES['bar.c']
+            bar.script = 'generate.py'
+            bar.inputs = ['datafile-for-bar']
+            foo = GENERATED_FILES['foo.c']
+            foo.script = 'generate.py'
+            foo.inputs = ['datafile-for-foo']
+
+        This definition will generate bar.c by calling the main method of
+        generate.py with an open (for writing) file object for bar.c, and
+        the string ``datafile-for-bar``. In a similar fashion, the main
+        method of generate.py will also be called with an open
+        (for writing) file object for foo.c and the string
+        ``datafile-for-foo``. Please note that only string arguments are
+        supported for passing to scripts, and that all arguments provided
+        to the script should be filenames relative to the directory in which
+        the moz.build file is located.
+
+        To enable using the same script for generating multiple files with
+        slightly different non-filename parameters, alternative entry points
+        into ``script`` can be specified::
+
+            GENERATED_FILES += ['bar.c']
+            bar = GENERATED_FILES['bar.c']
+            bar.script = 'generate.py:make_bar'
+
+        The chosen script entry point may optionally return a set of strings,
+        indicating extra files the output depends on.
+
+        When the ``flags`` attribute is present, the given list of flags is
+        passed as extra arguments following the inputs.
+
+        When the ``force`` attribute is present, the file is generated every
+        build, regardless of whether it is stale. This is special to the
+        RecursiveMake backend and intended for special situations only (e.g.,
+        localization). Please consult a build peer (on the #build channel at
+        https://chat.mozilla.org) before using ``force``.
+        """,
+    ),
+    "DEFINES": (
+        InitializedDefines,
+        dict,
+        """Dictionary of compiler defines to declare.
+
+        These are passed in to the compiler as ``-Dkey='value'`` for string
+        values, ``-Dkey=value`` for numeric values, or ``-Dkey`` if the
+        value is True.
+        Note that for string values, the outer level of single-quotes will be
+        consumed by the shell. If you want to have a string-literal in the
+        program, the value needs to have double-quotes.
+
+        Example::
+
+            DEFINES['NS_NO_XPCOM'] = True
+            DEFINES['MOZ_EXTENSIONS_DB_SCHEMA'] = 15
+            DEFINES['DLL_SUFFIX'] = '".so"'
+
+        This will result in the compiler flags ``-DNS_NO_XPCOM``,
+        ``-DMOZ_EXTENSIONS_DB_SCHEMA=15``, and ``-DDLL_SUFFIX='".so"'``,
+        respectively.
+
+        Note that these entries are not necessarily passed to the assembler.
+        Whether they are depends on the type of assembly file. As an
+        alternative, you may add a ``-DKEY=value`` entry to ``ASFLAGS``.
+        """,
+    ),
+    "DELAYLOAD_DLLS": (
+        List,
+        list,
+        """Delay-loaded DLLs.
+
+        This variable contains a list of DLL files which the module being linked
+        should load lazily. This only has an effect when building with MSVC.
+        """,
+    ),
+    "DIRS": (
+        ContextDerivedTypedList(SourcePath),
+        list,
+        """Child directories to descend into looking for build frontend files.
+
+        This works similarly to the ``DIRS`` variable in make files. Each str
+        value in the list is the name of a child directory. When this file is
+        done parsing, the build reader will descend into each listed directory
+        and read the frontend file there. If there is no frontend file, an error
+        is raised.
+
+        Values are relative paths. They can be multiple directory levels
+        above or below. Use ``..`` for parent directories and ``/`` for path
+        delimiters.
+        """,
+    ),
+    "FINAL_TARGET_FILES": (
+        ContextDerivedTypedHierarchicalStringList(Path),
+        list,
+        """List of files to be installed into the application directory.
+
+        ``FINAL_TARGET_FILES`` will copy (or symlink, if the platform supports it)
+        the contents of its files to the directory specified by
+        ``FINAL_TARGET`` (typically ``dist/bin``). Files that are destined for a
+        subdirectory can be specified by accessing a field, or as a dict access.
+        For example, to export ``foo.png`` to the top-level directory and
+        ``bar.svg`` to the directory ``images/do-not-use``, append to
+        ``FINAL_TARGET_FILES`` like so::
+
+            FINAL_TARGET_FILES += ['foo.png']
+            FINAL_TARGET_FILES.images['do-not-use'] += ['bar.svg']
+        """,
+    ),
+    "FINAL_TARGET_PP_FILES": (
+        ContextDerivedTypedHierarchicalStringList(Path),
+        list,
+        """Like ``FINAL_TARGET_FILES``, with preprocessing.
+        """,
+    ),
+    "LOCALIZED_FILES": (
+        ContextDerivedTypedHierarchicalStringList(Path),
+        list,
+        """List of locale-dependent files to be installed into the application
+        directory.
+
+        This functions similarly to ``FINAL_TARGET_FILES``, but the files are
+        sourced from the locale directory and will vary per localization.
+        For an en-US build, this is functionally equivalent to
+        ``FINAL_TARGET_FILES``. For a build with ``--enable-ui-locale``,
+        the file will be taken from ``$LOCALE_SRCDIR``, with the leading
+        ``en-US`` removed. For an l10n repack of an en-US build, the file
+        will be taken from the first location where it exists from:
+
+        * the merged locale directory if it exists
+        * ``$LOCALE_SRCDIR`` with the leading ``en-US`` removed
+        * the in-tree en-US location
+
+        Source directory paths specified here must include a leading ``en-US``.
+        Wildcards are allowed, and will be expanded at the time of locale
+        packaging to match files in the locale directory.
+
+        Object directory paths are allowed here only if the path matches an
+        entry in ``LOCALIZED_GENERATED_FILES``.
+ + Files that are missing from a locale will typically have the en-US + version used, but for wildcard expansions only files from the + locale directory will be used, even if that means no files will + be copied. + + Example:: + + LOCALIZED_FILES.foo += [ + 'en-US/foo.js', + 'en-US/things/*.ini', + ] + + If this was placed in ``toolkit/locales/moz.build``, it would copy + ``toolkit/locales/en-US/foo.js`` and + ``toolkit/locales/en-US/things/*.ini`` to ``$(DIST)/bin/foo`` in an + en-US build, and in a build of a different locale (or a repack), + it would copy ``$(LOCALE_SRCDIR)/toolkit/foo.js`` and + ``$(LOCALE_SRCDIR)/toolkit/things/*.ini``. + """, + ), + "LOCALIZED_PP_FILES": ( + ContextDerivedTypedHierarchicalStringList(Path), + list, + """Like ``LOCALIZED_FILES``, with preprocessing. + + Note that the ``AB_CD`` define is available and expands to the current + locale being packaged, as with preprocessed entries in jar manifests. + """, + ), + "LOCALIZED_GENERATED_FILES": ( + GeneratedFilesList, + list, + """Like ``GENERATED_FILES``, but for files whose content varies based on the locale in use. + + For simple cases of text substitution, prefer ``LOCALIZED_PP_FILES``. + + Refer to the documentation of ``GENERATED_FILES``; for the most part things work the same. + The two major differences are: + 1. The function in the Python script will be passed an additional keyword argument `locale` + which provides the locale in use, i.e. ``en-US``. + 2. The ``inputs`` list may contain paths to files that will be taken from the locale + source directory (see ``LOCALIZED_FILES`` for a discussion of the specifics). Paths + in ``inputs`` starting with ``en-US/`` or containing ``locales/en-US/`` are considered + localized files. + + To place the generated output file in a specific location, list its objdir path in + ``LOCALIZED_FILES``. + + In addition, ``LOCALIZED_GENERATED_FILES`` can use the special substitutions ``{AB_CD}`` + and ``{AB_rCD}`` in their output paths. ``{AB_CD}`` expands to the current locale during + multi-locale builds and single-locale repacks and ``{AB_rCD}`` expands to an + Android-specific encoding of the current locale. Both expand to the empty string when the + current locale is ``en-US``. + """, + ), + "OBJDIR_FILES": ( + ContextDerivedTypedHierarchicalStringList(Path), + list, + """List of files to be installed anywhere in the objdir. Use sparingly. + + ``OBJDIR_FILES`` is similar to FINAL_TARGET_FILES, but it allows copying + anywhere in the object directory. This is intended for various one-off + cases, not for general use. If you wish to add entries to OBJDIR_FILES, + please consult a build peer (on the #build channel at https://chat.mozilla.org). + """, + ), + "OBJDIR_PP_FILES": ( + ContextDerivedTypedHierarchicalStringList(Path), + list, + """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly. + """, + ), + "FINAL_LIBRARY": ( + six.text_type, + six.text_type, + """Library in which the objects of the current directory will be linked. + + This variable contains the name of a library, defined elsewhere with + ``LIBRARY_NAME``, in which the objects of the current directory will be + linked. + """, + ), + "CPP_UNIT_TESTS": ( + StrictOrderingOnAppendList, + list, + """Compile a list of C++ unit test names. + + Each name in this variable corresponds to an executable built from the + corresponding source file with the same base name. + + If the configuration token ``BIN_SUFFIX`` is set, its value will be + automatically appended to each name. 
+        If a name already ends with ``BIN_SUFFIX``, the name will remain
+        unchanged.
+        """,
+    ),
+    "FORCE_SHARED_LIB": (
+        bool,
+        bool,
+        """Whether the library in this directory is a shared library.
+        """,
+    ),
+    "FORCE_STATIC_LIB": (
+        bool,
+        bool,
+        """Whether the library in this directory is a static library.
+        """,
+    ),
+    "USE_STATIC_LIBS": (
+        bool,
+        bool,
+        """Whether the code in this directory is built against the static
+        runtime library.
+
+        This variable only has an effect when building with MSVC.
+        """,
+    ),
+    "HOST_SOURCES": (
+        ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+        list,
+        """Source code files to compile with the host compiler.
+
+        This variable contains a list of source code files to compile
+        with the host compiler.
+        """,
+    ),
+    "WASM_SOURCES": (
+        ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+        list,
+        """Source code files to compile with the wasm compiler.
+        """,
+    ),
+    "HOST_LIBRARY_NAME": (
+        six.text_type,
+        six.text_type,
+        """Name of target library generated when cross compiling.
+        """,
+    ),
+    "LIBRARY_DEFINES": (
+        OrderedDict,
+        dict,
+        """Dictionary of compiler defines to declare for the entire library.
+
+        This variable works like DEFINES, except that declarations apply to all
+        libraries that link into this library via FINAL_LIBRARY.
+        """,
+    ),
+    "LIBRARY_NAME": (
+        six.text_type,
+        six.text_type,
+        """The code name of the library generated for a directory.
+
+        By default STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
+        In ``example/components/moz.build``,::
+
+            LIBRARY_NAME = 'xpcomsample'
+
+        would generate ``example/components/libxpcomsample.so`` on Linux, or
+        ``example/components/xpcomsample.lib`` on Windows.
+        """,
+    ),
+    "SHARED_LIBRARY_NAME": (
+        six.text_type,
+        six.text_type,
+        """The name of the shared library generated for a directory, if it
+        needs to differ from the library code name.
+
+        Implies FORCE_SHARED_LIB.
+        """,
+    ),
+    "SANDBOXED_WASM_LIBRARY_NAME": (
+        six.text_type,
+        six.text_type,
+        """The name of the static sandboxed wasm library generated for a directory.
+        """,
+    ),
+    "SHARED_LIBRARY_OUTPUT_CATEGORY": (
+        six.text_type,
+        six.text_type,
+        """The output category for this context's shared library. If set this will
+        correspond to the build command that will build this shared library, and
+        the library will not be built as part of the default build.
+        """,
+    ),
+    "RUST_LIBRARY_OUTPUT_CATEGORY": (
+        six.text_type,
+        six.text_type,
+        """The output category for this context's rust library. If set this will
+        correspond to the build command that will build this rust library, and
+        the library will not be built as part of the default build.
+        """,
+    ),
+    "IS_FRAMEWORK": (
+        bool,
+        bool,
+        """Whether the library to build should be built as a framework on OSX.
+
+        This implies the name of the library won't be prefixed nor suffixed.
+        Implies FORCE_SHARED_LIB.
+        """,
+    ),
+    "STATIC_LIBRARY_NAME": (
+        six.text_type,
+        six.text_type,
+        """The name of the static library generated for a directory, if it
+        needs to differ from the library code name.
+
+        Implies FORCE_STATIC_LIB.
+        """,
+    ),
+    "USE_LIBS": (
+        StrictOrderingOnAppendList,
+        list,
+        """List of libraries to link to programs and libraries.
+        """,
+    ),
+    "HOST_USE_LIBS": (
+        StrictOrderingOnAppendList,
+        list,
+        """List of libraries to link to host programs and libraries.
+        """,
+    ),
+    "HOST_OS_LIBS": (
+        List,
+        list,
+        """List of system libraries for host programs and libraries.
+ """, + ), + "LOCAL_INCLUDES": ( + ContextDerivedTypedList(Path, StrictOrderingOnAppendList), + list, + """Additional directories to be searched for include files by the compiler. + """, + ), + "NO_PGO": ( + bool, + bool, + """Whether profile-guided optimization is disable in this directory. + """, + ), + "OS_LIBS": ( + List, + list, + """System link libraries. + + This variable contains a list of system libaries to link against. + """, + ), + "RCFILE": ( + Path, + six.text_type, + """The program .rc file. + + This variable can only be used on Windows. + """, + ), + "RCINCLUDE": ( + Path, + six.text_type, + """The resource script file to be included in the default .res file. + + This variable can only be used on Windows. + """, + ), + "DEFFILE": ( + Path, + six.text_type, + """The program .def (module definition) file. + + This variable can only be used on Windows. + """, + ), + "SYMBOLS_FILE": ( + Path, + six.text_type, + """A file containing a list of symbols to export from a shared library. + + The given file contains a list of symbols to be exported, and is + preprocessed. + A special marker "@DATA@" must be added after a symbol name if it + points to data instead of code, so that the Windows linker can treat + them correctly. + """, + ), + "SIMPLE_PROGRAMS": ( + StrictOrderingOnAppendList, + list, + """Compile a list of executable names. + + Each name in this variable corresponds to an executable built from the + corresponding source file with the same base name. + + If the configuration token ``BIN_SUFFIX`` is set, its value will be + automatically appended to each name. If a name already ends with + ``BIN_SUFFIX``, the name will remain unchanged. + """, + ), + "SONAME": ( + six.text_type, + six.text_type, + """The soname of the shared object currently being linked + + soname is the "logical name" of a shared object, often used to provide + version backwards compatibility. This variable makes sense only for + shared objects, and is supported only on some unix platforms. + """, + ), + "HOST_SIMPLE_PROGRAMS": ( + StrictOrderingOnAppendList, + list, + """Compile a list of host executable names. + + Each name in this variable corresponds to a hosst executable built + from the corresponding source file with the same base name. + + If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will + be automatically appended to each name. If a name already ends with + ``HOST_BIN_SUFFIX``, the name will remain unchanged. + """, + ), + "RUST_PROGRAMS": ( + StrictOrderingOnAppendList, + list, + """Compile a list of Rust host executable names. + + Each name in this variable corresponds to an executable built from + the Cargo.toml in the same directory. + """, + ), + "HOST_RUST_PROGRAMS": ( + StrictOrderingOnAppendList, + list, + """Compile a list of Rust executable names. + + Each name in this variable corresponds to an executable built from + the Cargo.toml in the same directory. + """, + ), + "CONFIGURE_SUBST_FILES": ( + ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), + list, + """Output files that will be generated using configure-like substitution. + + This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this + list, we will search for a file in the srcdir having the name + ``{path}.in``. The contents of this file will be read and variable + patterns like ``@foo@`` will be substituted with the values of the + ``AC_SUBST`` variables declared during configure. 
+ """, + ), + "CONFIGURE_DEFINE_FILES": ( + ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), + list, + """Output files generated from configure/config.status. + + This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very + similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes + into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``. + """, + ), + "EXPORTS": ( + ContextDerivedTypedHierarchicalStringList(Path), + list, + """List of files to be exported, and in which subdirectories. + + ``EXPORTS`` is generally used to list the include files to be exported to + ``dist/include``, but it can be used for other files as well. This variable + behaves as a list when appending filenames for export in the top-level + directory. Files can also be appended to a field to indicate which + subdirectory they should be exported to. For example, to export + ``foo.h`` to the top-level directory, and ``bar.h`` to ``mozilla/dom/``, + append to ``EXPORTS`` like so:: + + EXPORTS += ['foo.h'] + EXPORTS.mozilla.dom += ['bar.h'] + + Entries in ``EXPORTS`` are paths, so objdir paths may be used, but + any files listed from the objdir must also be listed in + ``GENERATED_FILES``. + """, + ), + "PROGRAM": ( + six.text_type, + six.text_type, + """Compiled executable name. + + If the configuration token ``BIN_SUFFIX`` is set, its value will be + automatically appended to ``PROGRAM``. If ``PROGRAM`` already ends with + ``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged. + """, + ), + "HOST_PROGRAM": ( + six.text_type, + six.text_type, + """Compiled host executable name. + + If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be + automatically appended to ``HOST_PROGRAM``. If ``HOST_PROGRAM`` already + ends with ``HOST_BIN_SUFFIX``, ``HOST_PROGRAM`` will remain unchanged. + """, + ), + "DIST_INSTALL": ( + Enum(None, False, True), + bool, + """Whether to install certain files into the dist directory. + + By default, some files types are installed in the dist directory, and + some aren't. Set this variable to True to force the installation of + some files that wouldn't be installed by default. Set this variable to + False to force to not install some files that would be installed by + default. + + This is confusing for historical reasons, but eventually, the behavior + will be made explicit. + """, + ), + "JAR_MANIFESTS": ( + ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), + list, + """JAR manifest files that should be processed as part of the build. + + JAR manifests are files in the tree that define how to package files + into JARs and how chrome registration is performed. For more info, + see :ref:`jar_manifests`. + """, + ), + # IDL Generation. + "XPIDL_SOURCES": ( + ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), + list, + """XPCOM Interface Definition Files (xpidl). + + This is a list of files that define XPCOM interface definitions. + Entries must be files that exist. Entries are almost certainly ``.idl`` + files. + """, + ), + "XPIDL_MODULE": ( + six.text_type, + six.text_type, + """XPCOM Interface Definition Module Name. + + This is the name of the ``.xpt`` file that is created by linking + ``XPIDL_SOURCES`` together. If unspecified, it defaults to be the same + as ``MODULE``. + """, + ), + "XPCOM_MANIFESTS": ( + ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList), + list, + """XPCOM Component Manifest Files. 
+
+        This is a list of files that define XPCOM components to be added
+        to the component registry.
+        """,
+    ),
+    "PREPROCESSED_IPDL_SOURCES": (
+        StrictOrderingOnAppendList,
+        list,
+        """Preprocessed IPDL source files.
+
+        These files will be preprocessed, then parsed and converted to
+        ``.cpp`` files.
+        """,
+    ),
+    "IPDL_SOURCES": (
+        StrictOrderingOnAppendList,
+        list,
+        """IPDL source files.
+
+        These are ``.ipdl`` files that will be parsed and converted to
+        ``.cpp`` files.
+        """,
+    ),
+    "WEBIDL_FILES": (
+        StrictOrderingOnAppendList,
+        list,
+        """WebIDL source files.
+
+        These will be parsed and converted to ``.cpp`` and ``.h`` files.
+        """,
+    ),
+    "GENERATED_EVENTS_WEBIDL_FILES": (
+        StrictOrderingOnAppendList,
+        list,
+        """WebIDL source files for generated events.
+
+        These will be parsed and converted to ``.cpp`` and ``.h`` files.
+        """,
+    ),
+    "TEST_WEBIDL_FILES": (
+        StrictOrderingOnAppendList,
+        list,
+        """Test WebIDL source files.
+
+        These will be parsed and converted to ``.cpp`` and ``.h`` files
+        if tests are enabled.
+        """,
+    ),
+    "GENERATED_WEBIDL_FILES": (
+        StrictOrderingOnAppendList,
+        list,
+        """Generated WebIDL source files.
+
+        These will be generated from some other files.
+        """,
+    ),
+    "PREPROCESSED_TEST_WEBIDL_FILES": (
+        StrictOrderingOnAppendList,
+        list,
+        """Preprocessed test WebIDL source files.
+
+        These will be preprocessed, then parsed and converted to ``.cpp``
+        and ``.h`` files if tests are enabled.
+        """,
+    ),
+    "PREPROCESSED_WEBIDL_FILES": (
+        StrictOrderingOnAppendList,
+        list,
+        """Preprocessed WebIDL source files.
+
+        These will be preprocessed before being parsed and converted.
+        """,
+    ),
+    "WEBIDL_EXAMPLE_INTERFACES": (
+        StrictOrderingOnAppendList,
+        list,
+        """Names of example WebIDL interfaces to build.
+
+        Names in this list correspond to WebIDL interface names defined in
+        WebIDL files included in the build from one of the \*WEBIDL_FILES
+        variables.
+        """,
+    ),
+    # Test declaration.
+    "A11Y_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining a11y tests.
+        """,
+    ),
+    "BROWSER_CHROME_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining browser chrome tests.
+        """,
+    ),
+    "ANDROID_INSTRUMENTATION_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining Android instrumentation tests.
+        """,
+    ),
+    "FIREFOX_UI_FUNCTIONAL_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining firefox-ui-functional tests.
+        """,
+    ),
+    "MARIONETTE_LAYOUT_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining marionette-layout tests.
+        """,
+    ),
+    "MARIONETTE_UNIT_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining marionette-unit tests.
+        """,
+    ),
+    "METRO_CHROME_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining metro browser chrome tests.
+        """,
+    ),
+    "MOCHITEST_CHROME_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining mochitest chrome tests.
+        """,
+    ),
+    "MOCHITEST_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining mochitest tests.
+        """,
+    ),
+    "REFTEST_MANIFESTS": (
+        ReftestManifestList,
+        list,
+        """List of manifest files defining reftests.
+
+        These are commonly named reftest.list.
+        """,
+    ),
+    "CRASHTEST_MANIFESTS": (
+        ReftestManifestList,
+        list,
+        """List of manifest files defining crashtests.
+
+        These are commonly named crashtests.list.
+        """,
+    ),
+    "XPCSHELL_TESTS_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining xpcshell tests.
+        """,
+    ),
+    "PYTHON_UNITTEST_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining Python unit tests.
+        """,
+    ),
+    "PERFTESTS_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining MozPerftest performance tests.
+        """,
+    ),
+    "CRAMTEST_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining cram unit tests.
+        """,
+    ),
+    "TELEMETRY_TESTS_CLIENT_MANIFESTS": (
+        ManifestparserManifestList,
+        list,
+        """List of manifest files defining telemetry client tests.
+        """,
+    ),
+    # The following variables are used to control the target of installed files.
+    "XPI_NAME": (
+        six.text_type,
+        six.text_type,
+        """The name of an extension XPI to generate.
+
+        When this variable is present, the results of this directory will end up
+        being packaged into an extension instead of the main dist/bin results.
+        """,
+    ),
+    "DIST_SUBDIR": (
+        six.text_type,
+        six.text_type,
+        """The name of an alternate directory to install files to.
+
+        When this variable is present, the results of this directory will end up
+        being placed in the $(DIST_SUBDIR) subdirectory of where it would
+        otherwise be placed.
+        """,
+    ),
+    "FINAL_TARGET": (
+        FinalTargetValue,
+        six.text_type,
+        """The name of the directory to install targets to.
+
+        The directory is relative to the top of the object directory. The
+        default value is dependent on the values of XPI_NAME and DIST_SUBDIR. If
+        neither is present, the result is dist/bin. If XPI_NAME is present, the
+        result is dist/xpi-stage/$(XPI_NAME). If DIST_SUBDIR is present, then
+        the $(DIST_SUBDIR) directory of the otherwise default value is used.
+        """,
+    ),
+    "USE_EXTENSION_MANIFEST": (
+        bool,
+        bool,
+        """Controls the name of the manifest for JAR files.
+
+        By default, the name of the manifest is ${JAR_MANIFEST}.manifest.
+        Setting this variable to ``True`` changes the name of the manifest to
+        chrome.manifest.
+        """,
+    ),
+    "GYP_DIRS": (
+        StrictOrderingOnAppendListWithFlagsFactory(
+            {
+                "variables": dict,
+                "input": six.text_type,
+                "sandbox_vars": dict,
+                "no_chromium": bool,
+                "no_unified": bool,
+                "non_unified_sources": StrictOrderingOnAppendList,
+                "action_overrides": dict,
+            }
+        ),
+        list,
+        """Defines a list of object directories handled by gyp configurations.
+
+        Elements of this list give the relative object directory. For each
+        element of the list, GYP_DIRS may be accessed as a dictionary
+        (GYP_DIRS[foo]). The object this returns has attributes that need to be
+        set to further specify gyp processing:
+            - input, gives the path to the root gyp configuration file for that
+              object directory.
+            - variables, a dictionary containing variables and values to pass
+              to the gyp processor.
+            - sandbox_vars, a dictionary containing variables and values to
+              pass to the mozbuild processor on top of those derived from gyp
+              configuration.
+            - no_chromium, a boolean which if set to True disables some
+              special handling that emulates gyp_chromium.
+            - no_unified, a boolean which if set to True disables source
+              file unification entirely.
+            - non_unified_sources, a list containing source files, relative to
+              the current moz.build, that should be excluded from source file
+              unification.
+            - action_overrides, a dict of action_name to values of the `script`
+              attribute to use for GENERATED_FILES for the specified action.
+
+        Typical use looks like:
+            GYP_DIRS += ['foo', 'bar']
+            GYP_DIRS['foo'].input = 'foo/foo.gyp'
+            GYP_DIRS['foo'].variables = {
+                'foo': 'bar',
+                (...)
+            }
+            (...)
+        """,
+    ),
+    "SPHINX_TREES": (
+        dict,
+        dict,
+        """Describes what the Sphinx documentation tree will look like.
+
+        Keys are relative directories inside the final Sphinx documentation
+        tree to install files into. Values are directories (relative to this
+        file) whose content to copy into the Sphinx documentation tree.
+        """,
+    ),
+    "SPHINX_PYTHON_PACKAGE_DIRS": (
+        StrictOrderingOnAppendList,
+        list,
+        """Directories containing Python packages that Sphinx documents.
+        """,
+    ),
+    "COMPILE_FLAGS": (
+        CompileFlags,
+        dict,
+        """Recipe for compile flags for this context. Not to be manipulated
+        directly.
+        """,
+    ),
+    "LINK_FLAGS": (
+        LinkFlags,
+        dict,
+        """Recipe for linker flags for this context. Not to be manipulated
+        directly.
+        """,
+    ),
+    "WASM_FLAGS": (
+        WasmFlags,
+        dict,
+        """Recipe for wasm flags for this context. Not to be
+        manipulated directly.
+        """,
+    ),
+    "ASM_FLAGS": (
+        AsmFlags,
+        dict,
+        """Recipe for assembler flags for this context. Not to be
+        manipulated directly.
+        """,
+    ),
+    "CFLAGS": (
+        List,
+        list,
+        """Flags passed to the C compiler for all of the C source files
+        declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "CXXFLAGS": (
+        List,
+        list,
+        """Flags passed to the C++ compiler for all of the C++ source files
+        declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "HOST_COMPILE_FLAGS": (
+        HostCompileFlags,
+        dict,
+        """Recipe for host compile flags for this context. Not to be manipulated
+        directly.
+        """,
+    ),
+    "HOST_DEFINES": (
+        InitializedDefines,
+        dict,
+        """Dictionary of compiler defines to declare for host compilation.
+        See ``DEFINES`` for specifics.
+        """,
+    ),
+    "WASM_CFLAGS": (
+        List,
+        list,
+        """Flags passed to the C-to-wasm compiler for all of the C
+        source files declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "WASM_CXXFLAGS": (
+        List,
+        list,
+        """Flags passed to the C++-to-wasm compiler for all of the
+        C++ source files declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "WASM_DEFINES": (
+        InitializedDefines,
+        dict,
+        """Dictionary of compiler defines to declare for wasm compilation.
+        See ``DEFINES`` for specifics.
+        """,
+    ),
+    "CMFLAGS": (
+        List,
+        list,
+        """Flags passed to the Objective-C compiler for all of the Objective-C
+        source files declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "CMMFLAGS": (
+        List,
+        list,
+        """Flags passed to the Objective-C++ compiler for all of the
+        Objective-C++ source files declared in this directory.
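+
+        For example, to append a single flag (the flag shown is purely
+        illustrative)::
+
+            CMMFLAGS += ['-fobjc-arc']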
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "ASFLAGS": (
+        List,
+        list,
+        """Flags passed to the assembler for all of the assembly source files
+        declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the assembler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "HOST_CFLAGS": (
+        List,
+        list,
+        """Flags passed to the host C compiler for all of the C source files
+        declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "HOST_CXXFLAGS": (
+        List,
+        list,
+        """Flags passed to the host C++ compiler for all of the C++ source files
+        declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the compiler's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "LDFLAGS": (
+        List,
+        list,
+        """Flags passed to the linker when linking all of the libraries and
+        executables declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the linker's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "EXTRA_DSO_LDOPTS": (
+        List,
+        list,
+        """Flags passed to the linker when linking a shared library.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the linker's command line in the same order as they
+        appear in the moz.build file.
+        """,
+    ),
+    "WIN32_EXE_LDFLAGS": (
+        List,
+        list,
+        """Flags passed to the linker when linking a Windows .exe executable
+        declared in this directory.
+
+        Note that the ordering of flags matters here; these flags will be
+        added to the linker's command line in the same order as they
+        appear in the moz.build file.
+
+        This variable only has an effect on Windows.
+        """,
+    ),
+    "TEST_HARNESS_FILES": (
+        ContextDerivedTypedHierarchicalStringList(Path),
+        list,
+        """List of files to be installed for test harnesses.
+
+        ``TEST_HARNESS_FILES`` can be used to install files to any directory
+        under $objdir/_tests. Files can be appended to a field to indicate
+        which subdirectory they should be exported to. For example,
+        to export ``foo.py`` to ``_tests/foo``, append to
+        ``TEST_HARNESS_FILES`` like so::
+
+            TEST_HARNESS_FILES.foo += ['foo.py']
+
+        Files from topsrcdir and the objdir can also be installed by prefixing
+        the path(s) with a '/' character and a '!' character, respectively::
+
+            TEST_HARNESS_FILES.path += ['/build/bar.py', '!quux.py']
+        """,
+    ),
+    "NO_EXPAND_LIBS": (
+        bool,
+        bool,
+        """Forces the build of a real static library, with no corresponding
+        fake library.
+        """,
+    ),
+    "USE_NASM": (
+        bool,
+        bool,
+        """Use the nasm assembler to assemble assembly files from SOURCES.
+
+        By default, the build will use the toolchain assembler, $(AS), to
+        assemble source files in assembly language (.s or .asm files). Setting
+        this value to ``True`` will cause it to use nasm instead.
+
+        If nasm is not available on this system, or does not support the
+        current target architecture, an error will be raised.
+        """,
+    ),
+    "USE_INTEGRATED_CLANGCL_AS": (
+        bool,
+        bool,
+        """Use the integrated clang-cl assembler to assemble assembly files from SOURCES.
+
+        This allows using clang-cl to assemble assembly files, which is useful
+        on platforms like aarch64 where the alternative is to have to run a
+        pre-processor to generate files with suitable syntax.
+        """,
+    ),
+}
+
+# Sanity check: we don't want any variable above to have a list as storage type.
+for name, (storage_type, input_types, docs) in VARIABLES.items():
+    if storage_type == list:
+        raise RuntimeError('%s has a "list" storage type. Use "List" instead.' % name)
+
+# Set of variables that are only allowed in templates:
+TEMPLATE_VARIABLES = {
+    "CPP_UNIT_TESTS",
+    "FORCE_SHARED_LIB",
+    "HOST_PROGRAM",
+    "HOST_LIBRARY_NAME",
+    "HOST_SIMPLE_PROGRAMS",
+    "IS_FRAMEWORK",
+    "IS_GKRUST",
+    "LIBRARY_NAME",
+    "PROGRAM",
+    "SIMPLE_PROGRAMS",
+}
+
+# Add a note to template variable documentation.
+for name in TEMPLATE_VARIABLES:
+    if name not in VARIABLES:
+        raise RuntimeError("%s is in TEMPLATE_VARIABLES but not in VARIABLES." % name)
+    storage_type, input_types, docs = VARIABLES[name]
+    docs += "This variable is only available in templates.\n"
+    VARIABLES[name] = (storage_type, input_types, docs)
+
+
+# The set of functions exposed to the sandbox.
+#
+# Each entry is a tuple of:
+#
+#  (function returning the corresponding function from a given sandbox,
+#   (argument types), docs)
+#
+# The first element is an attribute on Sandbox that should be a function type.
+#
+FUNCTIONS = {
+    "include": (
+        lambda self: self._include,
+        (SourcePath,),
+        """Include another mozbuild file in the context of this one.
+
+        This is similar to a ``#include`` in C languages. The filename passed to
+        the function will be read and its contents will be evaluated within the
+        context of the calling file.
+
+        If a relative path is given, it is evaluated as relative to the file
+        currently being processed. If there is a chain of multiple include(),
+        the relative path computation is from the most recent/active file.
+
+        If an absolute path is given, it is evaluated from ``TOPSRCDIR``. In
+        other words, ``include('/foo')`` references the path
+        ``TOPSRCDIR + '/foo'``.
+
+        Example usage
+        ^^^^^^^^^^^^^
+
+        Include ``sibling.build`` from the current directory::
+
+            include('sibling.build')
+
+        Include ``foo.build`` from a path within the top source directory::
+
+            include('/elsewhere/foo.build')
+        """,
+    ),
+    "export": (
+        lambda self: self._export,
+        (str,),
+        """Make the specified variable available to all child directories.
+
+        The variable specified by the argument string is added to the
+        environment of all directories specified in the DIRS and TEST_DIRS
+        variables. If those directories themselves have child directories,
+        the variable will be exported to all of them.
+
+        The value used for the variable is the final value at the end of the
+        moz.build file, so it is possible (but not recommended style) to place
+        the export before the definition of the variable.
+
+        This function is limited to the upper-case variables that have special
+        meaning in moz.build files.
+
+        NOTE: Please consult with a build peer (on the #build channel at
+        https://chat.mozilla.org) before adding a new use of this function.
+
+        Example usage
+        ^^^^^^^^^^^^^
+
+        To make all child directories install as the given extension::
+
+            XPI_NAME = 'cool-extension'
+            export('XPI_NAME')
+        """,
+    ),
+    "warning": (
+        lambda self: self._warning,
+        (str,),
+        """Issue a warning.
+
+        Warnings are string messages that are printed during execution.
+
+        Warnings do not abort execution.
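+
+        Example usage
+        ^^^^^^^^^^^^^
+
+        Warn about a (hypothetical) deprecated usage::
+
+            warning('FOO is deprecated; use BAR instead')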
+ """, + ), + "error": ( + lambda self: self._error, + (str,), + """Issue a fatal error. + + If this function is called, processing is aborted immediately. + """, + ), + "template": ( + lambda self: self._template_decorator, + (FunctionType,), + """Decorator for template declarations. + + Templates are a special kind of functions that can be declared in + mozbuild files. Uppercase variables assigned in the function scope + are considered to be the result of the template. + + Contrary to traditional python functions: + - return values from template functions are ignored, + - template functions don't have access to the global scope. + + Example template + ^^^^^^^^^^^^^^^^ + + The following ``Program`` template sets two variables ``PROGRAM`` and + ``USE_LIBS``. ``PROGRAM`` is set to the argument given on the template + invocation, and ``USE_LIBS`` to contain "mozglue":: + + @template + def Program(name): + PROGRAM = name + USE_LIBS += ['mozglue'] + + Template invocation + ^^^^^^^^^^^^^^^^^^^ + + A template is invoked in the form of a function call:: + + Program('myprog') + + The result of the template, being all the uppercase variable it sets + is mixed to the existing set of variables defined in the mozbuild file + invoking the template:: + + FINAL_TARGET = 'dist/other' + USE_LIBS += ['mylib'] + Program('myprog') + USE_LIBS += ['otherlib'] + + The above mozbuild results in the following variables set: + + - ``FINAL_TARGET`` is 'dist/other' + - ``USE_LIBS`` is ['mylib', 'mozglue', 'otherlib'] + - ``PROGRAM`` is 'myprog' + + """, + ), +} + + +TestDirsPlaceHolder = List() + + +# Special variables. These complement VARIABLES. +# +# Each entry is a tuple of: +# +# (function returning the corresponding value from a given context, type, docs) +# +SPECIAL_VARIABLES = { + "TOPSRCDIR": ( + lambda context: context.config.topsrcdir, + str, + """Constant defining the top source directory. + + The top source directory is the parent directory containing the source + code and all build files. It is typically the root directory of a + cloned repository. + """, + ), + "TOPOBJDIR": ( + lambda context: context.config.topobjdir, + str, + """Constant defining the top object directory. + + The top object directory is the parent directory which will contain + the output of the build. This is commonly referred to as "the object + directory." + """, + ), + "RELATIVEDIR": ( + lambda context: context.relsrcdir, + str, + """Constant defining the relative path of this file. + + The relative path is from ``TOPSRCDIR``. This is defined as relative + to the main file being executed, regardless of whether additional + files have been included using ``include()``. + """, + ), + "SRCDIR": ( + lambda context: context.srcdir, + str, + """Constant defining the source directory of this file. + + This is the path inside ``TOPSRCDIR`` where this file is located. It + is the same as ``TOPSRCDIR + RELATIVEDIR``. + """, + ), + "OBJDIR": ( + lambda context: context.objdir, + str, + """The path to the object directory for this file. + + Is is the same as ``TOPOBJDIR + RELATIVEDIR``. + """, + ), + "CONFIG": ( + lambda context: ReadOnlyKeyedDefaultDict( + lambda key: context.config.substs.get(key) + ), + dict, + """Dictionary containing the current configuration variables. + + All the variables defined by the configuration system are available + through this object. e.g. ``ENABLE_TESTS``, ``CFLAGS``, etc. + + Values in this container are read-only. Attempts at changing values + will result in a run-time error. 
+
+        Access to an unknown variable will return None.
+        """,
+    ),
+    "EXTRA_COMPONENTS": (
+        lambda context: context["FINAL_TARGET_FILES"].components._strings,
+        list,
+        """Additional component files to distribute.
+
+        This variable contains a list of files to copy into
+        ``$(FINAL_TARGET)/components/``.
+        """,
+    ),
+    "EXTRA_PP_COMPONENTS": (
+        lambda context: context["FINAL_TARGET_PP_FILES"].components._strings,
+        list,
+        """JavaScript XPCOM files.
+
+        This variable contains a list of files to preprocess. Generated
+        files will be installed in the ``/components`` directory of the distribution.
+        """,
+    ),
+    "JS_PREFERENCE_FILES": (
+        lambda context: context["FINAL_TARGET_FILES"].defaults.pref._strings,
+        list,
+        """Exported JavaScript files.
+
+        A list of files copied into the dist directory for packaging and installation.
+        The path will be under the GRE or application prefs directory, depending
+        on what is being built.
+        """,
+    ),
+    "JS_PREFERENCE_PP_FILES": (
+        lambda context: context["FINAL_TARGET_PP_FILES"].defaults.pref._strings,
+        list,
+        """Like JS_PREFERENCE_FILES, but preprocessed.
+        """,
+    ),
+    "RESOURCE_FILES": (
+        lambda context: context["FINAL_TARGET_FILES"].res,
+        list,
+        """List of resources to be exported, and in which subdirectories.
+
+        ``RESOURCE_FILES`` is used to list the resource files to be exported to
+        ``dist/bin/res``, but it can be used for other files as well. This variable
+        behaves as a list when appending filenames for resources in the top-level
+        directory. Files can also be appended to a field to indicate which
+        subdirectory they should be exported to. For example, to export
+        ``foo.res`` to the top-level directory, and ``bar.res`` to ``fonts/``,
+        append to ``RESOURCE_FILES`` like so::
+
+            RESOURCE_FILES += ['foo.res']
+            RESOURCE_FILES.fonts += ['bar.res']
+        """,
+    ),
+    "CONTENT_ACCESSIBLE_FILES": (
+        lambda context: context["FINAL_TARGET_FILES"].contentaccessible,
+        list,
+        """List of files which can be accessed by web content through resource:// URIs.
+
+        ``CONTENT_ACCESSIBLE_FILES`` is used to list the files to be exported
+        to ``dist/bin/contentaccessible``. Files can also be appended to a
+        field to indicate which subdirectory they should be exported to.
+        """,
+    ),
+    "EXTRA_JS_MODULES": (
+        lambda context: context["FINAL_TARGET_FILES"].modules,
+        list,
+        """Additional JavaScript files to distribute.
+
+        This variable contains a list of files to copy into
+        ``$(FINAL_TARGET)/modules``.
+        """,
+    ),
+    "EXTRA_PP_JS_MODULES": (
+        lambda context: context["FINAL_TARGET_PP_FILES"].modules,
+        list,
+        """Additional JavaScript files to distribute.
+
+        This variable contains a list of files to copy into
+        ``$(FINAL_TARGET)/modules``, after preprocessing.
+        """,
+    ),
+    "TESTING_JS_MODULES": (
+        lambda context: context["TEST_HARNESS_FILES"].modules,
+        list,
+        """JavaScript modules to install in the test-only destination.
+
+        Some JavaScript modules (JSMs) are test-only and not distributed
+        with Firefox. This variable defines them.
+
+        To install modules in a subdirectory, use properties of this
+        variable to control the final destination. e.g.
+
+        ``TESTING_JS_MODULES.foo += ['module.jsm']``.
+        """,
+    ),
+    "TEST_DIRS": (
+        lambda context: context["DIRS"]
+        if context.config.substs.get("ENABLE_TESTS")
+        else TestDirsPlaceHolder,
+        list,
+        """Like DIRS but only for directories that contain test-only code.
+
+        If tests are not enabled, this variable will be ignored.
+
+        This variable may go away once the transition away from Makefiles is
+        complete.
+        """,
+    ),
+}
+
+# Deprecation hints.
+DEPRECATION_HINTS = {
+    "ASM_FLAGS": """
+        Please use
+
+            ASFLAGS
+
+        instead of manipulating ASM_FLAGS directly.
+        """,
+    "CPP_UNIT_TESTS": """
+        Please use
+
+            CppUnitTests(['foo', 'bar'])
+
+        instead of
+
+            CPP_UNIT_TESTS += ['foo', 'bar']
+        """,
+    "DISABLE_STL_WRAPPING": """
+        Please use
+
+            DisableStlWrapping()
+
+        instead of
+
+            DISABLE_STL_WRAPPING = True
+        """,
+    "HOST_PROGRAM": """
+        Please use
+
+            HostProgram('foo')
+
+        instead of
+
+            HOST_PROGRAM = 'foo'
+        """,
+    "HOST_LIBRARY_NAME": """
+        Please use
+
+            HostLibrary('foo')
+
+        instead of
+
+            HOST_LIBRARY_NAME = 'foo'
+        """,
+    "HOST_SIMPLE_PROGRAMS": """
+        Please use
+
+            HostSimplePrograms(['foo', 'bar'])
+
+        instead of
+
+            HOST_SIMPLE_PROGRAMS += ['foo', 'bar']
+        """,
+    "LIBRARY_NAME": """
+        Please use
+
+            Library('foo')
+
+        instead of
+
+            LIBRARY_NAME = 'foo'
+        """,
+    "NO_VISIBILITY_FLAGS": """
+        Please use
+
+            NoVisibilityFlags()
+
+        instead of
+
+            NO_VISIBILITY_FLAGS = True
+        """,
+    "PROGRAM": """
+        Please use
+
+            Program('foo')
+
+        instead of
+
+            PROGRAM = 'foo'
+        """,
+    "SIMPLE_PROGRAMS": """
+        Please use
+
+            SimplePrograms(['foo', 'bar'])
+
+        instead of
+
+            SIMPLE_PROGRAMS += ['foo', 'bar']
+        """,
+    "ALLOW_COMPILER_WARNINGS": """
+        Please use
+
+            AllowCompilerWarnings()
+
+        instead of
+
+            ALLOW_COMPILER_WARNINGS = True
+        """,
+    "FORCE_SHARED_LIB": """
+        Please use
+
+            SharedLibrary('foo')
+
+        instead of
+
+            Library('foo') [ or LIBRARY_NAME = 'foo' ]
+            FORCE_SHARED_LIB = True
+        """,
+    "IS_FRAMEWORK": """
+        Please use
+
+            Framework('foo')
+
+        instead of
+
+            Library('foo') [ or LIBRARY_NAME = 'foo' ]
+            IS_FRAMEWORK = True
+        """,
+    "IS_GKRUST": """
+        Please use
+
+            RustLibrary('gkrust', ... is_gkrust=True)
+
+        instead of
+
+            RustLibrary('gkrust') [ or LIBRARY_NAME = 'gkrust' ]
+            IS_GKRUST = True
+        """,
+    "TOOL_DIRS": "Please use the DIRS variable instead.",
+    "TEST_TOOL_DIRS": "Please use the TEST_DIRS variable instead.",
+    "PARALLEL_DIRS": "Please use the DIRS variable instead.",
+    "NO_DIST_INSTALL": """
+        Please use
+
+            DIST_INSTALL = False
+
+        instead of
+
+            NO_DIST_INSTALL = True
+        """,
+    "GENERATED_SOURCES": """
+        Please use
+
+            SOURCES += [ '!foo.cpp' ]
+
+        instead of
+
+            GENERATED_SOURCES += [ 'foo.cpp' ]
+        """,
+    "GENERATED_INCLUDES": """
+        Please use
+
+            LOCAL_INCLUDES += [ '!foo' ]
+
+        instead of
+
+            GENERATED_INCLUDES += [ 'foo' ]
+        """,
+    "DIST_FILES": """
+        Please use
+
+            FINAL_TARGET_PP_FILES += [ 'foo' ]
+
+        instead of
+
+            DIST_FILES += [ 'foo' ]
+        """,
+}
+
+# Make sure that all template variables have a deprecation hint.
+for name in TEMPLATE_VARIABLES:
+    if name not in DEPRECATION_HINTS:
+        raise RuntimeError("Missing deprecation hint for %s" % name)
diff --git a/python/mozbuild/mozbuild/frontend/data.py b/python/mozbuild/mozbuild/frontend/data.py
new file mode 100644
index 0000000000..84a47f90cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -0,0 +1,1369 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Data structures representing Mozilla's source tree.
+
+The frontend files are parsed into static data structures. These data
+structures are defined in this module.
+
+All data structures of interest are children of the TreeMetadata class.
+
+Logic for populating these data structures is not defined in this module.
+Instead, what we have here are dumb container classes. The emitter module
+contains the code for converting executed mozbuild files into these data
+structures.
+"""
+
+from collections import OrderedDict, defaultdict
+
+import mozpack.path as mozpath
+import six
+from mozpack.chrome.manifest import ManifestEntry
+
+from mozbuild.frontend.context import ObjDirPath, SourcePath
+
+from ..testing import all_test_flavors
+from ..util import group_unified_files
+from .context import FinalTargetValue
+
+
+class TreeMetadata(object):
+    """Base class for all data being captured."""
+
+    __slots__ = ()
+
+    def to_dict(self):
+        return {k.lower(): getattr(self, k) for k in self.DICT_ATTRS}
+
+
+class ContextDerived(TreeMetadata):
+    """Build object derived from a single Context instance.
+
+    It holds fields common to all context derived classes. This class is likely
+    never instantiated directly but is instead derived from.
+    """
+
+    __slots__ = (
+        "context_main_path",
+        "context_all_paths",
+        "topsrcdir",
+        "topobjdir",
+        "relsrcdir",
+        "srcdir",
+        "objdir",
+        "config",
+        "_context",
+    )
+
+    def __init__(self, context):
+        TreeMetadata.__init__(self)
+
+        # Capture the files that were evaluated to fill this context.
+        self.context_main_path = context.main_path
+        self.context_all_paths = context.all_paths
+
+        # Basic directory state.
+        self.topsrcdir = context.config.topsrcdir
+        self.topobjdir = context.config.topobjdir
+
+        self.relsrcdir = context.relsrcdir
+        self.srcdir = context.srcdir
+        self.objdir = context.objdir
+
+        self.config = context.config
+
+        self._context = context
+
+    @property
+    def install_target(self):
+        return self._context["FINAL_TARGET"]
+
+    @property
+    def installed(self):
+        return self._context["DIST_INSTALL"] is not False
+
+    @property
+    def defines(self):
+        defines = self._context["DEFINES"]
+        return Defines(self._context, defines) if defines else None
+
+    @property
+    def relobjdir(self):
+        return mozpath.relpath(self.objdir, self.topobjdir)
+
+
+class HostMixin(object):
+    @property
+    def defines(self):
+        defines = self._context["HOST_DEFINES"]
+        return HostDefines(self._context, defines) if defines else None
+
+
+class DirectoryTraversal(ContextDerived):
+    """Describes how directory traversal for building should work.
+
+    This build object is likely only of interest to the recursive make backend.
+    Other build backends should (ideally) not attempt to mimic the behavior of
+    the recursive make backend. The only reason this exists is to support the
+    existing recursive make backend until the transition to mozbuild frontend
+    files is complete and we move to a more optimal build backend.
+
+    Fields in this class correspond to similarly named variables in the
+    frontend files.
+    """
+
+    __slots__ = ("dirs",)
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+
+        self.dirs = []
+
+
+class BaseConfigSubstitution(ContextDerived):
+    """Base class describing autogenerated files as part of config.status."""
+
+    __slots__ = ("input_path", "output_path", "relpath")
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+
+        self.input_path = None
+        self.output_path = None
+        self.relpath = None
+
+
+class ConfigFileSubstitution(BaseConfigSubstitution):
+    """Describes a config file that will be generated using substitutions."""
+
+
+class VariablePassthru(ContextDerived):
+    """A dict of variables to pass through to backend.mk unaltered.
+
+    The purpose of this object is to facilitate rapid transitioning of
+    variables from Makefile.in to moz.build. In the ideal world, this class
+    does not exist and every variable has a richer class representing it.
+    As long as we rely on this class, we lose the ability to have flexibility
+    in our build backends since we will continue to be tied to our rules.mk.
+    """
+
+    __slots__ = "variables"
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+        self.variables = {}
+
+
+class ComputedFlags(ContextDerived):
+    """Aggregate flags for consumption by various backends."""
+
+    __slots__ = ("flags",)
+
+    def __init__(self, context, reader_flags):
+        ContextDerived.__init__(self, context)
+        self.flags = reader_flags
+
+    def resolve_flags(self, key, value):
+        # Bypass checks done by CompileFlags that would keep us from
+        # setting a value here.
+        dict.__setitem__(self.flags, key, value)
+
+    def get_flags(self):
+        flags = defaultdict(list)
+        for key, _, dest_vars in self.flags.flag_variables:
+            value = self.flags.get(key)
+            if value:
+                for dest_var in dest_vars:
+                    flags[dest_var].extend(value)
+        return sorted(flags.items())
+
+
+class XPIDLModule(ContextDerived):
+    """Describes an XPIDL module to be compiled."""
+
+    __slots__ = ("name", "idl_files")
+
+    def __init__(self, context, name, idl_files):
+        ContextDerived.__init__(self, context)
+
+        assert all(isinstance(idl, SourcePath) for idl in idl_files)
+        self.name = name
+        self.idl_files = idl_files
+
+
+class BaseDefines(ContextDerived):
+    """Context derived container object for DEFINES/HOST_DEFINES,
+    which are OrderedDicts.
+    """
+
+    __slots__ = "defines"
+
+    def __init__(self, context, defines):
+        ContextDerived.__init__(self, context)
+        self.defines = defines
+
+    def get_defines(self):
+        for define, value in six.iteritems(self.defines):
+            if value is True:
+                yield ("-D%s" % define)
+            elif value is False:
+                yield ("-U%s" % define)
+            else:
+                yield ("-D%s=%s" % (define, value))
+
+    def update(self, more_defines):
+        if isinstance(more_defines, Defines):
+            self.defines.update(more_defines.defines)
+        else:
+            self.defines.update(more_defines)
+
+
+class Defines(BaseDefines):
+    pass
+
+
+class HostDefines(BaseDefines):
+    pass
+
+
+class WasmDefines(BaseDefines):
+    pass
+
+
+class WebIDLCollection(ContextDerived):
+    """Collects WebIDL info referenced during the build."""
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+        self.sources = set()
+        self.generated_sources = set()
+        self.generated_events_sources = set()
+        self.preprocessed_sources = set()
+        self.test_sources = set()
+        self.preprocessed_test_sources = set()
+        self.example_interfaces = set()
+
+    def all_regular_sources(self):
+        return (
+            self.sources
+            | self.generated_sources
+            | self.generated_events_sources
+            | self.preprocessed_sources
+        )
+
+    def all_regular_basenames(self):
+        return [mozpath.basename(source) for source in self.all_regular_sources()]
+
+    def all_regular_stems(self):
+        return [mozpath.splitext(b)[0] for b in self.all_regular_basenames()]
+
+    def all_regular_bindinggen_stems(self):
+        for stem in self.all_regular_stems():
+            yield "%sBinding" % stem
+
+        for source in self.generated_events_sources:
+            yield mozpath.splitext(mozpath.basename(source))[0]
+
+    def all_regular_cpp_basenames(self):
+        for stem in self.all_regular_bindinggen_stems():
+            yield "%s.cpp" % stem
+
+    def all_test_sources(self):
+        return self.test_sources | self.preprocessed_test_sources
+
+    def all_test_basenames(self):
+        return [mozpath.basename(source) for source in self.all_test_sources()]
+
+    def all_test_stems(self):
+        return [mozpath.splitext(b)[0] for b in self.all_test_basenames()]
+
+    def all_test_cpp_basenames(self):
+        return sorted("%sBinding.cpp" % s for s in self.all_test_stems())
+
+    def all_static_sources(self):
+        return self.sources | self.generated_events_sources | self.test_sources
+
+    def all_non_static_sources(self):
+        return self.generated_sources | self.all_preprocessed_sources()
+
+    def all_non_static_basenames(self):
+        return [mozpath.basename(s) for s in self.all_non_static_sources()]
+
+    def all_preprocessed_sources(self):
+        return self.preprocessed_sources | self.preprocessed_test_sources
+
+    def all_sources(self):
+        return set(self.all_regular_sources()) | set(self.all_test_sources())
+
+    def all_basenames(self):
+        return [mozpath.basename(source) for source in self.all_sources()]
+
+    def all_stems(self):
+        return [mozpath.splitext(b)[0] for b in self.all_basenames()]
+
+    def generated_events_basenames(self):
+        return [mozpath.basename(s) for s in self.generated_events_sources]
+
+    def generated_events_stems(self):
+        return [mozpath.splitext(b)[0] for b in self.generated_events_basenames()]
+
+    @property
+    def unified_source_mapping(self):
+        # Bindings are compiled in unified mode to speed up compilation and
+        # to reduce linker memory size. Note that test bindings are separated
+        # from regular ones so test bindings aren't shipped.
+        return list(
+            group_unified_files(
+                sorted(self.all_regular_cpp_basenames()),
+                unified_prefix="UnifiedBindings",
+                unified_suffix="cpp",
+                files_per_unified_file=32,
+            )
+        )
+
+    def all_source_files(self):
+        from mozwebidlcodegen import WebIDLCodegenManager
+
+        return sorted(list(WebIDLCodegenManager.GLOBAL_DEFINE_FILES)) + sorted(
+            set(p for p, _ in self.unified_source_mapping)
+        )
+
+
+class IPDLCollection(ContextDerived):
+    """Collects IPDL files during the build."""
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+        self.sources = set()
+        self.preprocessed_sources = set()
+
+    def all_sources(self):
+        return self.sources | self.preprocessed_sources
+
+    def all_regular_sources(self):
+        return self.sources
+
+    def all_preprocessed_sources(self):
+        return self.preprocessed_sources
+
+    def all_source_files(self):
+        # Source files generated by IPDL are built as generated UnifiedSources
+        # from the context which included the IPDL file, rather than the context
+        # which builds the IPDLCollection, so we report no files here.
+        return []
+
+
+class XPCOMComponentManifests(ContextDerived):
+    """Collects XPCOM manifest files during the build."""
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+        self.manifests = set()
+
+    def all_sources(self):
+        return self.manifests
+
+    def all_source_files(self):
+        return []
+
+
+class LinkageWrongKindError(Exception):
+    """Error thrown when trying to link objects of the wrong kind"""
+
+
+class Linkable(ContextDerived):
+    """Generic context derived container object for programs and libraries"""
+
+    __slots__ = (
+        "cxx_link",
+        "lib_defines",
+        "linked_libraries",
+        "linked_system_libs",
+        "sources",
+    )
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+        self.cxx_link = False
+        self.linked_libraries = []
+        self.linked_system_libs = []
+        self.lib_defines = Defines(context, OrderedDict())
+        self.sources = defaultdict(list)
+
+    def link_library(self, obj):
+        assert isinstance(obj, BaseLibrary)
+        if obj.KIND != self.KIND:
+            raise LinkageWrongKindError("%s != %s" % (obj.KIND, self.KIND))
+        self.linked_libraries.append(obj)
+        if obj.cxx_link and not isinstance(obj, SharedLibrary):
+            self.cxx_link = True
+        obj.refs.append(self)
+
+    def link_system_library(self, lib):
+        # The '$' check is here as a special temporary rule, allowing the
+        # inherited use of make variables, most notably in TK_LIBS.
+        if not lib.startswith("$") and not lib.startswith("-"):
+            type_var = "HOST_CC_TYPE" if self.KIND == "host" else "CC_TYPE"
+            compiler_type = self.config.substs.get(type_var)
+            if compiler_type in ("gcc", "clang"):
+                lib = "-l%s" % lib
+            elif self.KIND == "host":
+                lib = "%s%s%s" % (
+                    self.config.host_import_prefix,
+                    lib,
+                    self.config.host_import_suffix,
+                )
+            else:
+                lib = "%s%s%s" % (
+                    self.config.import_prefix,
+                    lib,
+                    self.config.import_suffix,
+                )
+        self.linked_system_libs.append(lib)
+
+    def source_files(self):
+        all_sources = []
+        # This is ordered for reproducibility and consistency with
+        # config/rules.mk
+        for suffix in (".c", ".S", ".cpp", ".m", ".mm", ".s"):
+            all_sources += self.sources.get(suffix, [])
+        return all_sources
+
+    def _get_objs(self, sources):
+        obj_prefix = ""
+        if self.KIND == "host":
+            obj_prefix = "host_"
+
+        return [
+            mozpath.join(
+                self.objdir,
+                "%s%s.%s"
+                % (
+                    obj_prefix,
+                    mozpath.splitext(mozpath.basename(f))[0],
+                    self._obj_suffix(),
+                ),
+            )
+            for f in sources
+        ]
+
+    def _obj_suffix(self):
+        """Can be overridden by a base class for custom behavior."""
+        return self.config.substs.get("OBJ_SUFFIX", "")
+
+    @property
+    def objs(self):
+        return self._get_objs(self.source_files())
+
+
+class BaseProgram(Linkable):
+    """Context derived container object for programs, which is a unicode
+    string.
+
+    This class handles automatically appending a binary suffix to the program
+    name.
+    If the suffix is not defined, the program name is unchanged.
+    Otherwise, if the program name ends with the given suffix, it is unchanged.
+    Otherwise, the suffix is appended to the program name.
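+
+    For example, with a (hypothetical) BIN_SUFFIX of '.exe', a program named
+    'foo' becomes 'foo.exe', while 'foo.exe' is left unchanged.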
+ """ + + __slots__ = "program" + + DICT_ATTRS = {"install_target", "KIND", "program", "relobjdir"} + + def __init__(self, context, program, is_unit_test=False): + Linkable.__init__(self, context) + + bin_suffix = context.config.substs.get(self.SUFFIX_VAR, "") + if not program.endswith(bin_suffix): + program += bin_suffix + self.program = program + self.is_unit_test = is_unit_test + + @property + def output_path(self): + if self.installed: + return ObjDirPath( + self._context, "!/" + mozpath.join(self.install_target, self.program) + ) + else: + return ObjDirPath(self._context, "!" + self.program) + + def __repr__(self): + return "<%s: %s/%s>" % (type(self).__name__, self.relobjdir, self.program) + + @property + def name(self): + return self.program + + +class Program(BaseProgram): + """Context derived container object for PROGRAM""" + + SUFFIX_VAR = "BIN_SUFFIX" + KIND = "target" + + +class HostProgram(HostMixin, BaseProgram): + """Context derived container object for HOST_PROGRAM""" + + SUFFIX_VAR = "HOST_BIN_SUFFIX" + KIND = "host" + + @property + def install_target(self): + return "dist/host/bin" + + +class SimpleProgram(BaseProgram): + """Context derived container object for each program in SIMPLE_PROGRAMS""" + + SUFFIX_VAR = "BIN_SUFFIX" + KIND = "target" + + def source_files(self): + for srcs in self.sources.values(): + for f in srcs: + if ( + mozpath.basename(mozpath.splitext(f)[0]) + == mozpath.splitext(self.program)[0] + ): + return [f] + return [] + + +class HostSimpleProgram(HostMixin, BaseProgram): + """Context derived container object for each program in + HOST_SIMPLE_PROGRAMS""" + + SUFFIX_VAR = "HOST_BIN_SUFFIX" + KIND = "host" + + def source_files(self): + for srcs in self.sources.values(): + for f in srcs: + if ( + "host_%s" % mozpath.basename(mozpath.splitext(f)[0]) + == mozpath.splitext(self.program)[0] + ): + return [f] + return [] + + +def cargo_output_directory(context, target_var): + # cargo creates several directories and places its build artifacts + # in those directories. The directory structure depends not only + # on the target, but also what sort of build we are doing. + rust_build_kind = "release" + if context.config.substs.get("MOZ_DEBUG_RUST"): + rust_build_kind = "debug" + return mozpath.join(context.config.substs[target_var], rust_build_kind) + + +# Rust programs aren't really Linkable, since Cargo handles all the details +# of linking things. +class BaseRustProgram(ContextDerived): + __slots__ = ( + "name", + "cargo_file", + "location", + "SUFFIX_VAR", + "KIND", + "TARGET_SUBST_VAR", + ) + + def __init__(self, context, name, cargo_file): + ContextDerived.__init__(self, context) + self.name = name + self.cargo_file = cargo_file + # Skip setting properties below which depend on cargo + # when we don't have a compile environment. The required + # config keys won't be available, but the instance variables + # that we don't set should never be accessed by the actual + # build in that case. 
+        if not context.config.substs.get("COMPILE_ENVIRONMENT"):
+            return
+        cargo_dir = cargo_output_directory(context, self.TARGET_SUBST_VAR)
+        exe_file = "%s%s" % (name, context.config.substs.get(self.SUFFIX_VAR, ""))
+        self.location = mozpath.join(cargo_dir, exe_file)
+
+
+class RustProgram(BaseRustProgram):
+    SUFFIX_VAR = "BIN_SUFFIX"
+    KIND = "target"
+    TARGET_SUBST_VAR = "RUST_TARGET"
+
+
+class HostRustProgram(BaseRustProgram):
+    SUFFIX_VAR = "HOST_BIN_SUFFIX"
+    KIND = "host"
+    TARGET_SUBST_VAR = "RUST_HOST_TARGET"
+
+
+class RustTests(ContextDerived):
+    __slots__ = ("names", "features", "output_category")
+
+    def __init__(self, context, names, features):
+        ContextDerived.__init__(self, context)
+        self.names = names
+        self.features = features
+        self.output_category = "rusttests"
+
+
+class BaseLibrary(Linkable):
+    """Generic context derived container object for libraries."""
+
+    __slots__ = ("basename", "lib_name", "import_name", "refs")
+
+    def __init__(self, context, basename):
+        Linkable.__init__(self, context)
+
+        self.basename = self.lib_name = basename
+        if self.lib_name:
+            self.lib_name = "%s%s%s" % (
+                context.config.lib_prefix,
+                self.lib_name,
+                context.config.lib_suffix,
+            )
+            self.import_name = self.lib_name
+
+        self.refs = []
+
+    def __repr__(self):
+        return "<%s: %s/%s>" % (type(self).__name__, self.relobjdir, self.lib_name)
+
+    @property
+    def name(self):
+        return self.lib_name
+
+
+class Library(BaseLibrary):
+    """Context derived container object for a library"""
+
+    KIND = "target"
+    __slots__ = ()
+
+    def __init__(self, context, basename, real_name=None):
+        BaseLibrary.__init__(self, context, real_name or basename)
+        self.basename = basename
+
+
+class StaticLibrary(Library):
+    """Context derived container object for a static library"""
+
+    __slots__ = ("link_into", "no_expand_lib")
+
+    def __init__(
+        self, context, basename, real_name=None, link_into=None, no_expand_lib=False
+    ):
+        Library.__init__(self, context, basename, real_name)
+        self.link_into = link_into
+        self.no_expand_lib = no_expand_lib
+
+
+class SandboxedWasmLibrary(Library):
+    """Context derived container object for a static sandboxed wasm library"""
+
+    # This is a real static library; make it known to the build system.
+    no_expand_lib = True
+    KIND = "wasm"
+
+    def __init__(self, context, basename, real_name=None):
+        Library.__init__(self, context, basename, real_name)
+
+        # Wasm libraries are not going to compile unless we have a compiler
+        # for them.
+        assert context.config.substs["WASM_CC"] and context.config.substs["WASM_CXX"]
+
+        self.lib_name = "%s%s%s" % (
+            context.config.dll_prefix,
+            real_name or basename,
+            context.config.dll_suffix,
+        )
+
+    def _obj_suffix(self):
+        """Can be overridden by a base class for custom behavior."""
+        return self.config.substs.get("WASM_OBJ_SUFFIX", "")
+
+
+class BaseRustLibrary(object):
+    slots = (
+        "cargo_file",
+        "crate_type",
+        "dependencies",
+        "deps_path",
+        "features",
+        "output_category",
+        "is_gkrust",
+    )
+
+    def init(
+        self,
+        context,
+        basename,
+        cargo_file,
+        crate_type,
+        dependencies,
+        features,
+        is_gkrust,
+    ):
+        self.is_gkrust = is_gkrust
+        self.cargo_file = cargo_file
+        self.crate_type = crate_type
+        # We need to adjust our naming here because cargo replaces '-' in
+        # package names defined in Cargo.toml with underscores in actual
+        # filenames. But we need to keep the basename consistent because
+        # many other things in the build system depend on that.
+        assert self.crate_type == "staticlib"
+        self.lib_name = "%s%s%s" % (
+            context.config.lib_prefix,
+            basename.replace("-", "_"),
+            context.config.lib_suffix,
+        )
+        self.dependencies = dependencies
+        self.features = features
+        self.output_category = context.get("RUST_LIBRARY_OUTPUT_CATEGORY")
+        # Skip setting properties below which depend on cargo
+        # when we don't have a compile environment. The required
+        # config keys won't be available, but the instance variables
+        # that we don't set should never be accessed by the actual
+        # build in that case.
+        if not context.config.substs.get("COMPILE_ENVIRONMENT"):
+            return
+        build_dir = mozpath.join(
+            context.config.topobjdir,
+            cargo_output_directory(context, self.TARGET_SUBST_VAR),
+        )
+        self.import_name = mozpath.join(build_dir, self.lib_name)
+        self.deps_path = mozpath.join(build_dir, "deps")
+
+
+class RustLibrary(StaticLibrary, BaseRustLibrary):
+    """Context derived container object for a rust static library"""
+
+    KIND = "target"
+    TARGET_SUBST_VAR = "RUST_TARGET"
+    FEATURES_VAR = "RUST_LIBRARY_FEATURES"
+    LIB_FILE_VAR = "RUST_LIBRARY_FILE"
+    __slots__ = BaseRustLibrary.slots
+
+    def __init__(
+        self,
+        context,
+        basename,
+        cargo_file,
+        crate_type,
+        dependencies,
+        features,
+        is_gkrust=False,
+        link_into=None,
+    ):
+        StaticLibrary.__init__(
+            self,
+            context,
+            basename,
+            link_into=link_into,
+            # A rust library is a real static library; make
+            # it known to the build system.
+            no_expand_lib=True,
+        )
+        BaseRustLibrary.init(
+            self,
+            context,
+            basename,
+            cargo_file,
+            crate_type,
+            dependencies,
+            features,
+            is_gkrust,
+        )
+
+
+class SharedLibrary(Library):
+    """Context derived container object for a shared library"""
+
+    __slots__ = (
+        "soname",
+        "variant",
+        "symbols_file",
+        "output_category",
+        "symbols_link_arg",
+    )
+
+    DICT_ATTRS = {
+        "basename",
+        "import_name",
+        "install_target",
+        "lib_name",
+        "relobjdir",
+        "soname",
+    }
+
+    FRAMEWORK = 1
+    MAX_VARIANT = 2
+
+    def __init__(
+        self,
+        context,
+        basename,
+        real_name=None,
+        soname=None,
+        variant=None,
+        symbols_file=False,
+    ):
+        assert variant in range(1, self.MAX_VARIANT) or variant is None
+        Library.__init__(self, context, basename, real_name)
+        self.variant = variant
+        self.lib_name = real_name or basename
+        self.output_category = context.get("SHARED_LIBRARY_OUTPUT_CATEGORY")
+        assert self.lib_name
+
+        if variant == self.FRAMEWORK:
+            self.import_name = self.lib_name
+        else:
+            self.import_name = "%s%s%s" % (
+                context.config.import_prefix,
+                self.lib_name,
+                context.config.import_suffix,
+            )
+            self.lib_name = "%s%s%s" % (
+                context.config.dll_prefix,
+                self.lib_name,
+                context.config.dll_suffix,
+            )
+        if soname:
+            self.soname = "%s%s%s" % (
+                context.config.dll_prefix,
+                soname,
+                context.config.dll_suffix,
+            )
+        else:
+            self.soname = self.lib_name
+
+        if symbols_file is False:
+            # No symbols file.
+            self.symbols_file = None
+        elif symbols_file is True:
+            # Symbols file with default name.
+            if context.config.substs["OS_TARGET"] == "WINNT":
+                self.symbols_file = "%s.def" % self.lib_name
+            else:
+                self.symbols_file = "%s.symbols" % self.lib_name
+        else:
+            # Explicitly provided name.
+            self.symbols_file = symbols_file
+
+        if self.symbols_file:
+            os_target = context.config.substs["OS_TARGET"]
+            if os_target == "Darwin":
+                self.symbols_link_arg = (
+                    "-Wl,-exported_symbols_list," + self.symbols_file
+                )
+            elif os_target == "SunOS":
+                self.symbols_link_arg = (
+                    "-z gnu-version-script-compat -Wl,--version-script,"
+                    + self.symbols_file
+                )
+            elif os_target == "WINNT":
+                if context.config.substs.get("GNU_CC"):
+                    self.symbols_link_arg = self.symbols_file
+                else:
+                    self.symbols_link_arg = "-DEF:" + self.symbols_file
+            elif context.config.substs.get("GCC_USE_GNU_LD"):
+                self.symbols_link_arg = "-Wl,--version-script," + self.symbols_file
+
+
+class HostSharedLibrary(HostMixin, Library):
+    """Context derived container object for a host shared library.
+
+    This class supports fewer features than SharedLibrary does for target
+    shared libraries. It currently has enough build system support to build
+    the clang plugin."""
+
+    KIND = "host"
+
+    def __init__(self, context, basename):
+        Library.__init__(self, context, basename)
+        self.lib_name = "%s%s%s" % (
+            context.config.host_dll_prefix,
+            self.basename,
+            context.config.host_dll_suffix,
+        )
+
+
+class ExternalLibrary(object):
+    """Empty mixin for libraries built by an external build system."""
+
+
+class ExternalStaticLibrary(StaticLibrary, ExternalLibrary):
+    """Context derived container for static libraries built by an external
+    build system."""
+
+
+class ExternalSharedLibrary(SharedLibrary, ExternalLibrary):
+    """Context derived container for shared libraries built by an external
+    build system."""
+
+
+class HostLibrary(HostMixin, BaseLibrary):
+    """Context derived container object for a host library"""
+
+    KIND = "host"
+    no_expand_lib = False
+
+
+class HostRustLibrary(HostLibrary, BaseRustLibrary):
+    """Context derived container object for a host rust library"""
+
+    KIND = "host"
+    TARGET_SUBST_VAR = "RUST_HOST_TARGET"
+    FEATURES_VAR = "HOST_RUST_LIBRARY_FEATURES"
+    LIB_FILE_VAR = "HOST_RUST_LIBRARY_FILE"
+    __slots__ = BaseRustLibrary.slots
+    no_expand_lib = True
+
+    def __init__(
+        self,
+        context,
+        basename,
+        cargo_file,
+        crate_type,
+        dependencies,
+        features,
+        is_gkrust,
+    ):
+        HostLibrary.__init__(self, context, basename)
+        BaseRustLibrary.init(
+            self,
+            context,
+            basename,
+            cargo_file,
+            crate_type,
+            dependencies,
+            features,
+            is_gkrust,
+        )
+
+
+class TestManifest(ContextDerived):
+    """Represents a manifest file containing information about tests."""
+
+    __slots__ = (
+        # The type of test manifest this is.
+        "flavor",
+        # Maps source filename to destination filename. The destination
+        # path is relative from the tests root directory. Values are 2-tuples
+        # of (destpath, is_test_file) where the 2nd item is True if this
+        # item represents a test file (versus a support file).
+        "installs",
+        # A list of pattern matching installs to perform. Entries are
+        # (base, pattern, dest).
+        "pattern_installs",
+        # Where all files for this manifest flavor are installed in the unified
+        # test package directory.
+        "install_prefix",
+        # Set of files provided by an external mechanism.
+        "external_installs",
+        # Set of files required by multiple test directories, whose installation
+        # will be resolved when running tests.
+        "deferred_installs",
+        # The full path of this manifest file.
+        "path",
+        # The directory where this manifest is defined.
+        "directory",
+        # The parsed manifestparser.TestManifest instance.
+        "manifest",
+        # List of tests. Each element is a dict of metadata.
+ "tests", + # The relative path of the parsed manifest within the srcdir. + "manifest_relpath", + # The relative path of the parsed manifest within the objdir. + "manifest_obj_relpath", + # The relative paths to all source files for this manifest. + "source_relpaths", + # If this manifest is a duplicate of another one, this is the + # manifestparser.TestManifest of the other one. + "dupe_manifest", + ) + + def __init__( + self, + context, + path, + manifest, + flavor=None, + install_prefix=None, + relpath=None, + sources=(), + dupe_manifest=False, + ): + ContextDerived.__init__(self, context) + + assert flavor in all_test_flavors() + + self.path = path + self.directory = mozpath.dirname(path) + self.manifest = manifest + self.flavor = flavor + self.install_prefix = install_prefix + self.manifest_relpath = relpath + self.manifest_obj_relpath = relpath + self.source_relpaths = sources + self.dupe_manifest = dupe_manifest + self.installs = {} + self.pattern_installs = [] + self.tests = [] + self.external_installs = set() + self.deferred_installs = set() + + +class LocalInclude(ContextDerived): + """Describes an individual local include path.""" + + __slots__ = ("path",) + + def __init__(self, context, path): + ContextDerived.__init__(self, context) + + self.path = path + + +class PerSourceFlag(ContextDerived): + """Describes compiler flags specified for individual source files.""" + + __slots__ = ("file_name", "flags") + + def __init__(self, context, file_name, flags): + ContextDerived.__init__(self, context) + + self.file_name = file_name + self.flags = flags + + +class JARManifest(ContextDerived): + """Describes an individual JAR manifest file and how to process it. + + This class isn't very useful for optimizing backends yet because we don't + capture defines. We can't capture defines safely until all of them are + defined in moz.build and not Makefile.in files. + """ + + __slots__ = ("path",) + + def __init__(self, context, path): + ContextDerived.__init__(self, context) + + self.path = path + + +class BaseSources(ContextDerived): + """Base class for files to be compiled during the build.""" + + __slots__ = ("files", "static_files", "generated_files", "canonical_suffix") + + def __init__(self, context, static_files, generated_files, canonical_suffix): + ContextDerived.__init__(self, context) + + # Sorted so output is consistent and we don't bump mtimes, but always + # order generated files after static ones to be consistent across build + # environments, which may have different objdir paths relative to + # topsrcdir. + self.static_files = sorted(static_files) + self.generated_files = sorted(generated_files) + self.files = self.static_files + self.generated_files + self.canonical_suffix = canonical_suffix + + +class Sources(BaseSources): + """Represents files to be compiled during the build.""" + + def __init__(self, context, static_files, generated_files, canonical_suffix): + BaseSources.__init__( + self, context, static_files, generated_files, canonical_suffix + ) + + +class PgoGenerateOnlySources(BaseSources): + """Represents files to be compiled during the build. 
+
+    These files are only used during the PGO generation phase."""
+
+    def __init__(self, context, files):
+        BaseSources.__init__(self, context, files, [], ".cpp")
+
+
+class HostSources(HostMixin, BaseSources):
+    """Represents files to be compiled for the host during the build."""
+
+    def __init__(self, context, static_files, generated_files, canonical_suffix):
+        BaseSources.__init__(
+            self, context, static_files, generated_files, canonical_suffix
+        )
+
+
+class WasmSources(BaseSources):
+    """Represents files to be compiled with the wasm compiler during the build."""
+
+    def __init__(self, context, static_files, generated_files, canonical_suffix):
+        BaseSources.__init__(
+            self, context, static_files, generated_files, canonical_suffix
+        )
+
+
+class UnifiedSources(BaseSources):
+    """Represents files to be compiled in a unified fashion during the build."""
+
+    __slots__ = ("have_unified_mapping", "unified_source_mapping")
+
+    def __init__(self, context, static_files, generated_files, canonical_suffix):
+        BaseSources.__init__(
+            self, context, static_files, generated_files, canonical_suffix
+        )
+
+        unified_build = context.config.substs.get("ENABLE_UNIFIED_BUILD", False)
+        files_per_unified_file = (
+            context.get("FILES_PER_UNIFIED_FILE", 16) if unified_build else 1
+        )
+
+        self.have_unified_mapping = files_per_unified_file > 1
+
+        if self.have_unified_mapping:
+            # On Windows, path names have a maximum length of 255 characters,
+            # so avoid creating extremely long path names.
+            unified_prefix = context.relsrcdir
+            if len(unified_prefix) > 20:
+                unified_prefix = unified_prefix[-20:].split("/", 1)[-1]
+            unified_prefix = unified_prefix.replace("/", "_")
+
+            suffix = self.canonical_suffix[1:]
+            unified_prefix = "Unified_%s_%s" % (suffix, unified_prefix)
+            self.unified_source_mapping = list(
+                group_unified_files(
+                    # NOTE: self.files is already (partially) sorted, and we
+                    # intentionally do not re-sort it here to avoid a dependency
+                    # on the build environment's objdir path.
+                    self.files,
+                    unified_prefix=unified_prefix,
+                    unified_suffix=suffix,
+                    files_per_unified_file=files_per_unified_file,
+                )
+            )
+
+
+class InstallationTarget(ContextDerived):
+    """Describes the rules that affect where files get installed to."""
+
+    __slots__ = ("xpiname", "subdir", "target", "enabled")
+
+    def __init__(self, context):
+        ContextDerived.__init__(self, context)
+
+        self.xpiname = context.get("XPI_NAME", "")
+        self.subdir = context.get("DIST_SUBDIR", "")
+        self.target = context["FINAL_TARGET"]
+        self.enabled = context["DIST_INSTALL"] is not False
+
+    def is_custom(self):
+        """Returns whether the target is not derived from the default
+        given xpiname and subdir."""
+
+        return (
+            FinalTargetValue(dict(XPI_NAME=self.xpiname, DIST_SUBDIR=self.subdir))
+            == self.target
+        )
+
+
+class FinalTargetFiles(ContextDerived):
+    """Sandbox container object for FINAL_TARGET_FILES, which is a
+    HierarchicalStringList.
+
+    We need an object derived from ContextDerived for use in the backend, so
+    this object fills that role. It just has a reference to the underlying
+    HierarchicalStringList, which is created when parsing FINAL_TARGET_FILES.
+    """
+
+    __slots__ = "files"
+
+    def __init__(self, sandbox, files):
+        ContextDerived.__init__(self, sandbox)
+        self.files = files
+
+
+class FinalTargetPreprocessedFiles(ContextDerived):
+    """Sandbox container object for FINAL_TARGET_PP_FILES, which is a
+    HierarchicalStringList.
+ + We need an object derived from ContextDerived for use in the backend, so + this object fills that role. It just has a reference to the underlying + HierarchicalStringList, which is created when parsing + FINAL_TARGET_PP_FILES. + """ + + __slots__ = "files" + + def __init__(self, sandbox, files): + ContextDerived.__init__(self, sandbox) + self.files = files + + +class LocalizedFiles(FinalTargetFiles): + """Sandbox container object for LOCALIZED_FILES, which is a + HierarchicalStringList. + """ + + pass + + +class LocalizedPreprocessedFiles(FinalTargetPreprocessedFiles): + """Sandbox container object for LOCALIZED_PP_FILES, which is a + HierarchicalStringList. + """ + + pass + + +class ObjdirFiles(FinalTargetFiles): + """Sandbox container object for OBJDIR_FILES, which is a + HierarchicalStringList. + """ + + @property + def install_target(self): + return "" + + +class ObjdirPreprocessedFiles(FinalTargetPreprocessedFiles): + """Sandbox container object for OBJDIR_PP_FILES, which is a + HierarchicalStringList. + """ + + @property + def install_target(self): + return "" + + +class TestHarnessFiles(FinalTargetFiles): + """Sandbox container object for TEST_HARNESS_FILES, + which is a HierarchicalStringList. + """ + + @property + def install_target(self): + return "_tests" + + +class Exports(FinalTargetFiles): + """Context derived container object for EXPORTS, which is a + HierarchicalStringList. + + We need an object derived from ContextDerived for use in the backend, so + this object fills that role. It just has a reference to the underlying + HierarchicalStringList, which is created when parsing EXPORTS. + """ + + @property + def install_target(self): + return "dist/include" + + +class GeneratedFile(ContextDerived): + """Represents a generated file.""" + + __slots__ = ( + "script", + "method", + "outputs", + "inputs", + "flags", + "required_before_export", + "required_before_compile", + "required_during_compile", + "localized", + "force", + "py2", + ) + + def __init__( + self, + context, + script, + method, + outputs, + inputs, + flags=(), + localized=False, + force=False, + py2=False, + required_during_compile=None, + ): + ContextDerived.__init__(self, context) + self.script = script + self.method = method + self.outputs = outputs if isinstance(outputs, tuple) else (outputs,) + self.inputs = inputs + self.flags = flags + self.localized = localized + self.force = force + self.py2 = py2 + + if self.config.substs.get("MOZ_WIDGET_TOOLKIT") == "android": + # In GeckoView builds we process Jinja files during pre-export + self.required_before_export = [ + f for f in self.inputs if f.endswith(".jinja") + ] + else: + self.required_before_export = False + + suffixes = [ + ".h", + ".py", + ".rs", + # We need to compile Java to generate JNI wrappers for native code + # compilation to consume. + "android_apks", + ".profdata", + ".webidl", + ] + + try: + lib_suffix = context.config.substs["LIB_SUFFIX"] + suffixes.append("." 
+ lib_suffix) + except KeyError: + # Tests may not define LIB_SUFFIX + pass + + suffixes = tuple(suffixes) + + self.required_before_compile = [ + f + for f in self.outputs + if f.endswith(suffixes) or "stl_wrappers/" in f or "xpidl.stub" in f + ] + + if required_during_compile is None: + self.required_during_compile = [ + f + for f in self.outputs + if f.endswith( + (".asm", ".c", ".cpp", ".inc", ".m", ".mm", ".def", "symverscript") + ) + ] + else: + self.required_during_compile = required_during_compile + + +class ChromeManifestEntry(ContextDerived): + """Represents a chrome.manifest entry.""" + + __slots__ = ("path", "entry") + + def __init__(self, context, manifest_path, entry): + ContextDerived.__init__(self, context) + assert isinstance(entry, ManifestEntry) + self.path = mozpath.join(self.install_target, manifest_path) + # Ensure the entry is relative to the directory containing the + # manifest path. + entry = entry.rebase(mozpath.dirname(manifest_path)) + # Then add the install_target to the entry base directory. + self.entry = entry.move(mozpath.dirname(self.path)) diff --git a/python/mozbuild/mozbuild/frontend/emitter.py b/python/mozbuild/mozbuild/frontend/emitter.py new file mode 100644 index 0000000000..8d62072421 --- /dev/null +++ b/python/mozbuild/mozbuild/frontend/emitter.py @@ -0,0 +1,1892 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import logging +import os +import sys +import time +import traceback +from collections import OrderedDict, defaultdict + +import mozinfo +import mozpack.path as mozpath +import six +import toml +from mach.mixin.logging import LoggingMixin +from mozpack.chrome.manifest import Manifest + +from mozbuild.base import ExecutionSummary +from mozbuild.util import OrderedDefaultDict, memoize + +from ..testing import REFTEST_FLAVORS, TEST_MANIFESTS, SupportFilesConverter +from .context import Context, ObjDirPath, Path, SourcePath, SubContext +from .data import ( + BaseRustProgram, + ChromeManifestEntry, + ComputedFlags, + ConfigFileSubstitution, + Defines, + DirectoryTraversal, + Exports, + ExternalSharedLibrary, + ExternalStaticLibrary, + FinalTargetFiles, + FinalTargetPreprocessedFiles, + GeneratedFile, + HostDefines, + HostLibrary, + HostProgram, + HostRustLibrary, + HostRustProgram, + HostSharedLibrary, + HostSimpleProgram, + HostSources, + InstallationTarget, + IPDLCollection, + JARManifest, + Library, + Linkable, + LocalInclude, + LocalizedFiles, + LocalizedPreprocessedFiles, + ObjdirFiles, + ObjdirPreprocessedFiles, + PerSourceFlag, + Program, + RustLibrary, + RustProgram, + RustTests, + SandboxedWasmLibrary, + SharedLibrary, + SimpleProgram, + Sources, + StaticLibrary, + TestHarnessFiles, + TestManifest, + UnifiedSources, + VariablePassthru, + WasmDefines, + WasmSources, + WebIDLCollection, + XPCOMComponentManifests, + XPIDLModule, +) +from .reader import SandboxValidationError + + +class TreeMetadataEmitter(LoggingMixin): + """Converts the executed mozbuild files into data structures. + + This is a bridge between reader.py and data.py. It takes what was read by + reader.BuildReader and converts it into the classes defined in the data + module. 
+ """ + + def __init__(self, config): + self.populate_logger() + + self.config = config + + mozinfo.find_and_update_from_json(config.topobjdir) + + self.info = dict(mozinfo.info) + + self._libs = OrderedDefaultDict(list) + self._binaries = OrderedDict() + self._compile_dirs = set() + self._host_compile_dirs = set() + self._wasm_compile_dirs = set() + self._asm_compile_dirs = set() + self._compile_flags = dict() + self._compile_as_flags = dict() + self._linkage = [] + self._static_linking_shared = set() + self._crate_verified_local = set() + self._crate_directories = dict() + self._idls = defaultdict(set) + + # Keep track of external paths (third party build systems), starting + # from what we run a subconfigure in. We'll eliminate some directories + # as we traverse them with moz.build (e.g. js/src). + subconfigures = os.path.join(self.config.topobjdir, "subconfigures") + paths = [] + if os.path.exists(subconfigures): + paths = open(subconfigures).read().splitlines() + self._external_paths = set(mozpath.normsep(d) for d in paths) + + self._emitter_time = 0.0 + self._object_count = 0 + self._test_files_converter = SupportFilesConverter() + + def summary(self): + return ExecutionSummary( + "Processed into {object_count:d} build config descriptors in " + "{execution_time:.2f}s", + execution_time=self._emitter_time, + object_count=self._object_count, + ) + + def emit(self, output, emitfn=None): + """Convert the BuildReader output into data structures. + + The return value from BuildReader.read_topsrcdir() (a generator) is + typically fed into this function. + """ + contexts = {} + emitfn = emitfn or self.emit_from_context + + def emit_objs(objs): + for o in objs: + self._object_count += 1 + yield o + + for out in output: + # Nothing in sub-contexts is currently of interest to us. Filter + # them all out. + if isinstance(out, SubContext): + continue + + if isinstance(out, Context): + # Keep all contexts around, we will need them later. + contexts[os.path.normcase(out.objdir)] = out + + start = time.monotonic() + # We need to expand the generator for the timings to work. + objs = list(emitfn(out)) + self._emitter_time += time.monotonic() - start + + for o in emit_objs(objs): + yield o + + else: + raise Exception("Unhandled output type: %s" % type(out)) + + # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set + if self.config.substs.get("COMPILE_ENVIRONMENT"): + start = time.monotonic() + objs = list(self._emit_libs_derived(contexts)) + self._emitter_time += time.monotonic() - start + + for o in emit_objs(objs): + yield o + + def _emit_libs_derived(self, contexts): + + # First aggregate idl sources. 
+ webidl_attrs = [ + ("GENERATED_EVENTS_WEBIDL_FILES", lambda c: c.generated_events_sources), + ("GENERATED_WEBIDL_FILES", lambda c: c.generated_sources), + ("PREPROCESSED_TEST_WEBIDL_FILES", lambda c: c.preprocessed_test_sources), + ("PREPROCESSED_WEBIDL_FILES", lambda c: c.preprocessed_sources), + ("TEST_WEBIDL_FILES", lambda c: c.test_sources), + ("WEBIDL_FILES", lambda c: c.sources), + ("WEBIDL_EXAMPLE_INTERFACES", lambda c: c.example_interfaces), + ] + ipdl_attrs = [ + ("IPDL_SOURCES", lambda c: c.sources), + ("PREPROCESSED_IPDL_SOURCES", lambda c: c.preprocessed_sources), + ] + xpcom_attrs = [("XPCOM_MANIFESTS", lambda c: c.manifests)] + + idl_sources = {} + for root, cls, attrs in ( + (self.config.substs.get("WEBIDL_ROOT"), WebIDLCollection, webidl_attrs), + (self.config.substs.get("IPDL_ROOT"), IPDLCollection, ipdl_attrs), + ( + self.config.substs.get("XPCOM_ROOT"), + XPCOMComponentManifests, + xpcom_attrs, + ), + ): + if root: + collection = cls(contexts[os.path.normcase(root)]) + for var, src_getter in attrs: + src_getter(collection).update(self._idls[var]) + + idl_sources[root] = collection.all_source_files() + if isinstance(collection, WebIDLCollection): + # Test webidl sources are added here as a somewhat special + # case. + idl_sources[mozpath.join(root, "test")] = [ + s for s in collection.all_test_cpp_basenames() + ] + + yield collection + + # Next do FINAL_LIBRARY linkage. + for lib in (l for libs in self._libs.values() for l in libs): + if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into: + continue + if lib.link_into not in self._libs: + raise SandboxValidationError( + 'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME' + % lib.link_into, + contexts[os.path.normcase(lib.objdir)], + ) + candidates = self._libs[lib.link_into] + + # When there are multiple candidates, but all are in the same + # directory and have a different type, we want all of them to + # have the library linked. The typical use case is when building + # both a static and a shared library in a directory, and having + # that as a FINAL_LIBRARY. + if ( + len(set(type(l) for l in candidates)) == len(candidates) + and len(set(l.objdir for l in candidates)) == 1 + ): + for c in candidates: + c.link_library(lib) + else: + raise SandboxValidationError( + 'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in ' + "multiple places:\n %s" + % (lib.link_into, "\n ".join(l.objdir for l in candidates)), + contexts[os.path.normcase(lib.objdir)], + ) + + # ...and USE_LIBS linkage. + for context, obj, variable in self._linkage: + self._link_libraries(context, obj, variable, idl_sources) + + def recurse_refs(lib): + for o in lib.refs: + yield o + if isinstance(o, StaticLibrary): + for q in recurse_refs(o): + yield q + + # Check that all static libraries referring to shared libraries in + # USE_LIBS are linked into a shared library or program. + for lib in self._static_linking_shared: + if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)): + shared_libs = sorted( + l.basename + for l in lib.linked_libraries + if isinstance(l, SharedLibrary) + ) + raise SandboxValidationError( + 'The static "%s" library is not used in a shared library ' + "or a program, but USE_LIBS contains the following shared " + "library names:\n %s\n\nMaybe you can remove the " + 'static "%s" library?'
+ % (lib.basename, "\n ".join(shared_libs), lib.basename), + contexts[os.path.normcase(lib.objdir)], + ) + + @memoize + def rust_libraries(obj): + libs = [] + for o in obj.linked_libraries: + if isinstance(o, (HostRustLibrary, RustLibrary)): + libs.append(o) + elif isinstance(o, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)): + libs.extend(rust_libraries(o)) + return libs + + def check_rust_libraries(obj): + rust_libs = set(rust_libraries(obj)) + if len(rust_libs) <= 1: + return + if isinstance(obj, (Library, HostLibrary)): + what = '"%s" library' % obj.basename + else: + what = '"%s" program' % obj.name + raise SandboxValidationError( + "Cannot link the following Rust libraries into the %s:\n" + "%s\nOnly one is allowed." + % ( + what, + "\n".join( + " - %s" % r.basename + for r in sorted(rust_libs, key=lambda r: r.basename) + ), + ), + contexts[os.path.normcase(obj.objdir)], + ) + + # Propagate LIBRARY_DEFINES to all child libraries recursively. + def propagate_defines(outerlib, defines): + outerlib.lib_defines.update(defines) + for lib in outerlib.linked_libraries: + # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS + # paths. + if ( + isinstance(lib, StaticLibrary) + and lib.link_into == outerlib.basename + ): + propagate_defines(lib, defines) + + for lib in (l for libs in self._libs.values() for l in libs): + if isinstance(lib, Library): + propagate_defines(lib, lib.lib_defines) + check_rust_libraries(lib) + yield lib + + for lib in (l for libs in self._libs.values() for l in libs): + lib_defines = list(lib.lib_defines.get_defines()) + if lib_defines: + objdir_flags = self._compile_flags[lib.objdir] + objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines) + + objdir_flags = self._compile_as_flags.get(lib.objdir) + if objdir_flags: + objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines) + + for flags_obj in self._compile_flags.values(): + yield flags_obj + + for flags_obj in self._compile_as_flags.values(): + yield flags_obj + + for obj in self._binaries.values(): + if isinstance(obj, Linkable): + check_rust_libraries(obj) + yield obj + + LIBRARY_NAME_VAR = { + "host": "HOST_LIBRARY_NAME", + "target": "LIBRARY_NAME", + "wasm": "SANDBOXED_WASM_LIBRARY_NAME", + } + + ARCH_VAR = {"host": "HOST_OS_ARCH", "target": "OS_TARGET"} + + STDCXXCOMPAT_NAME = {"host": "host_stdc++compat", "target": "stdc++compat"} + + def _link_libraries(self, context, obj, variable, extra_sources): + """Add linkage declarations to a given object.""" + assert isinstance(obj, Linkable) + + if context.objdir in extra_sources: + # All "extra sources" are .cpp for the moment, and happen to come + # first in order. + obj.sources[".cpp"] = extra_sources[context.objdir] + obj.sources[".cpp"] + + for path in context.get(variable, []): + self._link_library(context, obj, variable, path) + + # Link system libraries from OS_LIBS/HOST_OS_LIBS. + for lib in context.get(variable.replace("USE", "OS"), []): + obj.link_system_library(lib) + + # We have to wait for all the self._link_library calls above to have + # happened for obj.cxx_link to be final. + # FIXME: Theoretically, HostSharedLibrary shouldn't be here (bug + # 1474022). 
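+        # (Editor's note: the condition evaluated below, reduced to a
+        # standalone predicate for illustration; a sketch, not upstream
+        # code.)
+        #
+        #     def links_stdcxx_compat(is_static_like, cxx_link,
+        #                             moz_stdcxx_compat, os_is_linux):
+        #         # Static libraries, host (shared) libraries and Rust
+        #         # programs are excluded; everything else linking C++ on
+        #         # Linux with MOZ_STDCXX_COMPAT set gets stdc++compat.
+        #         return (not is_static_like and cxx_link
+        #                 and moz_stdcxx_compat and os_is_linux)
+        #
+        #     assert links_stdcxx_compat(False, True, True, True)
+        #     assert not links_stdcxx_compat(True, True, True, True)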
+ if ( + not isinstance( + obj, (StaticLibrary, HostLibrary, HostSharedLibrary, BaseRustProgram) + ) + and obj.cxx_link + ): + if ( + context.config.substs.get("MOZ_STDCXX_COMPAT") + and context.config.substs.get(self.ARCH_VAR.get(obj.KIND)) == "Linux" + ): + self._link_library( + context, obj, variable, self.STDCXXCOMPAT_NAME[obj.KIND] + ) + if obj.KIND == "target": + for lib in context.config.substs.get("STLPORT_LIBS", []): + obj.link_system_library(lib) + + def _link_library(self, context, obj, variable, path): + force_static = path.startswith("static:") and obj.KIND == "target" + if force_static: + path = path[7:] + name = mozpath.basename(path) + dir = mozpath.dirname(path) + candidates = [l for l in self._libs[name] if l.KIND == obj.KIND] + if dir: + if dir.startswith("/"): + dir = mozpath.normpath(mozpath.join(obj.topobjdir, dir[1:])) + else: + dir = mozpath.normpath(mozpath.join(obj.objdir, dir)) + dir = mozpath.relpath(dir, obj.topobjdir) + candidates = [l for l in candidates if l.relobjdir == dir] + if not candidates: + # If the given directory is under one of the external + # (third party) paths, use a fake library reference to + # there. + for d in self._external_paths: + if dir.startswith("%s/" % d): + candidates = [ + self._get_external_library(dir, name, force_static) + ] + break + + if not candidates: + raise SandboxValidationError( + '%s contains "%s", but there is no "%s" %s in %s.' + % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir), + context, + ) + + if len(candidates) > 1: + # If there's more than one remaining candidate, it could be + # that there are instances for the same library, in static and + # shared form. + libs = {} + for l in candidates: + key = mozpath.join(l.relobjdir, l.basename) + if force_static: + if isinstance(l, StaticLibrary): + libs[key] = l + else: + if key in libs and isinstance(l, SharedLibrary): + libs[key] = l + if key not in libs: + libs[key] = l + candidates = list(libs.values()) + if force_static and not candidates: + if dir: + raise SandboxValidationError( + '%s contains "static:%s", but there is no static ' + '"%s" %s in %s.' + % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir), + context, + ) + raise SandboxValidationError( + '%s contains "static:%s", but there is no static "%s" ' + "%s in the tree" + % (variable, name, name, self.LIBRARY_NAME_VAR[obj.KIND]), + context, + ) + + if not candidates: + raise SandboxValidationError( + '%s contains "%s", which does not match any %s in the tree.' + % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]), + context, + ) + + elif len(candidates) > 1: + paths = (mozpath.join(l.relsrcdir, "moz.build") for l in candidates) + raise SandboxValidationError( + '%s contains "%s", which matches a %s defined in multiple ' + "places:\n %s" + % ( + variable, + path, + self.LIBRARY_NAME_VAR[obj.KIND], + "\n ".join(paths), + ), + context, + ) + + elif force_static and not isinstance(candidates[0], StaticLibrary): + raise SandboxValidationError( + '%s contains "static:%s", but there is only a shared "%s" ' + "in %s. You may want to add FORCE_STATIC_LIB=True in " + '%s/moz.build, or remove "static:".' 
+ % ( + variable, + path, + name, + candidates[0].relobjdir, + candidates[0].relobjdir, + ), + context, + ) + + elif isinstance(obj, StaticLibrary) and isinstance( + candidates[0], SharedLibrary + ): + self._static_linking_shared.add(obj) + obj.link_library(candidates[0]) + + @memoize + def _get_external_library(self, dir, name, force_static): + # Create ExternalStaticLibrary or ExternalSharedLibrary object with a + # context more or less truthful about where the external library is. + context = Context(config=self.config) + context.add_source(mozpath.join(self.config.topsrcdir, dir, "dummy")) + if force_static: + return ExternalStaticLibrary(context, name) + else: + return ExternalSharedLibrary(context, name) + + def _parse_cargo_file(self, context): + """Parse the Cargo.toml file in context and return a Python object + representation of it. Raise a SandboxValidationError if the Cargo.toml + file does not exist. Return a tuple of (config, cargo_file).""" + cargo_file = mozpath.join(context.srcdir, "Cargo.toml") + if not os.path.exists(cargo_file): + raise SandboxValidationError( + "No Cargo.toml file found in %s" % cargo_file, context + ) + with open(cargo_file, "r") as f: + return toml.load(f), cargo_file + + def _verify_deps( + self, context, crate_dir, crate_name, dependencies, description="Dependency" + ): + """Verify that a crate's dependencies all specify local paths.""" + for dep_crate_name, values in six.iteritems(dependencies): + # A simple version number. + if isinstance(values, (six.binary_type, six.text_type)): + raise SandboxValidationError( + "%s %s of crate %s does not list a path" + % (description, dep_crate_name, crate_name), + context, + ) + + dep_path = values.get("path", None) + if not dep_path: + raise SandboxValidationError( + "%s %s of crate %s does not list a path" + % (description, dep_crate_name, crate_name), + context, + ) + + # Try to catch the case where somebody listed a + # local path for development. + if os.path.isabs(dep_path): + raise SandboxValidationError( + "%s %s of crate %s has a non-relative path" + % (description, dep_crate_name, crate_name), + context, + ) + + if not os.path.exists( + mozpath.join(context.config.topsrcdir, crate_dir, dep_path) + ): + raise SandboxValidationError( + "%s %s of crate %s refers to a non-existent path" + % (description, dep_crate_name, crate_name), + context, + ) + + def _rust_library( + self, context, libname, static_args, is_gkrust=False, cls=RustLibrary + ): + # We need to note any Rust library for linking purposes. 
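+        # (Editor's illustration: the smallest Cargo.toml shape that
+        # passes the validation below — a package name matching the
+        # moz.build library name, a [lib] section, and crate-type
+        # "staticlib". The name "mylib" is hypothetical.)
+        #
+        #     import toml
+        #     cfg = toml.loads(
+        #         '[package]\nname = "mylib"\n'
+        #         '[lib]\ncrate-type = ["staticlib"]\n'
+        #     )
+        #     assert cfg["package"]["name"] == "mylib"
+        #     assert cfg["lib"]["crate-type"][0] == "staticlib"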
+ config, cargo_file = self._parse_cargo_file(context) + crate_name = config["package"]["name"] + + if crate_name != libname: + raise SandboxValidationError( + "library %s does not match Cargo.toml-defined package %s" + % (libname, crate_name), + context, + ) + + # Check that the [lib.crate-type] field is correct + lib_section = config.get("lib", None) + if not lib_section: + raise SandboxValidationError( + "Cargo.toml for %s has no [lib] section" % libname, context + ) + + crate_type = lib_section.get("crate-type", None) + if not crate_type: + raise SandboxValidationError( + "Can't determine a crate-type for %s from Cargo.toml" % libname, context + ) + + crate_type = crate_type[0] + if crate_type != "staticlib": + raise SandboxValidationError( + "crate-type %s is not permitted for %s" % (crate_type, libname), context + ) + + dependencies = set(six.iterkeys(config.get("dependencies", {}))) + + features = context.get(cls.FEATURES_VAR, []) + unique_features = set(features) + if len(features) != len(unique_features): + raise SandboxValidationError( + "features for %s should not contain duplicates: %s" + % (libname, features), + context, + ) + + return cls( + context, + libname, + cargo_file, + crate_type, + dependencies, + features, + is_gkrust, + **static_args, + ) + + def _handle_linkables(self, context, passthru, generated_files): + linkables = [] + host_linkables = [] + wasm_linkables = [] + + def add_program(prog, var): + if var.startswith("HOST_"): + host_linkables.append(prog) + else: + linkables.append(prog) + + def check_unique_binary(program, kind): + if program in self._binaries: + raise SandboxValidationError( + 'Cannot use "%s" as %s name, ' + "because it is already used in %s" + % (program, kind, self._binaries[program].relsrcdir), + context, + ) + + for kind, cls in [("PROGRAM", Program), ("HOST_PROGRAM", HostProgram)]: + program = context.get(kind) + if program: + check_unique_binary(program, kind) + self._binaries[program] = cls(context, program) + self._linkage.append( + ( + context, + self._binaries[program], + kind.replace("PROGRAM", "USE_LIBS"), + ) + ) + add_program(self._binaries[program], kind) + + all_rust_programs = [] + for kind, cls in [ + ("RUST_PROGRAMS", RustProgram), + ("HOST_RUST_PROGRAMS", HostRustProgram), + ]: + programs = context[kind] + if not programs: + continue + + all_rust_programs.append((programs, kind, cls)) + + # Verify Rust program definitions. 
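+        # (Editor's illustration of the [[bin]] lookup performed below;
+        # the program name is hypothetical, not upstream code.)
+        #
+        #     import toml
+        #     cfg = toml.loads('[[bin]]\nname = "mytool"\n')
+        #     defined = {b["name"] for b in cfg["bin"]}
+        #     assert defined == {"mytool"}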
+ if all_rust_programs: + config, cargo_file = self._parse_cargo_file(context) + bin_section = config.get("bin", None) + if not bin_section: + raise SandboxValidationError( + "Cargo.toml in %s has no [bin] section" % context.srcdir, context + ) + + defined_binaries = {b["name"] for b in bin_section} + + for programs, kind, cls in all_rust_programs: + for program in programs: + if program not in defined_binaries: + raise SandboxValidationError( + "Cannot find Cargo.toml definition for %s" % program, + context, + ) + + check_unique_binary(program, kind) + self._binaries[program] = cls(context, program, cargo_file) + add_program(self._binaries[program], kind) + + for kind, cls in [ + ("SIMPLE_PROGRAMS", SimpleProgram), + ("CPP_UNIT_TESTS", SimpleProgram), + ("HOST_SIMPLE_PROGRAMS", HostSimpleProgram), + ]: + for program in context[kind]: + if program in self._binaries: + raise SandboxValidationError( + 'Cannot use "%s" in %s, ' + "because it is already used in %s" + % (program, kind, self._binaries[program].relsrcdir), + context, + ) + self._binaries[program] = cls( + context, program, is_unit_test=kind == "CPP_UNIT_TESTS" + ) + self._linkage.append( + ( + context, + self._binaries[program], + "HOST_USE_LIBS" + if kind == "HOST_SIMPLE_PROGRAMS" + else "USE_LIBS", + ) + ) + add_program(self._binaries[program], kind) + + host_libname = context.get("HOST_LIBRARY_NAME") + libname = context.get("LIBRARY_NAME") + + if host_libname: + if host_libname == libname: + raise SandboxValidationError( + "LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value", + context, + ) + + is_rust_library = context.get("IS_RUST_LIBRARY") + if is_rust_library: + lib = self._rust_library(context, host_libname, {}, cls=HostRustLibrary) + elif context.get("FORCE_SHARED_LIB"): + lib = HostSharedLibrary(context, host_libname) + else: + lib = HostLibrary(context, host_libname) + self._libs[host_libname].append(lib) + self._linkage.append((context, lib, "HOST_USE_LIBS")) + host_linkables.append(lib) + + final_lib = context.get("FINAL_LIBRARY") + if not libname and final_lib: + # If no LIBRARY_NAME is given, create one. + libname = context.relsrcdir.replace("/", "_") + + static_lib = context.get("FORCE_STATIC_LIB") + shared_lib = context.get("FORCE_SHARED_LIB") + + static_name = context.get("STATIC_LIBRARY_NAME") + shared_name = context.get("SHARED_LIBRARY_NAME") + + is_framework = context.get("IS_FRAMEWORK") + + soname = context.get("SONAME") + + lib_defines = context.get("LIBRARY_DEFINES") + + wasm_lib = context.get("SANDBOXED_WASM_LIBRARY_NAME") + + shared_args = {} + static_args = {} + + if final_lib: + if static_lib: + raise SandboxValidationError( + "FINAL_LIBRARY implies FORCE_STATIC_LIB. " + "Please remove the latter.", + context, + ) + if shared_lib: + raise SandboxValidationError( + "FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. " + "Please remove one.", + context, + ) + if is_framework: + raise SandboxValidationError( + "FINAL_LIBRARY conflicts with IS_FRAMEWORK. " "Please remove one.", + context, + ) + static_args["link_into"] = final_lib + static_lib = True + + if libname: + if is_framework: + if soname: + raise SandboxValidationError( + "IS_FRAMEWORK conflicts with SONAME. 
" "Please remove one.", + context, + ) + shared_lib = True + shared_args["variant"] = SharedLibrary.FRAMEWORK + + if not static_lib and not shared_lib: + static_lib = True + + if static_name: + if not static_lib: + raise SandboxValidationError( + "STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB", context + ) + static_args["real_name"] = static_name + + if shared_name: + if not shared_lib: + raise SandboxValidationError( + "SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB", context + ) + shared_args["real_name"] = shared_name + + if soname: + if not shared_lib: + raise SandboxValidationError( + "SONAME requires FORCE_SHARED_LIB", context + ) + shared_args["soname"] = soname + + if context.get("NO_EXPAND_LIBS"): + if not static_lib: + raise SandboxValidationError( + "NO_EXPAND_LIBS can only be set for static libraries.", context + ) + static_args["no_expand_lib"] = True + + if shared_lib and static_lib: + if not static_name and not shared_name: + raise SandboxValidationError( + "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, " + "but neither STATIC_LIBRARY_NAME or " + "SHARED_LIBRARY_NAME is set. At least one is required.", + context, + ) + if static_name and not shared_name and static_name == libname: + raise SandboxValidationError( + "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, " + "but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, " + "and SHARED_LIBRARY_NAME is unset. Please either " + "change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set " + "SHARED_LIBRARY_NAME.", + context, + ) + if shared_name and not static_name and shared_name == libname: + raise SandboxValidationError( + "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, " + "but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, " + "and STATIC_LIBRARY_NAME is unset. Please either " + "change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set " + "STATIC_LIBRARY_NAME.", + context, + ) + if shared_name and static_name and shared_name == static_name: + raise SandboxValidationError( + "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, " + "but SHARED_LIBRARY_NAME is the same as " + "STATIC_LIBRARY_NAME. 
Please change one of them.", + context, + ) + + symbols_file = context.get("SYMBOLS_FILE") + if symbols_file: + if not shared_lib: + raise SandboxValidationError( + "SYMBOLS_FILE can only be used with a SHARED_LIBRARY.", context + ) + if context.get("DEFFILE"): + raise SandboxValidationError( + "SYMBOLS_FILE cannot be used along DEFFILE.", context + ) + if isinstance(symbols_file, SourcePath): + if not os.path.exists(symbols_file.full_path): + raise SandboxValidationError( + "Path specified in SYMBOLS_FILE does not exist: %s " + "(resolved to %s)" % (symbols_file, symbols_file.full_path), + context, + ) + shared_args["symbols_file"] = True + else: + if symbols_file.target_basename not in generated_files: + raise SandboxValidationError( + ( + "Objdir file specified in SYMBOLS_FILE not in " + + "GENERATED_FILES: %s" + ) + % (symbols_file,), + context, + ) + shared_args["symbols_file"] = symbols_file.target_basename + + if shared_lib: + lib = SharedLibrary(context, libname, **shared_args) + self._libs[libname].append(lib) + self._linkage.append((context, lib, "USE_LIBS")) + linkables.append(lib) + if not lib.installed: + generated_files.add(lib.lib_name) + if symbols_file and isinstance(symbols_file, SourcePath): + script = mozpath.join( + mozpath.dirname(mozpath.dirname(__file__)), + "action", + "generate_symbols_file.py", + ) + defines = () + if lib.defines: + defines = lib.defines.get_defines() + yield GeneratedFile( + context, + script, + "generate_symbols_file", + lib.symbols_file, + [symbols_file], + defines, + required_during_compile=[lib.symbols_file], + ) + if static_lib: + is_rust_library = context.get("IS_RUST_LIBRARY") + if is_rust_library: + lib = self._rust_library( + context, + libname, + static_args, + is_gkrust=bool(context.get("IS_GKRUST")), + ) + else: + lib = StaticLibrary(context, libname, **static_args) + self._libs[libname].append(lib) + self._linkage.append((context, lib, "USE_LIBS")) + linkables.append(lib) + + if lib_defines: + if not libname: + raise SandboxValidationError( + "LIBRARY_DEFINES needs a " "LIBRARY_NAME to take effect", + context, + ) + lib.lib_defines.update(lib_defines) + + if wasm_lib: + if wasm_lib == libname: + raise SandboxValidationError( + "SANDBOXED_WASM_LIBRARY_NAME and LIBRARY_NAME must have a " + "different value.", + context, + ) + if wasm_lib == host_libname: + raise SandboxValidationError( + "SANDBOXED_WASM_LIBRARY_NAME and HOST_LIBRARY_NAME must " + "have a different value.", + context, + ) + if wasm_lib == shared_name: + raise SandboxValidationError( + "SANDBOXED_WASM_LIBRARY_NAME and SHARED_NAME must have a " + "different value.", + context, + ) + if wasm_lib == static_name: + raise SandboxValidationError( + "SANDBOXED_WASM_LIBRARY_NAME and STATIC_NAME must have a " + "different value.", + context, + ) + lib = SandboxedWasmLibrary(context, wasm_lib) + self._libs[libname].append(lib) + wasm_linkables.append(lib) + self._wasm_compile_dirs.add(context.objdir) + + seen = {} + for symbol in ("SOURCES", "UNIFIED_SOURCES"): + for src in context.get(symbol, []): + basename = os.path.splitext(os.path.basename(src))[0] + if basename in seen: + other_src, where = seen[basename] + extra = "" + if "UNIFIED_SOURCES" in (symbol, where): + extra = " in non-unified builds" + raise SandboxValidationError( + f"{src} from {symbol} would have the same object name " + f"as {other_src} from {where}{extra}.", + context, + ) + seen[basename] = (src, symbol) + + # Only emit sources if we have linkables defined in the same context. 
+ # Note the linkables are not emitted in this function, but much later, + # after aggregation (because of e.g. USE_LIBS processing). + if not (linkables or host_linkables or wasm_linkables): + return + + # TODO: objdirs with only host things in them shouldn't need target + # flags, but there's at least one Makefile.in (in + # build/unix/elfhack) that relies on the value of LDFLAGS being + # passed to one-off rules. + self._compile_dirs.add(context.objdir) + + if host_linkables or any( + isinstance(l, (RustLibrary, RustProgram)) for l in linkables + ): + self._host_compile_dirs.add(context.objdir) + + sources = defaultdict(list) + gen_sources = defaultdict(list) + all_flags = {} + for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES", "WASM_SOURCES"): + srcs = sources[symbol] + gen_srcs = gen_sources[symbol] + context_srcs = context.get(symbol, []) + seen_sources = set() + for f in context_srcs: + if f in seen_sources: + raise SandboxValidationError( + "Source file should only " + "be added to %s once: %s" % (symbol, f), + context, + ) + seen_sources.add(f) + full_path = f.full_path + if isinstance(f, SourcePath): + srcs.append(full_path) + else: + assert isinstance(f, Path) + gen_srcs.append(full_path) + if symbol == "SOURCES": + context_flags = context_srcs[f] + if context_flags: + all_flags[full_path] = context_flags + + if isinstance(f, SourcePath) and not os.path.exists(full_path): + raise SandboxValidationError( + "File listed in %s does not " + "exist: '%s'" % (symbol, full_path), + context, + ) + + # Process the .cpp files generated by IPDL as generated sources within + # the context which declared the IPDL_SOURCES attribute. + ipdl_root = self.config.substs.get("IPDL_ROOT") + for symbol in ("IPDL_SOURCES", "PREPROCESSED_IPDL_SOURCES"): + context_srcs = context.get(symbol, []) + for f in context_srcs: + root, ext = mozpath.splitext(mozpath.basename(f)) + + suffix_map = { + ".ipdlh": [".cpp"], + ".ipdl": [".cpp", "Child.cpp", "Parent.cpp"], + } + if ext not in suffix_map: + raise SandboxValidationError( + "Unexpected extension for IPDL source %s" % ext + ) + + gen_sources["UNIFIED_SOURCES"].extend( + mozpath.join(ipdl_root, root + suffix) for suffix in suffix_map[ext] + ) + + no_pgo = context.get("NO_PGO") + no_pgo_sources = [f for f, flags in six.iteritems(all_flags) if flags.no_pgo] + if no_pgo: + if no_pgo_sources: + raise SandboxValidationError( + "NO_PGO and SOURCES[...].no_pgo " "cannot be set at the same time", + context, + ) + passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo + if no_pgo_sources: + passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources + + # A map from "canonical suffixes" for a particular source file + # language to the range of suffixes associated with that language. + # + # We deliberately don't list the canonical suffix in the suffix list + # in the definition; we'll add it in programmatically after defining + # things. + suffix_map = { + ".s": set([".asm"]), + ".c": set(), + ".m": set(), + ".mm": set(), + ".cpp": set([".cc", ".cxx"]), + ".S": set(), + } + + # The inverse of the above, mapping suffixes to their canonical suffix. + canonicalized_suffix_map = {} + for suffix, alternatives in six.iteritems(suffix_map): + alternatives.add(suffix) + for a in alternatives: + canonicalized_suffix_map[a] = suffix + + # A map from moz.build variables to the canonical suffixes of file + # kinds that can be listed therein. 
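+        # (Editor's illustration of the lookup just built; a sketch of
+        # expected values, not upstream code.)
+        #
+        #     assert canonicalized_suffix_map[".cc"] == ".cpp"
+        #     assert canonicalized_suffix_map[".cxx"] == ".cpp"
+        #     assert canonicalized_suffix_map[".asm"] == ".s"
+        #     # So a .cc file listed in HOST_SOURCES is accepted, because
+        #     # its canonical suffix ".cpp" is in that variable's allowed
+        #     # suffix list below.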
+ all_suffixes = list(suffix_map.keys()) + varmap = dict( + SOURCES=(Sources, all_suffixes), + HOST_SOURCES=(HostSources, [".c", ".mm", ".cpp"]), + UNIFIED_SOURCES=(UnifiedSources, [".c", ".mm", ".m", ".cpp"]), + ) + # Only include a WasmSources context if there are any WASM_SOURCES. + # (This is going to matter later because we inject an extra .c file to + # compile with the wasm compiler if, and only if, there are any WASM + # sources.) + if sources["WASM_SOURCES"] or gen_sources["WASM_SOURCES"]: + varmap["WASM_SOURCES"] = (WasmSources, [".c", ".cpp"]) + # Track whether there are any C++ source files. + # Technically this won't do the right thing for SIMPLE_PROGRAMS in + # a directory with mixed C and C++ source, but it's not that important. + cxx_sources = defaultdict(bool) + + # Source files to track for linkables associated with this context. + ctxt_sources = defaultdict(lambda: defaultdict(list)) + + for variable, (klass, suffixes) in varmap.items(): + # Group static and generated files by their canonical suffixes, and + # ensure we haven't been given filetypes that we don't recognize. + by_canonical_suffix = defaultdict(lambda: {"static": [], "generated": []}) + for srcs, key in ( + (sources[variable], "static"), + (gen_sources[variable], "generated"), + ): + for f in srcs: + canonical_suffix = canonicalized_suffix_map.get( + mozpath.splitext(f)[1] + ) + if canonical_suffix not in suffixes: + raise SandboxValidationError( + "%s has an unknown file type." % f, context + ) + by_canonical_suffix[canonical_suffix][key].append(f) + + # Yield an object for each canonical suffix, grouping generated and + # static sources together to allow them to be unified together. + for canonical_suffix in sorted(by_canonical_suffix.keys()): + if canonical_suffix in (".cpp", ".mm"): + cxx_sources[variable] = True + elif canonical_suffix in (".s", ".S"): + self._asm_compile_dirs.add(context.objdir) + src_group = by_canonical_suffix[canonical_suffix] + obj = klass( + context, + src_group["static"], + src_group["generated"], + canonical_suffix, + ) + srcs = list(obj.files) + if isinstance(obj, UnifiedSources) and obj.have_unified_mapping: + srcs = sorted(dict(obj.unified_source_mapping).keys()) + ctxt_sources[variable][canonical_suffix] += srcs + yield obj + + if ctxt_sources: + for linkable in linkables: + for target_var in ("SOURCES", "UNIFIED_SOURCES"): + for suffix, srcs in ctxt_sources[target_var].items(): + linkable.sources[suffix] += srcs + for host_linkable in host_linkables: + for suffix, srcs in ctxt_sources["HOST_SOURCES"].items(): + host_linkable.sources[suffix] += srcs + for wasm_linkable in wasm_linkables: + for suffix, srcs in ctxt_sources["WASM_SOURCES"].items(): + wasm_linkable.sources[suffix] += srcs + + for f, flags in sorted(six.iteritems(all_flags)): + if flags.flags: + ext = mozpath.splitext(f)[1] + yield PerSourceFlag(context, f, flags.flags) + + # If there are any C++ sources, set all the linkables defined here + # to require the C++ linker. + for vars, linkable_items in ( + (("SOURCES", "UNIFIED_SOURCES"), linkables), + (("HOST_SOURCES",), host_linkables), + ): + for var in vars: + if cxx_sources[var]: + for l in linkable_items: + l.cxx_link = True + break + + def emit_from_context(self, context): + """Convert a Context to tree metadata objects. + + This is a generator of mozbuild.frontend.data.ContextDerived instances. + """ + + # We only want to emit an InstallationTarget if one of the consulted + # variables is defined. 
Later on, we look up FINAL_TARGET, which has + # the side-effect of populating it. So, we need to do this lookup + # early. + if any(k in context for k in ("FINAL_TARGET", "XPI_NAME", "DIST_SUBDIR")): + yield InstallationTarget(context) + + # We always emit a directory traversal descriptor. This is needed by + # the recursive make backend. + for o in self._emit_directory_traversal_from_context(context): + yield o + + for obj in self._process_xpidl(context): + yield obj + + computed_flags = ComputedFlags(context, context["COMPILE_FLAGS"]) + computed_link_flags = ComputedFlags(context, context["LINK_FLAGS"]) + computed_host_flags = ComputedFlags(context, context["HOST_COMPILE_FLAGS"]) + computed_as_flags = ComputedFlags(context, context["ASM_FLAGS"]) + computed_wasm_flags = ComputedFlags(context, context["WASM_FLAGS"]) + + # Proxy some variables as-is until we have richer classes to represent + # them. We should aim to keep this set small because it violates the + # desired abstraction of the build definition away from makefiles. + passthru = VariablePassthru(context) + varlist = [ + "EXTRA_DSO_LDOPTS", + "RCFILE", + "RCINCLUDE", + "WIN32_EXE_LDFLAGS", + "USE_EXTENSION_MANIFEST", + ] + for v in varlist: + if v in context and context[v]: + passthru.variables[v] = context[v] + + if ( + context.config.substs.get("OS_TARGET") == "WINNT" + and context["DELAYLOAD_DLLS"] + ): + if context.config.substs.get("CC_TYPE") != "clang": + context["LDFLAGS"].extend( + [("-DELAYLOAD:%s" % dll) for dll in context["DELAYLOAD_DLLS"]] + ) + else: + context["LDFLAGS"].extend( + [ + ("-Wl,-Xlink=-DELAYLOAD:%s" % dll) + for dll in context["DELAYLOAD_DLLS"] + ] + ) + context["OS_LIBS"].append("delayimp") + + for v in ["CMFLAGS", "CMMFLAGS"]: + if v in context and context[v]: + passthru.variables["MOZBUILD_" + v] = context[v] + + for v in ["CXXFLAGS", "CFLAGS"]: + if v in context and context[v]: + computed_flags.resolve_flags("MOZBUILD_%s" % v, context[v]) + + for v in ["WASM_CFLAGS", "WASM_CXXFLAGS"]: + if v in context and context[v]: + computed_wasm_flags.resolve_flags("MOZBUILD_%s" % v, context[v]) + + for v in ["HOST_CXXFLAGS", "HOST_CFLAGS"]: + if v in context and context[v]: + computed_host_flags.resolve_flags("MOZBUILD_%s" % v, context[v]) + + if "LDFLAGS" in context and context["LDFLAGS"]: + computed_link_flags.resolve_flags("MOZBUILD", context["LDFLAGS"]) + + deffile = context.get("DEFFILE") + if deffile and context.config.substs.get("OS_TARGET") == "WINNT": + if isinstance(deffile, SourcePath): + if not os.path.exists(deffile.full_path): + raise SandboxValidationError( + "Path specified in DEFFILE does not exist: %s " + "(resolved to %s)" % (deffile, deffile.full_path), + context, + ) + path = mozpath.relpath(deffile.full_path, context.objdir) + else: + path = deffile.target_basename + + if context.config.substs.get("GNU_CC"): + computed_link_flags.resolve_flags("DEFFILE", [path]) + else: + computed_link_flags.resolve_flags("DEFFILE", ["-DEF:" + path]) + + dist_install = context["DIST_INSTALL"] + if dist_install is True: + passthru.variables["DIST_INSTALL"] = True + elif dist_install is False: + passthru.variables["NO_DIST_INSTALL"] = True + + # Ideally, this should be done in templates, but this is difficult at + # the moment because USE_STATIC_LIBS can be set after a template + # returns. Eventually, with context-based templates, it will be + # possible. 
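+        # (Editor's note: the MSVC runtime-library flag matrix computed
+        # below, as a standalone sketch; the helper name is hypothetical,
+        # not upstream code.)
+        #
+        #     def msvc_rtl_flag(use_static_libs, debug_rtl):
+        #         flag = "-MT" if use_static_libs else "-MD"  # static vs. DLL CRT
+        #         return flag + ("d" if debug_rtl else "")    # debug CRT variant
+        #
+        #     assert msvc_rtl_flag(True, False) == "-MT"
+        #     assert msvc_rtl_flag(False, False) == "-MD"
+        #     assert msvc_rtl_flag(False, True) == "-MDd"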
+ if context.config.substs.get( + "OS_ARCH" + ) == "WINNT" and not context.config.substs.get("GNU_CC"): + use_static_lib = context.get( + "USE_STATIC_LIBS" + ) and not context.config.substs.get("MOZ_ASAN") + rtl_flag = "-MT" if use_static_lib else "-MD" + if context.config.substs.get("MOZ_DEBUG") and not context.config.substs.get( + "MOZ_NO_DEBUG_RTL" + ): + rtl_flag += "d" + computed_flags.resolve_flags("RTL", [rtl_flag]) + if not context.config.substs.get("CROSS_COMPILE"): + computed_host_flags.resolve_flags("RTL", [rtl_flag]) + + generated_files = set() + localized_generated_files = set() + for obj in self._process_generated_files(context): + for f in obj.outputs: + generated_files.add(f) + if obj.localized: + localized_generated_files.add(f) + yield obj + + for path in context["CONFIGURE_SUBST_FILES"]: + sub = self._create_substitution(ConfigFileSubstitution, context, path) + generated_files.add(str(sub.relpath)) + yield sub + + for defines_var, cls, backend_flags in ( + ("DEFINES", Defines, (computed_flags, computed_as_flags)), + ("HOST_DEFINES", HostDefines, (computed_host_flags,)), + ("WASM_DEFINES", WasmDefines, (computed_wasm_flags,)), + ): + defines = context.get(defines_var) + if defines: + defines_obj = cls(context, defines) + if isinstance(defines_obj, Defines): + # DEFINES have consumers outside the compile command line, + # HOST_DEFINES do not. + yield defines_obj + else: + # If we don't have explicitly set defines we need to make sure + # initialized values if present end up in computed flags. + defines_obj = cls(context, context[defines_var]) + + defines_from_obj = list(defines_obj.get_defines()) + if defines_from_obj: + for flags in backend_flags: + flags.resolve_flags(defines_var, defines_from_obj) + + idl_vars = ( + "GENERATED_EVENTS_WEBIDL_FILES", + "GENERATED_WEBIDL_FILES", + "PREPROCESSED_TEST_WEBIDL_FILES", + "PREPROCESSED_WEBIDL_FILES", + "TEST_WEBIDL_FILES", + "WEBIDL_FILES", + "IPDL_SOURCES", + "PREPROCESSED_IPDL_SOURCES", + "XPCOM_MANIFESTS", + ) + for context_var in idl_vars: + for name in context.get(context_var, []): + self._idls[context_var].add(mozpath.join(context.srcdir, name)) + # WEBIDL_EXAMPLE_INTERFACES do not correspond to files. 
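+        # (Editor's illustration: the file-backed IDL variables above are
+        # recorded as absolute srcdir paths, while the example interfaces
+        # handled below stay bare names; the path here is hypothetical.)
+        #
+        #     import mozpack.path as mozpath
+        #     assert (mozpath.join("/src/dom/webidl", "Foo.webidl")
+        #             == "/src/dom/webidl/Foo.webidl")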
+ for name in context.get("WEBIDL_EXAMPLE_INTERFACES", []): + self._idls["WEBIDL_EXAMPLE_INTERFACES"].add(name) + + local_includes = [] + for local_include in context.get("LOCAL_INCLUDES", []): + full_path = local_include.full_path + if not isinstance(local_include, ObjDirPath): + if not os.path.exists(full_path): + raise SandboxValidationError( + "Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)" + % (local_include, full_path), + context, + ) + if not os.path.isdir(full_path): + raise SandboxValidationError( + "Path specified in LOCAL_INCLUDES " + "is a filename, but a directory is required: %s " + "(resolved to %s)" % (local_include, full_path), + context, + ) + if ( + full_path == context.config.topsrcdir + or full_path == context.config.topobjdir + ): + raise SandboxValidationError( + "Path specified in LOCAL_INCLUDES " + "(%s) resolves to the topsrcdir or topobjdir (%s), which is " + "not allowed" % (local_include, full_path), + context, + ) + include_obj = LocalInclude(context, local_include) + local_includes.append(include_obj.path.full_path) + yield include_obj + + computed_flags.resolve_flags( + "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes] + ) + computed_as_flags.resolve_flags( + "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes] + ) + computed_host_flags.resolve_flags( + "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes] + ) + computed_wasm_flags.resolve_flags( + "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes] + ) + + for obj in self._handle_linkables(context, passthru, generated_files): + yield obj + + generated_files.update( + [ + "%s%s" % (k, self.config.substs.get("BIN_SUFFIX", "")) + for k in self._binaries.keys() + ] + ) + + components = [] + for var, cls in ( + ("EXPORTS", Exports), + ("FINAL_TARGET_FILES", FinalTargetFiles), + ("FINAL_TARGET_PP_FILES", FinalTargetPreprocessedFiles), + ("LOCALIZED_FILES", LocalizedFiles), + ("LOCALIZED_PP_FILES", LocalizedPreprocessedFiles), + ("OBJDIR_FILES", ObjdirFiles), + ("OBJDIR_PP_FILES", ObjdirPreprocessedFiles), + ("TEST_HARNESS_FILES", TestHarnessFiles), + ): + all_files = context.get(var) + if not all_files: + continue + if dist_install is False and var != "TEST_HARNESS_FILES": + raise SandboxValidationError( + "%s cannot be used with DIST_INSTALL = False" % var, context + ) + has_prefs = False + has_resources = False + for base, files in all_files.walk(): + if var == "TEST_HARNESS_FILES" and not base: + raise SandboxValidationError( + "Cannot install files to the root of TEST_HARNESS_FILES", + context, + ) + if base == "components": + components.extend(files) + if base == "defaults/pref": + has_prefs = True + if mozpath.split(base)[0] == "res": + has_resources = True + for f in files: + if ( + var + in ( + "FINAL_TARGET_PP_FILES", + "OBJDIR_PP_FILES", + "LOCALIZED_PP_FILES", + ) + and not isinstance(f, SourcePath) + ): + raise SandboxValidationError( + ("Only source directory paths allowed in " + "%s: %s") + % (var, f), + context, + ) + if var.startswith("LOCALIZED_"): + if isinstance(f, SourcePath): + if f.startswith("en-US/"): + pass + elif "locales/en-US/" in f: + pass + else: + raise SandboxValidationError( + "%s paths must start with `en-US/` or " + "contain `locales/en-US/`: %s" % (var, f), + context, + ) + + if not isinstance(f, ObjDirPath): + path = f.full_path + if "*" not in path and not os.path.exists(path): + raise SandboxValidationError( + "File listed in %s does not exist: %s" % (var, path), + context, + ) + else: + # TODO: Bug 1254682 - The '/' check is to allow + # 
installing files generated from other directories, + # which is done occasionally for tests. However, it + # means we don't fail early if the file isn't actually + # created by the other moz.build file. + if f.target_basename not in generated_files and "/" not in f: + raise SandboxValidationError( + ( + "Objdir file listed in %s not in " + + "GENERATED_FILES: %s" + ) + % (var, f), + context, + ) + + if var.startswith("LOCALIZED_"): + # Further require that LOCALIZED_FILES are from + # LOCALIZED_GENERATED_FILES. + if f.target_basename not in localized_generated_files: + raise SandboxValidationError( + ( + "Objdir file listed in %s not in " + + "LOCALIZED_GENERATED_FILES: %s" + ) + % (var, f), + context, + ) + else: + # Additionally, don't allow LOCALIZED_GENERATED_FILES to be used + # in anything *but* LOCALIZED_FILES. + if f.target_basename in localized_generated_files: + raise SandboxValidationError( + ( + "Outputs of LOCALIZED_GENERATED_FILES cannot " + "be used in %s: %s" + ) + % (var, f), + context, + ) + + # Addons (when XPI_NAME is defined) and Applications (when + # DIST_SUBDIR is defined) use a different preferences directory + # (default/preferences) from the one the GRE uses (defaults/pref). + # Hence, we move the files from the latter to the former in that + # case. + if has_prefs and (context.get("XPI_NAME") or context.get("DIST_SUBDIR")): + all_files.defaults.preferences += all_files.defaults.pref + del all_files.defaults._children["pref"] + + if has_resources and ( + context.get("DIST_SUBDIR") or context.get("XPI_NAME") + ): + raise SandboxValidationError( + "RESOURCES_FILES cannot be used with DIST_SUBDIR or " "XPI_NAME.", + context, + ) + + yield cls(context, all_files) + + for c in components: + if c.endswith(".manifest"): + yield ChromeManifestEntry( + context, + "chrome.manifest", + Manifest("components", mozpath.basename(c)), + ) + + rust_tests = context.get("RUST_TESTS", []) + if rust_tests: + # TODO: more sophisticated checking of the declared name vs. + # contents of the Cargo.toml file. 
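+            # (Editor's sketch of the kind of check the TODO above asks
+            # for; hypothetical, not upstream code: compare RUST_TESTS
+            # names against the [[test]] sections of the crate's
+            # Cargo.toml.)
+            #
+            #     config, _ = self._parse_cargo_file(context)
+            #     declared = {t["name"] for t in config.get("test", [])}
+            #     for name in rust_tests:
+            #         if name not in declared:
+            #             raise SandboxValidationError(
+            #                 "No [[test]] named %s in Cargo.toml" % name,
+            #                 context,
+            #             )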
+ features = context.get("RUST_TEST_FEATURES", []) + + yield RustTests(context, rust_tests, features) + + for obj in self._process_test_manifests(context): + yield obj + + for obj in self._process_jar_manifests(context): + yield obj + + computed_as_flags.resolve_flags("MOZBUILD", context.get("ASFLAGS")) + + if context.get("USE_NASM") is True: + nasm = context.config.substs.get("NASM") + if not nasm: + raise SandboxValidationError("nasm is not available", context) + passthru.variables["AS"] = nasm + passthru.variables["AS_DASH_C_FLAG"] = "" + passthru.variables["ASOUTOPTION"] = "-o " + computed_as_flags.resolve_flags( + "OS", context.config.substs.get("NASM_ASFLAGS", []) + ) + + if context.get("USE_INTEGRATED_CLANGCL_AS") is True: + if context.config.substs.get("CC_TYPE") != "clang-cl": + raise SandboxValidationError("clang-cl is not available", context) + passthru.variables["AS"] = context.config.substs.get("CC") + passthru.variables["AS_DASH_C_FLAG"] = "-c" + passthru.variables["ASOUTOPTION"] = "-o " + + if passthru.variables: + yield passthru + + if context.objdir in self._compile_dirs: + self._compile_flags[context.objdir] = computed_flags + yield computed_link_flags + + if context.objdir in self._asm_compile_dirs: + self._compile_as_flags[context.objdir] = computed_as_flags + + if context.objdir in self._host_compile_dirs: + yield computed_host_flags + + if context.objdir in self._wasm_compile_dirs: + yield computed_wasm_flags + + def _create_substitution(self, cls, context, path): + sub = cls(context) + sub.input_path = "%s.in" % path.full_path + sub.output_path = path.translated + sub.relpath = path + + return sub + + def _process_xpidl(self, context): + # XPIDL source files get processed and turned into .h and .xpt files. + # If there are multiple XPIDL files in a directory, they get linked + # together into a final .xpt, which has the name defined by + # XPIDL_MODULE. 
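+        # (Editor's illustration: a typical moz.build declaration; the
+        # file and module names are hypothetical.)
+        #
+        #     XPIDL_SOURCES += ["nsIFoo.idl", "nsIBar.idl"]
+        #     XPIDL_MODULE = "foobar"
+        #     # -> nsIFoo.h and nsIBar.h, linked together into foobar.xpt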
+ xpidl_module = context["XPIDL_MODULE"] + + if not xpidl_module: + if context["XPIDL_SOURCES"]: + raise SandboxValidationError( + "XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.", + context, + ) + return + + if not context["XPIDL_SOURCES"]: + raise SandboxValidationError( + "XPIDL_MODULE cannot be defined " "unless there are XPIDL_SOURCES", + context, + ) + + if context["DIST_INSTALL"] is False: + self.log( + logging.WARN, + "mozbuild_warning", + dict(path=context.main_path), + "{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.", + ) + + for idl in context["XPIDL_SOURCES"]: + if not os.path.exists(idl.full_path): + raise SandboxValidationError( + "File %s from XPIDL_SOURCES " "does not exist" % idl.full_path, + context, + ) + + yield XPIDLModule(context, xpidl_module, context["XPIDL_SOURCES"]) + + def _process_generated_files(self, context): + for path in context["CONFIGURE_DEFINE_FILES"]: + script = mozpath.join( + mozpath.dirname(mozpath.dirname(__file__)), + "action", + "process_define_files.py", + ) + yield GeneratedFile( + context, + script, + "process_define_file", + six.text_type(path), + [Path(context, path + ".in")], + ) + + generated_files = context.get("GENERATED_FILES") or [] + localized_generated_files = context.get("LOCALIZED_GENERATED_FILES") or [] + if not (generated_files or localized_generated_files): + return + + for (localized, gen) in ( + (False, generated_files), + (True, localized_generated_files), + ): + for f in gen: + flags = gen[f] + outputs = f + inputs = [] + if flags.script: + method = "main" + script = SourcePath(context, flags.script).full_path + + # Deal with cases like "C:\\path\\to\\script.py:function". + if ".py:" in script: + script, method = script.rsplit(".py:", 1) + script += ".py" + + if not os.path.exists(script): + raise SandboxValidationError( + "Script for generating %s does not exist: %s" % (f, script), + context, + ) + if os.path.splitext(script)[1] != ".py": + raise SandboxValidationError( + "Script for generating %s does not end in .py: %s" + % (f, script), + context, + ) + else: + script = None + method = None + + for i in flags.inputs: + p = Path(context, i) + if isinstance(p, SourcePath) and not os.path.exists(p.full_path): + raise SandboxValidationError( + "Input for generating %s does not exist: %s" + % (f, p.full_path), + context, + ) + inputs.append(p) + + yield GeneratedFile( + context, + script, + method, + outputs, + inputs, + flags.flags, + localized=localized, + force=flags.force, + ) + + def _process_test_manifests(self, context): + for prefix, info in TEST_MANIFESTS.items(): + for path, manifest in context.get("%s_MANIFESTS" % prefix, []): + for obj in self._process_test_manifest(context, info, path, manifest): + yield obj + + for flavor in REFTEST_FLAVORS: + for path, manifest in context.get("%s_MANIFESTS" % flavor.upper(), []): + for obj in self._process_reftest_manifest( + context, flavor, path, manifest + ): + yield obj + + def _process_test_manifest(self, context, info, manifest_path, mpmanifest): + flavor, install_root, install_subdir, package_tests = info + + path = manifest_path.full_path + manifest_dir = mozpath.dirname(path) + manifest_reldir = mozpath.dirname( + mozpath.relpath(path, context.config.topsrcdir) + ) + manifest_sources = [ + mozpath.relpath(pth, context.config.topsrcdir) + for pth in mpmanifest.source_files + ] + install_prefix = mozpath.join(install_root, install_subdir) + + try: + if not mpmanifest.tests: + raise SandboxValidationError("Empty test manifest: %s" % path, 
context) + + defaults = mpmanifest.manifest_defaults[os.path.normpath(path)] + obj = TestManifest( + context, + path, + mpmanifest, + flavor=flavor, + install_prefix=install_prefix, + relpath=mozpath.join(manifest_reldir, mozpath.basename(path)), + sources=manifest_sources, + dupe_manifest="dupe-manifest" in defaults, + ) + + filtered = mpmanifest.tests + + missing = [t["name"] for t in filtered if not os.path.exists(t["path"])] + if missing: + raise SandboxValidationError( + "Test manifest (%s) lists " + "test that does not exist: %s" % (path, ", ".join(missing)), + context, + ) + + out_dir = mozpath.join(install_prefix, manifest_reldir) + + def process_support_files(test): + install_info = self._test_files_converter.convert_support_files( + test, install_root, manifest_dir, out_dir + ) + + obj.pattern_installs.extend(install_info.pattern_installs) + for source, dest in install_info.installs: + obj.installs[source] = (dest, False) + obj.external_installs |= install_info.external_installs + for install_path in install_info.deferred_installs: + if all( + [ + "*" not in install_path, + not os.path.isfile( + mozpath.join(context.config.topsrcdir, install_path[2:]) + ), + install_path not in install_info.external_installs, + ] + ): + raise SandboxValidationError( + "Error processing test " + "manifest %s: entry in support-files not present " + "in the srcdir: %s" % (path, install_path), + context, + ) + + obj.deferred_installs |= install_info.deferred_installs + + for test in filtered: + obj.tests.append(test) + + # Some test files are compiled and should not be copied into the + # test package. They function as identifiers rather than files. + if package_tests: + manifest_relpath = mozpath.relpath( + test["path"], mozpath.dirname(test["manifest"]) + ) + obj.installs[mozpath.normpath(test["path"])] = ( + (mozpath.join(out_dir, manifest_relpath)), + True, + ) + + process_support_files(test) + + for path, m_defaults in mpmanifest.manifest_defaults.items(): + process_support_files(m_defaults) + + # We also copy manifests into the output directory, + # including manifests from [include:foo] directives. + for mpath in mpmanifest.manifests(): + mpath = mozpath.normpath(mpath) + out_path = mozpath.join(out_dir, mozpath.basename(mpath)) + obj.installs[mpath] = (out_path, False) + + # Some manifests reference files that are auto generated as + # part of the build or shouldn't be installed for some + # reason. Here, we prune those files from the install set. + # FUTURE we should be able to detect autogenerated files from + # other build metadata. Once we do that, we can get rid of this. + for f in defaults.get("generated-files", "").split(): + # We re-raise otherwise the stack trace isn't informative. + try: + del obj.installs[mozpath.join(manifest_dir, f)] + except KeyError: + raise SandboxValidationError( + "Error processing test " + "manifest %s: entry in generated-files not present " + "elsewhere in manifest: %s" % (path, f), + context, + ) + + yield obj + except (AssertionError, Exception): + raise SandboxValidationError( + "Error processing test " + "manifest file %s: %s" + % (path, "\n".join(traceback.format_exception(*sys.exc_info()))), + context, + ) + + def _process_reftest_manifest(self, context, flavor, manifest_path, manifest): + manifest_full_path = manifest_path.full_path + manifest_reldir = mozpath.dirname( + mozpath.relpath(manifest_full_path, context.config.topsrcdir) + ) + + # reftest manifests don't come from manifest parser. 
But they are
+ # similar enough that we can use the same emitted objects. Note
+ # that we don't perform any installs for reftests.
+ obj = TestManifest(
+ context,
+ manifest_full_path,
+ manifest,
+ flavor=flavor,
+ install_prefix="%s/" % flavor,
+ relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path)),
+ )
+ obj.tests = list(sorted(manifest.tests, key=lambda t: t["path"]))
+
+ yield obj
+
+ def _process_jar_manifests(self, context):
+ jar_manifests = context.get("JAR_MANIFESTS", [])
+ if len(jar_manifests) > 1:
+ raise SandboxValidationError(
+ "While JAR_MANIFESTS is a list, "
+ "it is currently limited to one value.",
+ context,
+ )
+
+ for path in jar_manifests:
+ yield JARManifest(context, path)
+
+ # Temporary test to look for jar.mn files that creep in without using
+ # the new declaration. Before, we didn't require jar.mn files to be
+ # declared anywhere (they were discovered). This will detect people
+ # relying on the old behavior.
+ if os.path.exists(os.path.join(context.srcdir, "jar.mn")):
+ if "jar.mn" not in jar_manifests:
+ raise SandboxValidationError(
+ "A jar.mn exists but it "
+ "is not referenced in the moz.build file. "
+ "Please define JAR_MANIFESTS.",
+ context,
+ )
+
+ def _emit_directory_traversal_from_context(self, context):
+ o = DirectoryTraversal(context)
+ o.dirs = context.get("DIRS", [])
+
+ # Some paths have a subconfigure, yet also have a moz.build. Those
+ # shouldn't end up in self._external_paths.
+ if o.objdir:
+ self._external_paths -= {o.relobjdir}
+
+ yield o
diff --git a/python/mozbuild/mozbuild/frontend/gyp_reader.py b/python/mozbuild/mozbuild/frontend/gyp_reader.py
new file mode 100644
index 0000000000..cd69dfddce
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -0,0 +1,497 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import time
+
+import gyp
+import gyp.msvs_emulation
+import mozpack.path as mozpath
+import six
+from mozpack.files import FileFinder
+
+from mozbuild import shellutil
+from mozbuild.util import expand_variables
+
+from .context import VARIABLES, ObjDirPath, SourcePath, TemplateContext
+from .sandbox import alphabetical_sorted
+
+# Define this module as gyp.generator.mozbuild so that gyp can use it
+# as a generator under the name "mozbuild".
+sys.modules["gyp.generator.mozbuild"] = sys.modules[__name__]
+
+# build/gyp_chromium does this:
+# script_dir = os.path.dirname(os.path.realpath(__file__))
+# chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+# sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+# We're not importing gyp_chromium, but we want both script_dir and
+# chrome_src for the default includes, so go backwards from the pylib
+# directory, which is the parent directory of gyp module.
+chrome_src = mozpath.abspath(
+ mozpath.join(mozpath.dirname(gyp.__file__), "../../../../..")
+)
+script_dir = mozpath.join(chrome_src, "build")
+
+
+# Default variables gyp uses when evaluating gyp files.
+generator_default_variables = {}
+for dirname in [
+ "INTERMEDIATE_DIR",
+ "SHARED_INTERMEDIATE_DIR",
+ "PRODUCT_DIR",
+ "LIB_DIR",
+ "SHARED_LIB_DIR",
+]:
+ # Some gyp steps fail if these are empty(!).
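+ # For example, generator_default_variables["PRODUCT_DIR"] ends up as the
+ # literal string "$PRODUCT_DIR"; helpers below such as handle_actions()
+ # and handle_copies() match on these placeholder prefixes when mapping
+ # gyp paths back to moz.build equivalents.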
+ generator_default_variables[dirname] = "$" + dirname + +for unused in [ + "RULE_INPUT_PATH", + "RULE_INPUT_ROOT", + "RULE_INPUT_NAME", + "RULE_INPUT_DIRNAME", + "RULE_INPUT_EXT", + "EXECUTABLE_PREFIX", + "EXECUTABLE_SUFFIX", + "STATIC_LIB_PREFIX", + "STATIC_LIB_SUFFIX", + "SHARED_LIB_PREFIX", + "SHARED_LIB_SUFFIX", + "LINKER_SUPPORTS_ICF", +]: + generator_default_variables[unused] = "" + + +class GypContext(TemplateContext): + """Specialized Context for use with data extracted from Gyp. + + config is the ConfigEnvironment for this context. + relobjdir is the object directory that will be used for this context, + relative to the topobjdir defined in the ConfigEnvironment. + """ + + def __init__(self, config, relobjdir): + self._relobjdir = relobjdir + TemplateContext.__init__( + self, template="Gyp", allowed_variables=VARIABLES, config=config + ) + + +def handle_actions(actions, context, action_overrides): + idir = "$INTERMEDIATE_DIR/" + for action in actions: + name = action["action_name"] + if name not in action_overrides: + raise RuntimeError("GYP action %s not listed in action_overrides" % name) + outputs = action["outputs"] + if len(outputs) > 1: + raise NotImplementedError( + "GYP actions with more than one output not supported: %s" % name + ) + output = outputs[0] + if not output.startswith(idir): + raise NotImplementedError( + "GYP actions outputting to somewhere other than " + "<(INTERMEDIATE_DIR) not supported: %s" % output + ) + output = output[len(idir) :] + context["GENERATED_FILES"] += [output] + g = context["GENERATED_FILES"][output] + g.script = action_overrides[name] + g.inputs = action["inputs"] + + +def handle_copies(copies, context): + dist = "$PRODUCT_DIR/dist/" + for copy in copies: + dest = copy["destination"] + if not dest.startswith(dist): + raise NotImplementedError( + "GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s" + % dest + ) + dest_paths = dest[len(dist) :].split("/") + exports = context["EXPORTS"] + while dest_paths: + exports = getattr(exports, dest_paths.pop(0)) + exports += sorted(copy["files"], key=lambda x: x.lower()) + + +def process_gyp_result( + gyp_result, + gyp_dir_attrs, + path, + config, + output, + non_unified_sources, + action_overrides, +): + flat_list, targets, data = gyp_result + no_chromium = gyp_dir_attrs.no_chromium + no_unified = gyp_dir_attrs.no_unified + + # Process all targets from the given gyp files and its dependencies. + # The path given to AllTargets needs to use os.sep, while the frontend code + # gives us paths normalized with forward slash separator. + for target in sorted( + gyp.common.AllTargets(flat_list, targets, path.replace("/", os.sep)) + ): + build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target) + + # Each target is given its own objdir. The base of that objdir + # is derived from the relative path from the root gyp file path + # to the current build_file, placed under the given output + # directory. Since several targets can be in a given build_file, + # separate them in subdirectories using the build_file basename + # and the target_name. + reldir = mozpath.relpath(mozpath.dirname(build_file), mozpath.dirname(path)) + subdir = "%s_%s" % ( + mozpath.splitext(mozpath.basename(build_file))[0], + target_name, + ) + # Emit a context for each target. 
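+ # Hypothetical example: for build_file "trunk/peer.gyp" and target
+ # "connection", subdir is "peer_connection", so the context's objdir
+ # becomes <output>/<reldir>/peer_connection relative to topobjdir.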
+ context = GypContext( + config, + mozpath.relpath(mozpath.join(output, reldir, subdir), config.topobjdir), + ) + context.add_source(mozpath.abspath(build_file)) + # The list of included files returned by gyp are relative to build_file + for f in data[build_file]["included_files"]: + context.add_source( + mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)) + ) + + spec = targets[target] + + # Derive which gyp configuration to use based on MOZ_DEBUG. + c = "Debug" if config.substs.get("MOZ_DEBUG") else "Release" + if c not in spec["configurations"]: + raise RuntimeError( + "Missing %s gyp configuration for target %s " + "in %s" % (c, target_name, build_file) + ) + target_conf = spec["configurations"][c] + + if "actions" in spec: + handle_actions(spec["actions"], context, action_overrides) + if "copies" in spec: + handle_copies(spec["copies"], context) + + use_libs = [] + libs = [] + + def add_deps(s): + for t in s.get("dependencies", []) + s.get("dependencies_original", []): + ty = targets[t]["type"] + if ty in ("static_library", "shared_library"): + l = targets[t]["target_name"] + if l not in use_libs: + use_libs.append(l) + # Manually expand out transitive dependencies-- + # gyp won't do this for static libs or none targets. + if ty in ("static_library", "none"): + add_deps(targets[t]) + libs.extend(spec.get("libraries", [])) + + # XXX: this sucks, but webrtc breaks with this right now because + # it builds a library called 'gtest' and we just get lucky + # that it isn't in USE_LIBS by that name anywhere. + if no_chromium: + add_deps(spec) + + os_libs = [] + for l in libs: + if l.startswith("-"): + if l.startswith("-l"): + # Remove "-l" for consumption in OS_LIBS. Other flags + # are passed through unchanged. + l = l[2:] + if l not in os_libs: + os_libs.append(l) + elif l.endswith(".lib"): + l = l[:-4] + if l not in os_libs: + os_libs.append(l) + elif l: + # For library names passed in from moz.build. + l = os.path.basename(l) + if l not in use_libs: + use_libs.append(l) + + if spec["type"] == "none": + if not ("actions" in spec or "copies" in spec): + continue + elif spec["type"] in ("static_library", "shared_library", "executable"): + # Remove leading 'lib' from the target_name if any, and use as + # library name. + name = six.ensure_text(spec["target_name"]) + if spec["type"] in ("static_library", "shared_library"): + if name.startswith("lib"): + name = name[3:] + context["LIBRARY_NAME"] = name + else: + context["PROGRAM"] = name + if spec["type"] == "shared_library": + context["FORCE_SHARED_LIB"] = True + elif ( + spec["type"] == "static_library" + and spec.get("variables", {}).get("no_expand_libs", "0") == "1" + ): + # PSM links a NSS static library, but our folded libnss + # doesn't actually export everything that all of the + # objects within would need, so that one library + # should be built as a real static library. + context["NO_EXPAND_LIBS"] = True + if use_libs: + context["USE_LIBS"] = sorted(use_libs, key=lambda s: s.lower()) + if os_libs: + context["OS_LIBS"] = os_libs + # gyp files contain headers and asm sources in sources lists. 
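+ # The loop below sorts them out: ".h" files are skipped, ".def" files
+ # become SYMBOLS_FILE, ".S" files and entries in non_unified_sources
+ # stay in SOURCES, and everything else is eligible for UNIFIED_SOURCES
+ # unless no_unified is set.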
+ sources = [] + unified_sources = [] + extensions = set() + use_defines_in_asflags = False + for f in spec.get("sources", []): + ext = mozpath.splitext(f)[-1] + extensions.add(ext) + if f.startswith("$INTERMEDIATE_DIR/"): + s = ObjDirPath(context, f.replace("$INTERMEDIATE_DIR/", "!")) + else: + s = SourcePath(context, f) + if ext == ".h": + continue + if ext == ".def": + context["SYMBOLS_FILE"] = s + elif ext != ".S" and not no_unified and s not in non_unified_sources: + unified_sources.append(s) + else: + sources.append(s) + # The Mozilla build system doesn't use DEFINES for building + # ASFILES. + if ext == ".s": + use_defines_in_asflags = True + + # The context expects alphabetical order when adding sources + context["SOURCES"] = alphabetical_sorted(sources) + context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources) + + defines = target_conf.get("defines", []) + if config.substs["CC_TYPE"] == "clang-cl" and no_chromium: + msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {}) + # Hack: MsvsSettings._TargetConfig tries to compare a str to an int, + # so convert manually. + msvs_settings.vs_version.short_name = int( + msvs_settings.vs_version.short_name + ) + defines.extend(msvs_settings.GetComputedDefines(c)) + for define in defines: + if "=" in define: + name, value = define.split("=", 1) + context["DEFINES"][name] = value + else: + context["DEFINES"][define] = True + + product_dir_dist = "$PRODUCT_DIR/dist/" + for include in target_conf.get("include_dirs", []): + if include.startswith(product_dir_dist): + # special-case includes of <(PRODUCT_DIR)/dist/ to match + # handle_copies above. This is used for NSS' exports. + include = "!/dist/include/" + include[len(product_dir_dist) :] + elif include.startswith(config.topobjdir): + # NSPR_INCLUDE_DIR gets passed into the NSS build this way. + include = "!/" + mozpath.relpath(include, config.topobjdir) + else: + # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do. + # + # NB: gyp files sometimes have actual absolute paths (e.g. + # /usr/include32) and sometimes paths that moz.build considers + # absolute, i.e. starting from topsrcdir. There's no good way + # to tell them apart here, and the actual absolute paths are + # likely bogus. In any event, actual absolute paths will be + # filtered out by trying to find them in topsrcdir. + # + # We do allow !- and %-prefixed paths, assuming they come + # from moz.build and will be handled the same way as if they + # were given to LOCAL_INCLUDES in moz.build. 
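+ # Illustrative examples (not from the gyp data): "/nsprpub/pr/include"
+ # is resolved against topsrcdir, while "some/relative/dir" is resolved
+ # against the directory containing the gyp file; includes whose resolved
+ # path does not exist are skipped below.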
+ if include.startswith("/"):
+ resolved = mozpath.abspath(
+ mozpath.join(config.topsrcdir, include[1:])
+ )
+ elif not include.startswith(("!", "%")):
+ resolved = mozpath.abspath(
+ mozpath.join(mozpath.dirname(build_file), include)
+ )
+ if not include.startswith(("!", "%")) and not os.path.exists(
+ resolved
+ ):
+ continue
+ context["LOCAL_INCLUDES"] += [include]
+
+ context["ASFLAGS"] = target_conf.get("asflags_mozilla", [])
+ if use_defines_in_asflags and defines:
+ context["ASFLAGS"] += ["-D" + d for d in defines]
+ if config.substs["OS_TARGET"] == "SunOS":
+ context["LDFLAGS"] = target_conf.get("ldflags", [])
+ flags = target_conf.get("cflags_mozilla", [])
+ if flags:
+ suffix_map = {
+ ".c": "CFLAGS",
+ ".cpp": "CXXFLAGS",
+ ".cc": "CXXFLAGS",
+ ".m": "CMFLAGS",
+ ".mm": "CMMFLAGS",
+ }
+ variables = (suffix_map[e] for e in extensions if e in suffix_map)
+ for var in variables:
+ for f in flags:
+ # We may be getting make variable references out of the
+ # gyp data, and we don't want those in emitted data, so
+ # substitute them with their actual value.
+ f = expand_variables(f, config.substs).split()
+ if not f:
+ continue
+ # the result may be a string or a list.
+ if isinstance(f, six.string_types):
+ context[var].append(f)
+ else:
+ context[var].extend(f)
+ else:
+ # Ignore other types because we don't have
+ # anything using them, and we're not testing them. They can be
+ # added when that becomes necessary.
+ raise NotImplementedError("Unsupported gyp target type: %s" % spec["type"])
+
+ if not no_chromium:
+ # Add some features to all contexts. Put here in case LOCAL_INCLUDES
+ # order matters.
+ context["LOCAL_INCLUDES"] += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "/ipc/chromium/src",
+ ]
+ # These get set via VC project file settings for normal GYP builds.
+ if config.substs["OS_TARGET"] == "WINNT":
+ context["DEFINES"]["UNICODE"] = True
+ context["DEFINES"]["_UNICODE"] = True
+ context["COMPILE_FLAGS"]["OS_INCLUDES"] = []
+
+ for key, value in gyp_dir_attrs.sandbox_vars.items():
+ if context.get(key) and isinstance(context[key], list):
+ # If we have a key from sandbox_vars that's also been
+ # populated here we use the value from sandbox_vars as our
+ # basis rather than overriding outright.
+ context[key] = value + context[key]
+ elif context.get(key) and isinstance(context[key], dict):
+ context[key].update(value)
+ else:
+ context[key] = value
+
+ yield context
+
+
+# A version of gyp.Load that doesn't return the generator (because module objects
+# aren't Pickle-able, and we don't use it anyway).
+def load_gyp(*args):
+ _, flat_list, targets, data = gyp.Load(*args)
+ return flat_list, targets, data
+
+
+class GypProcessor(object):
+ """Reads a gyp configuration in the background using the given executor and
+ emits GypContexts for the backend to process.
+
+ config is a ConfigEnvironment, path is the path to a root gyp configuration
+ file, and output is the base path under which the objdir for the various
+ gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
+ from moz.build.
+ """
+
+ def __init__(
+ self,
+ config,
+ gyp_dir_attrs,
+ path,
+ output,
+ executor,
+ action_overrides,
+ non_unified_sources,
+ ):
+ self._path = path
+ self._config = config
+ self._output = output
+ self._non_unified_sources = non_unified_sources
+ self._gyp_dir_attrs = gyp_dir_attrs
+ self._action_overrides = action_overrides
+ self.execution_time = 0.0
+ self._results = []
+
+ # gyp expects plain str instead of unicode.
The frontend code gives us + # unicode strings, so convert them. + if config.substs["CC_TYPE"] == "clang-cl": + # This isn't actually used anywhere in this generator, but it's needed + # to override the registry detection of VC++ in gyp. + os.environ.update( + { + "GYP_MSVS_OVERRIDE_PATH": "fake_path", + "GYP_MSVS_VERSION": config.substs["MSVS_VERSION"], + } + ) + + params = { + "parallel": False, + "generator_flags": {}, + "build_files": [path], + "root_targets": None, + } + # The NSS gyp configuration uses CC and CFLAGS to determine the + # floating-point ABI on arm. + os.environ.update( + CC=config.substs["CC"], + CFLAGS=shellutil.quote(*config.substs["CC_BASE_FLAGS"]), + ) + + if gyp_dir_attrs.no_chromium: + includes = [] + depth = mozpath.dirname(path) + else: + depth = chrome_src + # Files that gyp_chromium always includes + includes = [mozpath.join(script_dir, "gyp_includes", "common.gypi")] + finder = FileFinder(chrome_src) + includes.extend( + mozpath.join(chrome_src, name) + for name, _ in finder.find("*/supplement.gypi") + ) + + str_vars = dict(gyp_dir_attrs.variables) + str_vars["python"] = sys.executable + self._gyp_loader_future = executor.submit( + load_gyp, [path], "mozbuild", str_vars, includes, depth, params + ) + + @property + def results(self): + if self._results: + for res in self._results: + yield res + else: + # We report our execution time as the time spent blocked in a call + # to `result`, which is the only case a gyp processor will + # contribute significantly to total wall time. + t0 = time.monotonic() + flat_list, targets, data = self._gyp_loader_future.result() + self.execution_time += time.monotonic() - t0 + results = [] + for res in process_gyp_result( + (flat_list, targets, data), + self._gyp_dir_attrs, + self._path, + self._config, + self._output, + self._non_unified_sources, + self._action_overrides, + ): + results.append(res) + yield res + self._results = results diff --git a/python/mozbuild/mozbuild/frontend/mach_commands.py b/python/mozbuild/mozbuild/frontend/mach_commands.py new file mode 100644 index 0000000000..6d379977df --- /dev/null +++ b/python/mozbuild/mozbuild/frontend/mach_commands.py @@ -0,0 +1,338 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import sys +from collections import defaultdict + +import mozpack.path as mozpath +from mach.decorators import Command, CommandArgument, SubCommand + +TOPSRCDIR = os.path.abspath(os.path.join(__file__, "../../../../../")) + + +class InvalidPathException(Exception): + """Represents an error due to an invalid path.""" + + +@Command( + "mozbuild-reference", + category="build-dev", + description="View reference documentation on mozbuild files.", + virtualenv_name="docs", +) +@CommandArgument( + "symbol", + default=None, + nargs="*", + help="Symbol to view help on. 
If not specified, all will be shown.", +) +@CommandArgument( + "--name-only", + "-n", + default=False, + action="store_true", + help="Print symbol names only.", +) +def reference(command_context, symbol, name_only=False): + import mozbuild.frontend.context as m + from mozbuild.sphinx import ( + format_module, + function_reference, + special_reference, + variable_reference, + ) + + if name_only: + for s in sorted(m.VARIABLES.keys()): + print(s) + + for s in sorted(m.FUNCTIONS.keys()): + print(s) + + for s in sorted(m.SPECIAL_VARIABLES.keys()): + print(s) + + return 0 + + if len(symbol): + for s in symbol: + if s in m.VARIABLES: + for line in variable_reference(s, *m.VARIABLES[s]): + print(line) + continue + elif s in m.FUNCTIONS: + for line in function_reference(s, *m.FUNCTIONS[s]): + print(line) + continue + elif s in m.SPECIAL_VARIABLES: + for line in special_reference(s, *m.SPECIAL_VARIABLES[s]): + print(line) + continue + + print("Could not find symbol: %s" % s) + return 1 + + return 0 + + for line in format_module(m): + print(line) + + return 0 + + +@Command( + "file-info", category="build-dev", description="Query for metadata about files." +) +def file_info(command_context): + """Show files metadata derived from moz.build files. + + moz.build files contain "Files" sub-contexts for declaring metadata + against file patterns. This command suite is used to query that data. + """ + + +@SubCommand( + "file-info", + "bugzilla-component", + "Show Bugzilla component info for files listed.", +) +@CommandArgument("-r", "--rev", help="Version control revision to look up info from") +@CommandArgument( + "--format", + choices={"json", "plain"}, + default="plain", + help="Output format", + dest="fmt", +) +@CommandArgument("paths", nargs="+", help="Paths whose data to query") +def file_info_bugzilla(command_context, paths, rev=None, fmt=None): + """Show Bugzilla component for a set of files. + + Given a requested set of files (which can be specified using + wildcards), print the Bugzilla component for each file. 
+ """ + components = defaultdict(set) + try: + for p, m in _get_files_info(command_context, paths, rev=rev).items(): + components[m.get("BUG_COMPONENT")].add(p) + except InvalidPathException as e: + print(e) + return 1 + + if fmt == "json": + data = {} + for component, files in components.items(): + if not component: + continue + for f in files: + data[f] = [component.product, component.component] + + json.dump(data, sys.stdout, sort_keys=True, indent=2) + return + elif fmt == "plain": + comp_to_file = sorted( + ( + "UNKNOWN" + if component is None + else "%s :: %s" % (component.product, component.component), + sorted(files), + ) + for component, files in components.items() + ) + for component, files in comp_to_file: + print(component) + for f in files: + print(" %s" % f) + else: + print("unhandled output format: %s" % fmt) + return 1 + + +@SubCommand( + "file-info", "missing-bugzilla", "Show files missing Bugzilla component info" +) +@CommandArgument("-r", "--rev", help="Version control revision to look up info from") +@CommandArgument( + "--format", + choices={"json", "plain"}, + dest="fmt", + default="plain", + help="Output format", +) +@CommandArgument("paths", nargs="+", help="Paths whose data to query") +def file_info_missing_bugzilla(command_context, paths, rev=None, fmt=None): + missing = set() + + try: + for p, m in _get_files_info(command_context, paths, rev=rev).items(): + if "BUG_COMPONENT" not in m: + missing.add(p) + except InvalidPathException as e: + print(e) + return 1 + + if fmt == "json": + json.dump({"missing": sorted(missing)}, sys.stdout, indent=2) + return + elif fmt == "plain": + for f in sorted(missing): + print(f) + else: + print("unhandled output format: %s" % fmt) + return 1 + + +@SubCommand( + "file-info", + "bugzilla-automation", + "Perform Bugzilla metadata analysis as required for automation", +) +@CommandArgument("out_dir", help="Where to write files") +def bugzilla_automation(command_context, out_dir): + """Analyze and validate Bugzilla metadata as required by automation. + + This will write out JSON and gzipped JSON files for Bugzilla metadata. + + The exit code will be non-0 if Bugzilla metadata fails validation. + """ + import gzip + + missing_component = set() + seen_components = set() + component_by_path = {} + + # TODO operate in VCS space. This requires teaching the VCS reader + # to understand wildcards and/or for the relative path issue in the + # VCS finder to be worked out. + for p, m in sorted(_get_files_info(command_context, ["**"]).items()): + if "BUG_COMPONENT" not in m: + missing_component.add(p) + print( + "FileToBugzillaMappingError: Missing Bugzilla component: " + "%s - Set the BUG_COMPONENT in the moz.build file to fix " + "the issue." % p + ) + continue + + c = m["BUG_COMPONENT"] + seen_components.add(c) + component_by_path[p] = [c.product, c.component] + + print("Examined %d files" % len(component_by_path)) + + # We also have a normalized versions of the file to components mapping + # that requires far less storage space by eliminating redundant strings. 
+ indexed_components = { + i: [c.product, c.component] for i, c in enumerate(sorted(seen_components)) + } + components_index = {tuple(v): k for k, v in indexed_components.items()} + normalized_component = {"components": indexed_components, "paths": {}} + + for p, c in component_by_path.items(): + d = normalized_component["paths"] + while "/" in p: + base, p = p.split("/", 1) + d = d.setdefault(base, {}) + + d[p] = components_index[tuple(c)] + + if not os.path.exists(out_dir): + os.makedirs(out_dir) + + components_json = os.path.join(out_dir, "components.json") + print("Writing %s" % components_json) + with open(components_json, "w") as fh: + json.dump(component_by_path, fh, sort_keys=True, indent=2) + + missing_json = os.path.join(out_dir, "missing.json") + print("Writing %s" % missing_json) + with open(missing_json, "w") as fh: + json.dump({"missing": sorted(missing_component)}, fh, indent=2) + + indexed_components_json = os.path.join(out_dir, "components-normalized.json") + print("Writing %s" % indexed_components_json) + with open(indexed_components_json, "w") as fh: + # Don't indent so file is as small as possible. + json.dump(normalized_component, fh, sort_keys=True) + + # Write compressed versions of JSON files. + for p in (components_json, indexed_components_json, missing_json): + gzip_path = "%s.gz" % p + print("Writing %s" % gzip_path) + with open(p, "rb") as ifh, gzip.open(gzip_path, "wb") as ofh: + while True: + data = ifh.read(32768) + if not data: + break + ofh.write(data) + + # Causes CI task to fail if files are missing Bugzilla annotation. + if missing_component: + return 1 + + +def _get_files_info(command_context, paths, rev=None): + reader = command_context.mozbuild_reader(config_mode="empty", vcs_revision=rev) + + # Normalize to relative from topsrcdir. + relpaths = [] + for p in paths: + a = mozpath.abspath(p) + if not mozpath.basedir(a, [command_context.topsrcdir]): + raise InvalidPathException("path is outside topsrcdir: %s" % p) + + relpaths.append(mozpath.relpath(a, command_context.topsrcdir)) + + # Expand wildcards. + # One variable is for ordering. The other for membership tests. + # (Membership testing on a list can be slow.) + allpaths = [] + all_paths_set = set() + for p in relpaths: + if "*" not in p: + if p not in all_paths_set: + if not os.path.exists(mozpath.join(command_context.topsrcdir, p)): + print("(%s does not exist; ignoring)" % p, file=sys.stderr) + continue + + all_paths_set.add(p) + allpaths.append(p) + continue + + if rev: + raise InvalidPathException("cannot use wildcard in version control mode") + + # finder is rooted at / for now. + # TODO bug 1171069 tracks changing to relative. + search = mozpath.join(command_context.topsrcdir, p)[1:] + for path, f in reader.finder.find(search): + path = path[len(command_context.topsrcdir) :] + if path not in all_paths_set: + all_paths_set.add(path) + allpaths.append(path) + + return reader.files_info(allpaths) + + +@SubCommand( + "file-info", "schedules", "Show the combined SCHEDULES for the files listed." +) +@CommandArgument("paths", nargs="+", help="Paths whose data to query") +def file_info_schedules(command_context, paths): + """Show what is scheduled by the given files. + + Given a requested set of files (which can be specified using + wildcards), print the total set of scheduled components. 
+ """ + from mozbuild.frontend.reader import BuildReader, EmptyConfig + + config = EmptyConfig(TOPSRCDIR) + reader = BuildReader(config) + schedules = set() + for p, m in reader.files_info(paths).items(): + schedules |= set(m["SCHEDULES"].components) + + print(", ".join(schedules)) diff --git a/python/mozbuild/mozbuild/frontend/reader.py b/python/mozbuild/mozbuild/frontend/reader.py new file mode 100644 index 0000000000..9d624b37ec --- /dev/null +++ b/python/mozbuild/mozbuild/frontend/reader.py @@ -0,0 +1,1432 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This file contains code for reading metadata from the build system into +# data structures. + +r"""Read build frontend files into data structures. + +In terms of code architecture, the main interface is BuildReader. BuildReader +starts with a root mozbuild file. It creates a new execution environment for +this file, which is represented by the Sandbox class. The Sandbox class is used +to fill a Context, representing the output of an individual mozbuild file. The + +The BuildReader contains basic logic for traversing a tree of mozbuild files. +It does this by examining specific variables populated during execution. +""" + +import ast +import inspect +import logging +import os +import sys +import textwrap +import time +import traceback +import types +from collections import OrderedDict, defaultdict +from concurrent.futures.process import ProcessPoolExecutor +from io import StringIO +from itertools import chain +from multiprocessing import cpu_count + +import mozpack.path as mozpath +import six +from mozpack.files import FileFinder +from six import string_types + +from mozbuild.backend.configenvironment import ConfigEnvironment +from mozbuild.base import ExecutionSummary +from mozbuild.util import ( + EmptyValue, + HierarchicalStringList, + ReadOnlyDefaultDict, + memoize, +) + +from .context import ( + DEPRECATION_HINTS, + FUNCTIONS, + SPECIAL_VARIABLES, + SUBCONTEXTS, + VARIABLES, + Context, + ContextDerivedValue, + Files, + SourcePath, + SubContext, + TemplateContext, +) +from .sandbox import ( + Sandbox, + SandboxError, + SandboxExecutionError, + SandboxLoadError, + default_finder, +) + +if six.PY2: + type_type = types.TypeType +else: + type_type = type + + +def log(logger, level, action, params, formatter): + logger.log(level, formatter, extra={"action": action, "params": params}) + + +class EmptyConfig(object): + """A config object that is empty. + + This config object is suitable for using with a BuildReader on a vanilla + checkout, without any existing configuration. The config is simply + bootstrapped from a top source directory path. + """ + + class PopulateOnGetDict(ReadOnlyDefaultDict): + """A variation on ReadOnlyDefaultDict that populates during .get(). + + This variation is needed because CONFIG uses .get() to access members. + Without it, None (instead of our EmptyValue types) would be returned. + """ + + def get(self, key, default=None): + return self[key] + + default_substs = { + # These 2 variables are used semi-frequently and it isn't worth + # changing all the instances. + "MOZ_APP_NAME": "empty", + "MOZ_CHILD_PROCESS_NAME": "empty", + # Needed to prevent js/src's config.status from loading. 
+ "JS_STANDALONE": "1", + } + + def __init__(self, topsrcdir, substs=None): + self.topsrcdir = topsrcdir + self.topobjdir = "" + + self.substs = self.PopulateOnGetDict(EmptyValue, substs or self.default_substs) + self.defines = self.substs + self.error_is_fatal = False + + +def is_read_allowed(path, config): + """Whether we are allowed to load a mozbuild file at the specified path. + + This is used as cheap security to ensure the build is isolated to known + source directories. + + We are allowed to read from the main source directory and any defined + external source directories. The latter is to allow 3rd party applications + to hook into our build system. + """ + assert os.path.isabs(path) + assert os.path.isabs(config.topsrcdir) + + path = mozpath.normpath(path) + topsrcdir = mozpath.normpath(config.topsrcdir) + + if mozpath.basedir(path, [topsrcdir]): + return True + + return False + + +class SandboxCalledError(SandboxError): + """Represents an error resulting from calling the error() function.""" + + def __init__(self, file_stack, message): + SandboxError.__init__(self, file_stack) + self.message = message + + +class MozbuildSandbox(Sandbox): + """Implementation of a Sandbox tailored for mozbuild files. + + We expose a few useful functions and expose the set of variables defining + Mozilla's build system. + + context is a Context instance. + + metadata is a dict of metadata that can be used during the sandbox + evaluation. + """ + + def __init__(self, context, metadata={}, finder=default_finder): + assert isinstance(context, Context) + + Sandbox.__init__(self, context, finder=finder) + + self._log = logging.getLogger(__name__) + + self.metadata = dict(metadata) + exports = self.metadata.get("exports", {}) + self.exports = set(exports.keys()) + context.update(exports) + self.templates = self.metadata.setdefault("templates", {}) + self.special_variables = self.metadata.setdefault( + "special_variables", SPECIAL_VARIABLES + ) + self.functions = self.metadata.setdefault("functions", FUNCTIONS) + self.subcontext_types = self.metadata.setdefault("subcontexts", SUBCONTEXTS) + + def __getitem__(self, key): + if key in self.special_variables: + return self.special_variables[key][0](self._context) + if key in self.functions: + return self._create_function(self.functions[key]) + if key in self.subcontext_types: + return self._create_subcontext(self.subcontext_types[key]) + if key in self.templates: + return self._create_template_wrapper(self.templates[key]) + return Sandbox.__getitem__(self, key) + + def __contains__(self, key): + if any( + key in d + for d in ( + self.special_variables, + self.functions, + self.subcontext_types, + self.templates, + ) + ): + return True + + return Sandbox.__contains__(self, key) + + def __setitem__(self, key, value): + if key in self.special_variables and value is self[key]: + return + if ( + key in self.special_variables + or key in self.functions + or key in self.subcontext_types + ): + raise KeyError('Cannot set "%s" because it is a reserved keyword' % key) + if key in self.exports: + self._context[key] = value + self.exports.remove(key) + return + Sandbox.__setitem__(self, key, value) + + def exec_file(self, path): + """Override exec_file to normalize paths and restrict file loading. + + Paths will be rejected if they do not fall under topsrcdir or one of + the external roots. + """ + + # realpath() is needed for true security. But, this isn't for security + # protection, so it is omitted. 
+ if not is_read_allowed(path, self._context.config): + raise SandboxLoadError( + self._context.source_stack, sys.exc_info()[2], illegal_path=path + ) + + Sandbox.exec_file(self, path) + + def _export(self, varname): + """Export the variable to all subdirectories of the current path.""" + + exports = self.metadata.setdefault("exports", dict()) + if varname in exports: + raise Exception("Variable has already been exported: %s" % varname) + + try: + # Doing a regular self._context[varname] causes a set as a side + # effect. By calling the dict method instead, we don't have any + # side effects. + exports[varname] = dict.__getitem__(self._context, varname) + except KeyError: + self.last_name_error = KeyError("global_ns", "get_unknown", varname) + raise self.last_name_error + + def recompute_exports(self): + """Recompute the variables to export to subdirectories with the current + values in the subdirectory.""" + + if "exports" in self.metadata: + for key in self.metadata["exports"]: + self.metadata["exports"][key] = self[key] + + def _include(self, path): + """Include and exec another file within the context of this one.""" + + # path is a SourcePath + self.exec_file(path.full_path) + + def _warning(self, message): + # FUTURE consider capturing warnings in a variable instead of printing. + print("WARNING: %s" % message, file=sys.stderr) + + def _error(self, message): + if self._context.error_is_fatal: + raise SandboxCalledError(self._context.source_stack, message) + else: + self._warning(message) + + def _template_decorator(self, func): + """Registers a template function.""" + + if not inspect.isfunction(func): + raise Exception( + "`template` is a function decorator. You must " + "use it as `@template` preceding a function declaration." + ) + + name = func.__name__ + + if name in self.templates: + raise KeyError( + 'A template named "%s" was already declared in %s.' + % (name, self.templates[name].path) + ) + + if name.islower() or name.isupper() or name[0].islower(): + raise NameError("Template function names must be CamelCase.") + + self.templates[name] = TemplateFunction(func, self) + + @memoize + def _create_subcontext(self, cls): + """Return a function object that creates SubContext instances.""" + + def fn(*args, **kwargs): + return cls(self._context, *args, **kwargs) + + return fn + + @memoize + def _create_function(self, function_def): + """Returns a function object for use within the sandbox for the given + function definition. + + The wrapper function does type coercion on the function arguments + """ + func, args_def, doc = function_def + + def function(*args): + def coerce(arg, type): + if not isinstance(arg, type): + if issubclass(type, ContextDerivedValue): + arg = type(self._context, arg) + else: + arg = type(arg) + return arg + + args = [coerce(arg, type) for arg, type in zip(args, args_def)] + return func(self)(*args) + + return function + + @memoize + def _create_template_wrapper(self, template): + """Returns a function object for use within the sandbox for the given + TemplateFunction instance.. + + When a moz.build file contains a reference to a template call, the + sandbox needs a function to execute. This is what this method returns. + That function creates a new sandbox for execution of the template. + After the template is executed, the data from its execution is merged + with the context of the calling sandbox. 
+ """ + + def template_wrapper(*args, **kwargs): + context = TemplateContext( + template=template.name, + allowed_variables=self._context._allowed_variables, + config=self._context.config, + ) + context.add_source(self._context.current_path) + for p in self._context.all_paths: + context.add_source(p) + + sandbox = MozbuildSandbox( + context, + metadata={ + # We should arguably set these defaults to something else. + # Templates, for example, should arguably come from the state + # of the sandbox from when the template was declared, not when + # it was instantiated. Bug 1137319. + "functions": self.metadata.get("functions", {}), + "special_variables": self.metadata.get("special_variables", {}), + "subcontexts": self.metadata.get("subcontexts", {}), + "templates": self.metadata.get("templates", {}), + }, + finder=self._finder, + ) + + template.exec_in_sandbox(sandbox, *args, **kwargs) + + # This is gross, but allows the merge to happen. Eventually, the + # merging will go away and template contexts emitted independently. + klass = self._context.__class__ + self._context.__class__ = TemplateContext + # The sandbox will do all the necessary checks for these merges. + for key, value in context.items(): + if isinstance(value, dict): + self[key].update(value) + elif isinstance(value, (list, HierarchicalStringList)): + self[key] += value + else: + self[key] = value + self._context.__class__ = klass + + for p in context.all_paths: + self._context.add_source(p) + + return template_wrapper + + +class TemplateFunction(object): + def __init__(self, func, sandbox): + self.path = func.__code__.co_filename + self.name = func.__name__ + + code = func.__code__ + firstlineno = code.co_firstlineno + lines = sandbox._current_source.splitlines(True) + if lines: + # Older versions of python 2.7 had a buggy inspect.getblock() that + # would ignore the last line if it didn't terminate with a newline. + if not lines[-1].endswith("\n"): + lines[-1] += "\n" + lines = inspect.getblock(lines[firstlineno - 1 :]) + + # The code lines we get out of inspect.getsourcelines look like + # @template + # def Template(*args, **kwargs): + # VAR = 'value' + # ... + func_ast = ast.parse("".join(lines), self.path) + # Remove decorators + func_ast.body[0].decorator_list = [] + # Adjust line numbers accordingly + ast.increment_lineno(func_ast, firstlineno - 1) + + # When using a custom dictionary for function globals/locals, Cpython + # actually never calls __getitem__ and __setitem__, so we need to + # modify the AST so that accesses to globals are properly directed + # to a dict. AST wants binary_type for this in Py2 and text_type for + # this in Py3, so cast to str. + self._global_name = str("_data") + # In case '_data' is a name used for a variable in the function code, + # prepend more underscores until we find an unused name. + while ( + self._global_name in code.co_names or self._global_name in code.co_varnames + ): + self._global_name += str("_") + func_ast = self.RewriteName(sandbox, self._global_name).visit(func_ast) + + # Execute the rewritten code. That code now looks like: + # def Template(*args, **kwargs): + # _data['VAR'] = 'value' + # ... + # The result of executing this code is the creation of a 'Template' + # function object in the global namespace. 
+ glob = {"__builtins__": sandbox._builtins} + func = types.FunctionType( + compile(func_ast, self.path, "exec"), + glob, + self.name, + func.__defaults__, + func.__closure__, + ) + func() + + self._func = glob[self.name] + + def exec_in_sandbox(self, sandbox, *args, **kwargs): + """Executes the template function in the given sandbox.""" + # Create a new function object associated with the execution sandbox + glob = {self._global_name: sandbox, "__builtins__": sandbox._builtins} + func = types.FunctionType( + self._func.__code__, + glob, + self.name, + self._func.__defaults__, + self._func.__closure__, + ) + sandbox.exec_function(func, args, kwargs, self.path, becomes_current_path=False) + + class RewriteName(ast.NodeTransformer): + """AST Node Transformer to rewrite variable accesses to go through + a dict. + """ + + def __init__(self, sandbox, global_name): + self._sandbox = sandbox + self._global_name = global_name + + def visit_Str(self, node): + node.s = six.ensure_text(node.s) + return node + + def visit_Name(self, node): + # Modify uppercase variable references and names known to the + # sandbox as if they were retrieved from a dict instead. + if not node.id.isupper() and node.id not in self._sandbox: + return node + + def c(new_node): + return ast.copy_location(new_node, node) + + return c( + ast.Subscript( + value=c(ast.Name(id=self._global_name, ctx=ast.Load())), + slice=c(ast.Index(value=c(ast.Str(s=node.id)))), + ctx=node.ctx, + ) + ) + + +class SandboxValidationError(Exception): + """Represents an error encountered when validating sandbox results.""" + + def __init__(self, message, context): + Exception.__init__(self, message) + self.context = context + + def __str__(self): + s = StringIO() + + delim = "=" * 30 + s.write("\n%s\nFATAL ERROR PROCESSING MOZBUILD FILE\n%s\n\n" % (delim, delim)) + + s.write("The error occurred while processing the following file or ") + s.write("one of the files it includes:\n") + s.write("\n") + s.write(" %s/moz.build\n" % self.context.srcdir) + s.write("\n") + + s.write("The error occurred when validating the result of ") + s.write("the execution. The reported error is:\n") + s.write("\n") + s.write( + "".join( + " %s\n" % l + for l in super(SandboxValidationError, self).__str__().splitlines() + ) + ) + s.write("\n") + + return s.getvalue() + + +class BuildReaderError(Exception): + """Represents errors encountered during BuildReader execution. + + The main purpose of this class is to facilitate user-actionable error + messages. Execution errors should say: + + - Why they failed + - Where they failed + - What can be done to prevent the error + + A lot of the code in this class should arguably be inside sandbox.py. + However, extraction is somewhat difficult given the additions + MozbuildSandbox has over Sandbox (e.g. the concept of included files - + which affect error messages, of course). + """ + + def __init__( + self, + file_stack, + trace, + sandbox_exec_error=None, + sandbox_load_error=None, + validation_error=None, + other_error=None, + sandbox_called_error=None, + ): + + self.file_stack = file_stack + self.trace = trace + self.sandbox_called_error = sandbox_called_error + self.sandbox_exec = sandbox_exec_error + self.sandbox_load = sandbox_load_error + self.validation_error = validation_error + self.other = other_error + + @property + def main_file(self): + return self.file_stack[-1] + + @property + def actual_file(self): + # We report the file that called out to the file that couldn't load. 
+ if self.sandbox_load is not None:
+ if len(self.sandbox_load.file_stack) > 1:
+ return self.sandbox_load.file_stack[-2]
+
+ if len(self.file_stack) > 1:
+ return self.file_stack[-2]
+
+ if self.sandbox_error is not None and len(self.sandbox_error.file_stack):
+ return self.sandbox_error.file_stack[-1]
+
+ return self.file_stack[-1]
+
+ @property
+ def sandbox_error(self):
+ return self.sandbox_exec or self.sandbox_load or self.sandbox_called_error
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = "=" * 30
+ s.write("\n%s\nFATAL ERROR PROCESSING MOZBUILD FILE\n%s\n\n" % (delim, delim))
+
+ s.write("The error occurred while processing the following file:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.actual_file)
+ s.write("\n")
+
+ if self.actual_file != self.main_file and not self.sandbox_load:
+ s.write("This file was included as part of processing:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.main_file)
+ s.write("\n")
+
+ if self.sandbox_error is not None:
+ self._print_sandbox_error(s)
+ elif self.validation_error is not None:
+ s.write("The error occurred when validating the result of ")
+ s.write("the execution. The reported error is:\n")
+ s.write("\n")
+ s.write(
+ "".join(
+ " %s\n" % l
+ for l in six.text_type(self.validation_error).splitlines()
+ )
+ )
+ s.write("\n")
+ else:
+ s.write("The error appears to be part of the %s " % __name__)
+ s.write("Python module itself! It is possible you have stumbled ")
+ s.write("across a legitimate bug.\n")
+ s.write("\n")
+
+ for l in traceback.format_exception(
+ type(self.other), self.other, self.trace
+ ):
+ s.write(six.ensure_text(l))
+
+ return s.getvalue()
+
+ def _print_sandbox_error(self, s):
+ # Try to find the frame of the executed code.
+ script_frame = None
+
+ # We don't currently capture the trace for SandboxCalledError.
+ # Therefore, we don't get line numbers from the moz.build file.
+ # FUTURE capture this.
+ trace = getattr(self.sandbox_error, "trace", None)
+ frames = []
+ if trace:
+ frames = traceback.extract_tb(trace)
+ for frame in frames:
+ if frame[0] == self.actual_file:
+ script_frame = frame
+
+ # Reset if we enter a new execution context. This prevents errors
+ # in this module from being attributed to a script.
+ elif frame[0] == __file__ and frame[2] == "exec_function":
+ script_frame = None
+
+ if script_frame is not None:
+ s.write("The error was triggered on line %d " % script_frame[1])
+ s.write("of this file:\n")
+ s.write("\n")
+ s.write(" %s\n" % script_frame[3])
+ s.write("\n")
+
+ if self.sandbox_called_error is not None:
+ self._print_sandbox_called_error(s)
+ return
+
+ if self.sandbox_load is not None:
+ self._print_sandbox_load_error(s)
+ return
+
+ self._print_sandbox_exec_error(s)
+
+ def _print_sandbox_called_error(self, s):
+ assert self.sandbox_called_error is not None
+
+ s.write("A moz.build file called the error() function.\n")
+ s.write("\n")
+ s.write("The error it encountered is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_called_error.message)
+ s.write("\n")
+ s.write("Correct the error condition and try again.\n")
+
+ def _print_sandbox_load_error(self, s):
+ assert self.sandbox_load is not None
+
+ if self.sandbox_load.illegal_path is not None:
+ s.write("The underlying problem is an illegal file access. 
") + s.write("This is likely due to trying to access a file ") + s.write("outside of the top source directory.\n") + s.write("\n") + s.write("The path whose access was denied is:\n") + s.write("\n") + s.write(" %s\n" % self.sandbox_load.illegal_path) + s.write("\n") + s.write("Modify the script to not access this file and ") + s.write("try again.\n") + return + + if self.sandbox_load.read_error is not None: + if not os.path.exists(self.sandbox_load.read_error): + s.write("The underlying problem is we referenced a path ") + s.write("that does not exist. That path is:\n") + s.write("\n") + s.write(" %s\n" % self.sandbox_load.read_error) + s.write("\n") + s.write("Either create the file if it needs to exist or ") + s.write("do not reference it.\n") + else: + s.write("The underlying problem is a referenced path could ") + s.write("not be read. The trouble path is:\n") + s.write("\n") + s.write(" %s\n" % self.sandbox_load.read_error) + s.write("\n") + s.write("It is possible the path is not correct. Is it ") + s.write("pointing to a directory? It could also be a file ") + s.write("permissions issue. Ensure that the file is ") + s.write("readable.\n") + + return + + # This module is buggy if you see this. + raise AssertionError("SandboxLoadError with unhandled properties!") + + def _print_sandbox_exec_error(self, s): + assert self.sandbox_exec is not None + + inner = self.sandbox_exec.exc_value + + if isinstance(inner, SyntaxError): + s.write("The underlying problem is a Python syntax error ") + s.write("on line %d:\n" % inner.lineno) + s.write("\n") + s.write(" %s\n" % inner.text) + if inner.offset: + s.write((" " * (inner.offset + 4)) + "^\n") + s.write("\n") + s.write("Fix the syntax error and try again.\n") + return + + if isinstance(inner, KeyError): + self._print_keyerror(inner, s) + elif isinstance(inner, ValueError): + self._print_valueerror(inner, s) + else: + self._print_exception(inner, s) + + def _print_keyerror(self, inner, s): + if not inner.args or inner.args[0] not in ("global_ns", "local_ns"): + self._print_exception(inner, s) + return + + if inner.args[0] == "global_ns": + import difflib + + verb = None + if inner.args[1] == "get_unknown": + verb = "read" + elif inner.args[1] == "set_unknown": + verb = "write" + elif inner.args[1] == "reassign": + s.write("The underlying problem is an attempt to reassign ") + s.write("a reserved UPPERCASE variable.\n") + s.write("\n") + s.write("The reassigned variable causing the error is:\n") + s.write("\n") + s.write(" %s\n" % inner.args[2]) + s.write("\n") + s.write('Maybe you meant "+=" instead of "="?\n') + return + else: + raise AssertionError("Unhandled global_ns: %s" % inner.args[1]) + + s.write("The underlying problem is an attempt to %s " % verb) + s.write("a reserved UPPERCASE variable that does not exist.\n") + s.write("\n") + s.write("The variable %s causing the error is:\n" % verb) + s.write("\n") + s.write(" %s\n" % inner.args[2]) + s.write("\n") + close_matches = difflib.get_close_matches( + inner.args[2], VARIABLES.keys(), 2 + ) + if close_matches: + s.write("Maybe you meant %s?\n" % " or ".join(close_matches)) + s.write("\n") + + if inner.args[2] in DEPRECATION_HINTS: + s.write( + "%s\n" % textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip() + ) + return + + s.write("Please change the file to not use this variable.\n") + s.write("\n") + s.write("For reference, the set of valid variables is:\n") + s.write("\n") + s.write(", ".join(sorted(VARIABLES.keys())) + "\n") + return + + s.write("The underlying problem is a 
reference to an undefined ") + s.write("local variable:\n") + s.write("\n") + s.write(" %s\n" % inner.args[2]) + s.write("\n") + s.write("Please change the file to not reference undefined ") + s.write("variables and try again.\n") + + def _print_valueerror(self, inner, s): + if not inner.args or inner.args[0] not in ("global_ns", "local_ns"): + self._print_exception(inner, s) + return + + assert inner.args[1] == "set_type" + + s.write("The underlying problem is an attempt to write an illegal ") + s.write("value to a special variable.\n") + s.write("\n") + s.write("The variable whose value was rejected is:\n") + s.write("\n") + s.write(" %s" % inner.args[2]) + s.write("\n") + s.write("The value being written to it was of the following type:\n") + s.write("\n") + s.write(" %s\n" % type(inner.args[3]).__name__) + s.write("\n") + s.write("This variable expects the following type(s):\n") + s.write("\n") + if type(inner.args[4]) == type_type: + s.write(" %s\n" % inner.args[4].__name__) + else: + for t in inner.args[4]: + s.write(" %s\n" % t.__name__) + s.write("\n") + s.write("Change the file to write a value of the appropriate type ") + s.write("and try again.\n") + + def _print_exception(self, e, s): + s.write("An error was encountered as part of executing the file ") + s.write("itself. The error appears to be the fault of the script.\n") + s.write("\n") + s.write("The error as reported by Python is:\n") + s.write("\n") + s.write(" %s\n" % traceback.format_exception_only(type(e), e)) + + +class BuildReader(object): + """Read a tree of mozbuild files into data structures. + + This is where the build system starts. You give it a tree configuration + (the output of configuration) and it executes the moz.build files and + collects the data they define. + + The reader can optionally call a callable after each sandbox is evaluated + but before its evaluated content is processed. This gives callers the + opportunity to modify contexts before side-effects occur from their + content. This callback receives the ``Context`` containing the result of + each sandbox evaluation. Its return value is ignored. + """ + + def __init__(self, config, finder=default_finder): + self.config = config + + self._log = logging.getLogger(__name__) + self._read_files = set() + self._execution_stack = [] + self.finder = finder + + # Finder patterns to ignore when searching for moz.build files. + ignores = { + # Ignore fake moz.build files used for testing moz.build. + "python/mozbuild/mozbuild/test", + "testing/mozbase/moztest/tests/data", + # Ignore object directories. + "obj*", + } + + self._relevant_mozbuild_finder = FileFinder( + self.config.topsrcdir, ignore=ignores + ) + + # Also ignore any other directories that could be objdirs, they don't + # necessarily start with the string 'obj'. + for path, f in self._relevant_mozbuild_finder.find("*/config.status"): + self._relevant_mozbuild_finder.ignore.add(os.path.dirname(path)) + + max_workers = cpu_count() + if sys.platform.startswith("win"): + # In python 3, on Windows, ProcessPoolExecutor uses + # _winapi.WaitForMultipleObjects, which doesn't work on large + # number of objects. It also has some automatic capping to avoid + # _winapi.WaitForMultipleObjects being unhappy as a consequence, + # but that capping is actually insufficient in python 3.7 and 3.8 + # (as well as inexistent in older versions). So we cap ourselves + # to 60, see https://bugs.python.org/issue26903#msg365886. 
+ max_workers = min(max_workers, 60)
+ self._gyp_worker_pool = ProcessPoolExecutor(max_workers=max_workers)
+ self._gyp_processors = []
+ self._execution_time = 0.0
+ self._file_count = 0
+ self._gyp_execution_time = 0.0
+ self._gyp_file_count = 0
+
+ def summary(self):
+ return ExecutionSummary(
+ "Finished reading {file_count:d} moz.build files in "
+ "{execution_time:.2f}s",
+ file_count=self._file_count,
+ execution_time=self._execution_time,
+ )
+
+ def gyp_summary(self):
+ return ExecutionSummary(
+ "Read {file_count:d} gyp files in parallel contributing "
+ "{execution_time:.2f}s to total wall time",
+ file_count=self._gyp_file_count,
+ execution_time=self._gyp_execution_time,
+ )
+
+ def read_topsrcdir(self):
+ """Read the tree of linked moz.build files.
+
+ This starts with the tree's top-most moz.build file and descends into
+ all linked moz.build files until all relevant files have been evaluated.
+
+ This is a generator of Context instances. As each moz.build file is
+ read, a new Context is created and emitted.
+ """
+ path = mozpath.join(self.config.topsrcdir, "moz.build")
+ for r in self.read_mozbuild(path, self.config):
+ yield r
+ all_gyp_paths = set()
+ for g in self._gyp_processors:
+ for gyp_context in g.results:
+ all_gyp_paths |= gyp_context.all_paths
+ yield gyp_context
+ self._gyp_execution_time += g.execution_time
+ self._gyp_file_count += len(all_gyp_paths)
+ self._gyp_worker_pool.shutdown()
+
+ def all_mozbuild_paths(self):
+ """Iterator over all available moz.build files.
+
+ This method has little to do with the reader. It should arguably belong
+ elsewhere.
+ """
+ # In the future, we may traverse moz.build files by looking
+ # for DIRS references in the AST, even if a directory is added behind
+ # a conditional. For now, just walk the filesystem.
+ for path, f in self._relevant_mozbuild_finder.find("**/moz.build"):
+ yield path
+
+ def find_variables_from_ast(self, variables, path=None):
+ """Finds all assignments to the specified variables by parsing
+ moz.build abstract syntax trees.
+
+ This function only supports two cases, as detailed below.
+
+ 1) A dict. Keys and values should both be strings, e.g:
+
+ VARIABLE['foo'] = 'bar'
+
+ This is an `Assign` node with a `Subscript` target. The `Subscript`'s
+ value is a `Name` node with id "VARIABLE". The slice of this target is
+ an `Index` node and its value is a `Str` with value "foo".
+
+ 2) A simple list. Values should be strings. The target of the
+ assignment should be a `Name` node and the value a `List` node
+ whose elements are `Str` nodes, e.g:
+
+ VARIABLE += ['foo']
+
+ This is an `AugAssign` node with a `Name` target with id "VARIABLE".
+ The value is a `List` node containing one `Str` element whose value is
+ "foo".
+
+ With a little work, this function could support other types of
+ assignment. But if we end up writing a lot of AST code, it might be
+ best to import a high-level AST manipulation library into the tree.
+
+ Args:
+ variables (list): A list of variable assignments to capture.
+ path (str): A path relative to the source dir. If specified, only
+ `moz.build` files relevant to this path will be parsed. Otherwise
+ all `moz.build` files are parsed.
+
+ Returns:
+ A generator that generates tuples of the form `(<moz.build path>,
+ <variable name>, <key>, <value>)`. The `key` will only be
+ defined if the variable is an object, otherwise it is `None`.
+ """
+
+ if isinstance(variables, string_types):
+ variables = [variables]
+
+ def assigned_variable(node):
+ # This is not correct, but we don't care yet.
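+ # ast.Assign nodes carry a list of targets, while ast.AugAssign has a
+ # single .target attribute; the hasattr() check below distinguishes them.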
+ if hasattr(node, "targets"): + # Nothing in moz.build does multi-assignment (yet). So error if + # we see it. + assert len(node.targets) == 1 + + target = node.targets[0] + else: + target = node.target + + if isinstance(target, ast.Subscript): + if not isinstance(target.value, ast.Name): + return None, None + name = target.value.id + elif isinstance(target, ast.Name): + name = target.id + else: + return None, None + + if name not in variables: + return None, None + + key = None + if isinstance(target, ast.Subscript): + # We need to branch to deal with python version differences. + if isinstance(target.slice, ast.Constant): + # Python >= 3.9 + assert isinstance(target.slice.value, str) + key = target.slice.value + else: + # Others + assert isinstance(target.slice, ast.Index) + assert isinstance(target.slice.value, ast.Str) + key = target.slice.value.s + + return name, key + + def assigned_values(node): + value = node.value + if isinstance(value, ast.List): + for v in value.elts: + assert isinstance(v, ast.Str) + yield v.s + else: + assert isinstance(value, ast.Str) + yield value.s + + assignments = [] + + class Visitor(ast.NodeVisitor): + def helper(self, node): + name, key = assigned_variable(node) + if not name: + return + + for v in assigned_values(node): + assignments.append((name, key, v)) + + def visit_Assign(self, node): + self.helper(node) + + def visit_AugAssign(self, node): + self.helper(node) + + if path: + mozbuild_paths = chain(*self._find_relevant_mozbuilds([path]).values()) + else: + mozbuild_paths = self.all_mozbuild_paths() + + for p in mozbuild_paths: + assignments[:] = [] + full = os.path.join(self.config.topsrcdir, p) + + with open(full, "rb") as fh: + source = fh.read() + + tree = ast.parse(source, full) + Visitor().visit(tree) + + for name, key, value in assignments: + yield p, name, key, value + + def read_mozbuild(self, path, config, descend=True, metadata={}): + """Read and process a mozbuild file, descending into children. + + This starts with a single mozbuild file, executes it, and descends into + other referenced files per our traversal logic. + + The traversal logic is to iterate over the ``*DIRS`` variables, treating + each element as a relative directory path. For each encountered + directory, we will open the moz.build file located in that + directory in a new Sandbox and process it. + + If descend is True (the default), we will descend into child + directories and files per variable values. + + Arbitrary metadata in the form of a dict can be passed into this + function. This feature is intended to facilitate the build reader + injecting state and annotations into moz.build files that is + independent of the sandbox's execution context. + + Traversal is performed depth first (for no particular reason). 
+ """ + self._execution_stack.append(path) + try: + for s in self._read_mozbuild( + path, config, descend=descend, metadata=metadata + ): + yield s + + except BuildReaderError as bre: + raise bre + + except SandboxCalledError as sce: + raise BuildReaderError( + list(self._execution_stack), sys.exc_info()[2], sandbox_called_error=sce + ) + + except SandboxExecutionError as se: + raise BuildReaderError( + list(self._execution_stack), sys.exc_info()[2], sandbox_exec_error=se + ) + + except SandboxLoadError as sle: + raise BuildReaderError( + list(self._execution_stack), sys.exc_info()[2], sandbox_load_error=sle + ) + + except SandboxValidationError as ve: + raise BuildReaderError( + list(self._execution_stack), sys.exc_info()[2], validation_error=ve + ) + + except Exception as e: + raise BuildReaderError( + list(self._execution_stack), sys.exc_info()[2], other_error=e + ) + + def _read_mozbuild(self, path, config, descend, metadata): + path = mozpath.normpath(path) + log( + self._log, + logging.DEBUG, + "read_mozbuild", + {"path": path}, + "Reading file: {path}".format(path=path), + ) + + if path in self._read_files: + log( + self._log, + logging.WARNING, + "read_already", + {"path": path}, + "File already read. Skipping: {path}".format(path=path), + ) + return + + self._read_files.add(path) + + time_start = time.monotonic() + + topobjdir = config.topobjdir + + relpath = mozpath.relpath(path, config.topsrcdir) + reldir = mozpath.dirname(relpath) + + if mozpath.dirname(relpath) == "js/src" and not config.substs.get( + "JS_STANDALONE" + ): + config = ConfigEnvironment.from_config_status( + mozpath.join(topobjdir, reldir, "config.status") + ) + config.topobjdir = topobjdir + + context = Context(VARIABLES, config, self.finder) + sandbox = MozbuildSandbox(context, metadata=metadata, finder=self.finder) + sandbox.exec_file(path) + self._execution_time += time.monotonic() - time_start + self._file_count += len(context.all_paths) + + # Yield main context before doing any processing. This gives immediate + # consumers an opportunity to change state before our remaining + # processing is performed. + yield context + + # We need the list of directories pre-gyp processing for later. + dirs = list(context.get("DIRS", [])) + + curdir = mozpath.dirname(path) + + for target_dir in context.get("GYP_DIRS", []): + gyp_dir = context["GYP_DIRS"][target_dir] + for v in ("input", "variables"): + if not getattr(gyp_dir, v): + raise SandboxValidationError( + "Missing value for " 'GYP_DIRS["%s"].%s' % (target_dir, v), + context, + ) + + # The make backend assumes contexts for sub-directories are + # emitted after their parent, so accumulate the gyp contexts. + # We could emit the parent context before processing gyp + # configuration, but we need to add the gyp objdirs to that context + # first. + from .gyp_reader import GypProcessor + + non_unified_sources = set() + for s in gyp_dir.non_unified_sources: + source = SourcePath(context, s) + if not self.finder.get(source.full_path): + raise SandboxValidationError("Cannot find %s." 
% source, context) + non_unified_sources.add(source) + action_overrides = {} + for action, script in six.iteritems(gyp_dir.action_overrides): + action_overrides[action] = SourcePath(context, script) + + gyp_processor = GypProcessor( + context.config, + gyp_dir, + mozpath.join(curdir, gyp_dir.input), + mozpath.join(context.objdir, target_dir), + self._gyp_worker_pool, + action_overrides, + non_unified_sources, + ) + self._gyp_processors.append(gyp_processor) + + for subcontext in sandbox.subcontexts: + yield subcontext + + # Traverse into referenced files. + + # It's very tempting to use a set here. Unfortunately, the recursive + # make backend needs order preserved. Once we autogenerate all backend + # files, we should be able to convert this to a set. + recurse_info = OrderedDict() + for d in dirs: + if d in recurse_info: + raise SandboxValidationError( + "Directory (%s) registered multiple times" + % (mozpath.relpath(d.full_path, context.srcdir)), + context, + ) + + recurse_info[d] = {} + for key in sandbox.metadata: + if key == "exports": + sandbox.recompute_exports() + + recurse_info[d][key] = dict(sandbox.metadata[key]) + + for path, child_metadata in recurse_info.items(): + child_path = path.join("moz.build").full_path + + # Ensure we don't break out of the topsrcdir. We don't do realpath + # because it isn't necessary. If there are symlinks in the srcdir, + # that's not our problem. We're not a hosted application: we don't + # need to worry about security too much. + if not is_read_allowed(child_path, context.config): + raise SandboxValidationError( + "Attempting to process file outside of allowed paths: %s" + % child_path, + context, + ) + + if not descend: + continue + + for res in self.read_mozbuild( + child_path, context.config, metadata=child_metadata + ): + yield res + + self._execution_stack.pop() + + def _find_relevant_mozbuilds(self, paths): + """Given a set of filesystem paths, find all relevant moz.build files. + + We assume that a moz.build file in the directory ancestry of a given path + is relevant to that path. Let's say we have the following files on disk:: + + moz.build + foo/moz.build + foo/baz/moz.build + foo/baz/file1 + other/moz.build + other/file2 + + If ``foo/baz/file1`` is passed in, the relevant moz.build files are + ``moz.build``, ``foo/moz.build``, and ``foo/baz/moz.build``. For + ``other/file2``, the relevant moz.build files are ``moz.build`` and + ``other/moz.build``. + + Returns a dict of input paths to a list of relevant moz.build files. + The root moz.build file is first and the leaf-most moz.build is last. + """ + root = self.config.topsrcdir + result = {} + + @memoize + def exists(path): + return self._relevant_mozbuild_finder.get(path) is not None + + def itermozbuild(path): + subpath = "" + yield "moz.build" + for part in mozpath.split(path): + subpath = mozpath.join(subpath, part) + yield mozpath.join(subpath, "moz.build") + + for path in sorted(paths): + path = mozpath.normpath(path) + if os.path.isabs(path): + if not mozpath.basedir(path, [root]): + raise Exception("Path outside topsrcdir: %s" % path) + path = mozpath.relpath(path, root) + + result[path] = [p for p in itermozbuild(path) if exists(p)] + + return result + + def read_relevant_mozbuilds(self, paths): + """Read and process moz.build files relevant for a set of paths. + + For an iterable of relative-to-root filesystem paths ``paths``, + find all moz.build files that may apply to them based on filesystem + hierarchy and read those moz.build files. + + The return value is a 2-tuple. 
The first item is a dict mapping each + input filesystem path to a list of Context instances that are relevant + to that path. The second item is a list of all Context instances. Each + Context instance is in both data structures. + """ + relevants = self._find_relevant_mozbuilds(paths) + + topsrcdir = self.config.topsrcdir + + # Source moz.build file to directories to traverse. + dirs = defaultdict(set) + # Relevant path to absolute paths of relevant contexts. + path_mozbuilds = {} + + # There is room to improve this code (and the code in + # _find_relevant_mozbuilds) to better handle multiple files in the same + # directory. Bug 1136966 tracks. + for path, mbpaths in relevants.items(): + path_mozbuilds[path] = [mozpath.join(topsrcdir, p) for p in mbpaths] + + for i, mbpath in enumerate(mbpaths[0:-1]): + source_dir = mozpath.dirname(mbpath) + target_dir = mozpath.dirname(mbpaths[i + 1]) + + d = mozpath.normpath(mozpath.join(topsrcdir, mbpath)) + dirs[d].add(mozpath.relpath(target_dir, source_dir)) + + # Exporting doesn't work reliably in tree traversal mode. Override + # the function to no-op. + functions = dict(FUNCTIONS) + + def export(sandbox): + return lambda varname: None + + functions["export"] = tuple([export] + list(FUNCTIONS["export"][1:])) + + metadata = { + "functions": functions, + } + + contexts = defaultdict(list) + all_contexts = [] + for context in self.read_mozbuild( + mozpath.join(topsrcdir, "moz.build"), self.config, metadata=metadata + ): + # Explicitly set directory traversal variables to override default + # traversal rules. + if not isinstance(context, SubContext): + for v in ("DIRS", "GYP_DIRS"): + context[v][:] = [] + + context["DIRS"] = sorted(dirs[context.main_path]) + + contexts[context.main_path].append(context) + all_contexts.append(context) + + result = {} + for path, paths in path_mozbuilds.items(): + result[path] = six.moves.reduce( + lambda x, y: x + y, (contexts[p] for p in paths), [] + ) + + return result, all_contexts + + def files_info(self, paths): + """Obtain aggregate data from Files for a set of files. + + Given a set of input paths, determine which moz.build files may + define metadata for them, evaluate those moz.build files, and + apply file metadata rules defined within to determine metadata + values for each file requested. + + Essentially, for each input path: + + 1. Determine the set of moz.build files relevant to that file by + looking for moz.build files in ancestor directories. + 2. Evaluate moz.build files starting with the most distant. + 3. Iterate over Files sub-contexts. + 4. If the file pattern matches the file we're seeking info on, + apply attribute updates. + 5. Return the most recent value of attributes. + """ + paths, _ = self.read_relevant_mozbuilds(paths) + + r = {} + + # Only do wildcard matching if the '*' character is present. + # Otherwise, mozpath.match will match directories, which we've + # arbitrarily chosen to not allow. + def path_matches_pattern(relpath, pattern): + if pattern == relpath: + return True + + return "*" in pattern and mozpath.match(relpath, pattern) + + for path, ctxs in paths.items(): + # Should be normalized by read_relevant_mozbuilds. + assert "\\" not in path + + flags = Files(Context()) + + for ctx in ctxs: + if not isinstance(ctx, Files): + continue + + # read_relevant_mozbuilds() normalizes paths and ensures that + # the contexts have paths in the ancestry of the path. When + # iterating over tens of thousands of paths, mozpath.relpath() + # can be very expensive. 
So, given our assumptions about paths, + we implement an optimized version. + ctx_rel_dir = ctx.relsrcdir + if ctx_rel_dir: + assert path.startswith(ctx_rel_dir) + relpath = path[len(ctx_rel_dir) + 1 :] + else: + relpath = path + + if any(path_matches_pattern(relpath, p) for p in ctx.patterns): + flags += ctx + + r[path] = flags + + return r diff --git a/python/mozbuild/mozbuild/frontend/sandbox.py b/python/mozbuild/mozbuild/frontend/sandbox.py new file mode 100644 index 0000000000..088e817cb0 --- /dev/null +++ b/python/mozbuild/mozbuild/frontend/sandbox.py @@ -0,0 +1,313 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +r"""Python sandbox implementation for build files. + +This module contains classes for Python sandboxes that execute in a +highly-controlled environment. + +The main class is `Sandbox`. This provides an execution environment for Python +code and is used to fill a Context instance for the takeaway information from +the execution. + +Code in this module takes a different approach to exception handling compared +to what you'd see elsewhere in Python. Arguments to built-in exceptions like +KeyError are machine parseable. This machine-friendly data is used to present +user-friendly error messages in the case of errors. +""" + +import os +import sys +import weakref + +import six +from mozpack.files import FileFinder + +from mozbuild.util import ReadOnlyDict, exec_ + +from .context import Context + +default_finder = FileFinder("/") + + +def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False): + """sorted() replacement for the sandbox, ordering alphabetically by + default. + """ + return sorted(iterable, key=key, reverse=reverse) + + +class SandboxError(Exception): + def __init__(self, file_stack): + self.file_stack = file_stack + + +class SandboxExecutionError(SandboxError): + """Represents errors encountered during execution of a Sandbox. + + This is a simple container exception. Its purpose is to capture state + so something else can report on it. + """ + + def __init__(self, file_stack, exc_type, exc_value, trace): + SandboxError.__init__(self, file_stack) + + self.exc_type = exc_type + self.exc_value = exc_value + self.trace = trace + + +class SandboxLoadError(SandboxError): + """Represents errors encountered when loading a file for execution. + + This exception represents errors in a Sandbox that occurred as part of + loading a file. The error could have occurred in the course of executing + a file. If so, the file_stack will be non-empty and the file that caused + the load will be on top of the stack. + """ + + def __init__(self, file_stack, trace, illegal_path=None, read_error=None): + SandboxError.__init__(self, file_stack) + + self.trace = trace + self.illegal_path = illegal_path + self.read_error = read_error + + +class Sandbox(dict): + """Represents a sandbox for executing Python code. + + This class provides a sandbox for execution of a single mozbuild frontend + file. The results of that execution are stored in the Context instance given + as the ``context`` argument. + + Sandbox is effectively a glorified wrapper around compile() + exec(). You + point it at some Python code and it executes it. The main difference from + executing Python code like normal is that the executed code is very limited + in what it can do: the sandbox only exposes a very limited set of Python + functionality.
Only specific types and functions are available. This + prevents executed code from doing things like importing modules, opening files, + etc. + + Sandbox instances act as the global namespace for the sandboxed execution + itself. They shall not be used to access the results of the execution. + Those results are available in the given Context instance after execution. + + The Sandbox itself is responsible for enforcing rules such as forbidding + reassignment of variables. + + Implementation note: Sandbox derives from dict because exec() insists that + what it is given for namespaces is a dict. + """ + + # The default set of builtins. + BUILTINS = ReadOnlyDict( + { + # Only real Python built-ins should go here. + "None": None, + "False": False, + "True": True, + "sorted": alphabetical_sorted, + "int": int, + "set": set, + "tuple": tuple, + } + ) + + def __init__(self, context, finder=default_finder): + """Initialize a Sandbox ready for execution.""" + self._builtins = self.BUILTINS + dict.__setitem__(self, "__builtins__", self._builtins) + + assert isinstance(self._builtins, ReadOnlyDict) + assert isinstance(context, Context) + + # Contexts are modeled as a stack because multiple context managers + # may be active. + self._active_contexts = [context] + + # Seen sub-contexts. Will be populated with other Context instances + # that were related to execution of this instance. + self.subcontexts = [] + + # We need to record this because it gets swallowed as part of + # evaluation. + self._last_name_error = None + + # Current literal source being executed. + self._current_source = None + + self._finder = finder + + @property + def _context(self): + return self._active_contexts[-1] + + def exec_file(self, path): + """Execute code at a path in the sandbox. + + The path must be absolute. + """ + assert os.path.isabs(path) + + try: + source = six.ensure_text(self._finder.get(path).read()) + except Exception: + raise SandboxLoadError( + self._context.source_stack, sys.exc_info()[2], read_error=path + ) + + self.exec_source(source, path) + + def exec_source(self, source, path=""): + """Execute Python code within a string. + + The passed string should contain Python code to be executed. The string + will be compiled and executed. + + You should almost always go through exec_file() because exec_source() + does not perform extra path normalization. This can cause relative + paths to behave weirdly. + """ + + def execute(): + # compile() inherits the __future__ from the module by default. We + # do want Unicode literals. + code = compile(source, path, "exec") + # We use ourself as the global namespace for the execution. There + # is no need for a separate local namespace as moz.build execution + # is flat, namespace-wise. + old_source = self._current_source + self._current_source = source + try: + exec_(code, self) + finally: + self._current_source = old_source + + self.exec_function(execute, path=path) + + def exec_function( + self, func, args=(), kwargs={}, path="", becomes_current_path=True + ): + """Execute function with the given arguments in the sandbox.""" + if path and becomes_current_path: + self._context.push_source(path) + + old_sandbox = self._context._sandbox + self._context._sandbox = weakref.ref(self) + + # We don't have to worry about bytecode generation here because we are + # too low-level for that. However, we could add bytecode generation via + # the marshal module if parsing performance were ever an issue.
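The compile()-plus-exec() approach used by exec_source() can be shown in isolation; a minimal sketch with a restricted global namespace (illustrative only, not this module's API):

    # The namespace dict doubles as the global scope, mirroring how
    # Sandbox passes itself to exec_().
    SAFE_BUILTINS = {"None": None, "False": False, "True": True, "sorted": sorted}

    def run_restricted(source, path="<sandbox>"):
        namespace = {"__builtins__": SAFE_BUILTINS, "RESULT": []}
        exec(compile(source, path, "exec"), namespace)
        return namespace["RESULT"]

    print(run_restricted("RESULT.append(sorted(['b', 'a']))"))  # [['a', 'b']]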
+ + old_source = self._current_source + self._current_source = None + try: + func(*args, **kwargs) + except SandboxError as e: + raise e + except NameError as e: + # A NameError is raised when a variable could not be found. + # The original KeyError has been dropped by the interpreter. + # However, we should have it cached in our instance! + + # Unless a script is doing something wonky like catching NameError + # itself (that would be silly), if there is an exception on the + # global namespace, that's our error. + actual = e + + if self._last_name_error is not None: + actual = self._last_name_error + source_stack = self._context.source_stack + if not becomes_current_path: + # Add current file to the stack because it wasn't added before + # sandbox execution. + source_stack.append(path) + raise SandboxExecutionError( + source_stack, type(actual), actual, sys.exc_info()[2] + ) + + except Exception: + # Need to copy the stack otherwise we get a reference and that is + # mutated during the finally. + exc = sys.exc_info() + source_stack = self._context.source_stack + if not becomes_current_path: + # Add current file to the stack because it wasn't added before + # sandbox execution. + source_stack.append(path) + raise SandboxExecutionError(source_stack, exc[0], exc[1], exc[2]) + finally: + self._current_source = old_source + self._context._sandbox = old_sandbox + if path and becomes_current_path: + self._context.pop_source() + + def push_subcontext(self, context): + """Push a SubContext onto the execution stack. + + When called, the active context will be set to the specified context, + meaning all variable accesses will go through it. We also record this + SubContext as having been executed as part of this sandbox. + """ + self._active_contexts.append(context) + if context not in self.subcontexts: + self.subcontexts.append(context) + + def pop_subcontext(self, context): + """Pop a SubContext off the execution stack. + + SubContexts must be pushed and popped in opposite order. This is + validated as part of the function call to ensure proper consumer API + use. + """ + popped = self._active_contexts.pop() + assert popped == context + + def __getitem__(self, key): + if key.isupper(): + try: + return self._context[key] + except Exception as e: + self._last_name_error = e + raise + + return dict.__getitem__(self, key) + + def __setitem__(self, key, value): + if key in self._builtins or key == "__builtins__": + raise KeyError("Cannot reassign builtins") + + if key.isupper(): + # Forbid assigning over a previously set value. Interestingly, when + # doing FOO += ['bar'], python actually does something like: + # foo = namespace.__getitem__('FOO') + # foo.__iadd__(['bar']) + # namespace.__setitem__('FOO', foo) + # This means __setitem__ is called with the value that is already + # in the dict, when doing +=, which is permitted. + if key in self._context and self._context[key] is not value: + raise KeyError("global_ns", "reassign", key) + + if ( + key not in self._context + and isinstance(value, (list, dict)) + and not value + ): + raise KeyError("Variable %s assigned an empty value." 
% key) + + self._context[key] = value + else: + dict.__setitem__(self, key, value) + + def get(self, key, default=None): + raise NotImplementedError("Not supported") + + def __iter__(self): + raise NotImplementedError("Not supported") + + def __contains__(self, key): + if key.isupper(): + return key in self._context + return dict.__contains__(self, key) diff --git a/python/mozbuild/mozbuild/gen_test_backend.py b/python/mozbuild/mozbuild/gen_test_backend.py new file mode 100644 index 0000000000..ce499fe90a --- /dev/null +++ b/python/mozbuild/mozbuild/gen_test_backend.py @@ -0,0 +1,53 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import sys + +import mozpack.path as mozpath + +from mozbuild.backend.test_manifest import TestManifestBackend +from mozbuild.base import BuildEnvironmentNotFoundException, MozbuildObject +from mozbuild.frontend.emitter import TreeMetadataEmitter +from mozbuild.frontend.reader import BuildReader, EmptyConfig + + +def gen_test_backend(): + build_obj = MozbuildObject.from_environment() + try: + config = build_obj.config_environment + except BuildEnvironmentNotFoundException: + # Create a stub config.status file, since the TestManifest backend needs + # to be re-created if configure runs. If the file doesn't exist, + # mozbuild continually thinks the TestManifest backend is out of date + # and tries to regenerate it. + + if not os.path.isdir(build_obj.topobjdir): + os.makedirs(build_obj.topobjdir) + + config_status = mozpath.join(build_obj.topobjdir, "config.status") + open(config_status, "w").close() + + print("No build detected, test metadata may be incomplete.") + + # If 'JS_STANDALONE' is set, tests that don't require an objdir won't + # be picked up due to bug 1345209. + substs = EmptyConfig.default_substs + if "JS_STANDALONE" in substs: + del substs["JS_STANDALONE"] + + config = EmptyConfig(build_obj.topsrcdir, substs) + config.topobjdir = build_obj.topobjdir + + reader = BuildReader(config) + emitter = TreeMetadataEmitter(config) + backend = TestManifestBackend(config) + + context = reader.read_topsrcdir() + data = emitter.emit(context, emitfn=emitter._process_test_manifests) + backend.consume(data) + + +if __name__ == "__main__": + sys.exit(gen_test_backend()) diff --git a/python/mozbuild/mozbuild/generated_sources.py b/python/mozbuild/mozbuild/generated_sources.py new file mode 100644 index 0000000000..e22e71e5f6 --- /dev/null +++ b/python/mozbuild/mozbuild/generated_sources.py @@ -0,0 +1,75 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import hashlib +import json +import os + +import mozpack.path as mozpath +from mozpack.files import FileFinder + +GENERATED_SOURCE_EXTS = (".rs", ".c", ".h", ".cc", ".cpp") + + +def sha512_digest(data): + """ + Generate the SHA-512 digest of `data` and return it as a hex string. + """ + return hashlib.sha512(data).hexdigest() + + +def get_filename_with_digest(name, contents): + """ + Return the filename that will be used to store the generated file + in the S3 bucket, consisting of the SHA-512 digest of `contents` + joined with the relative path `name`. 
+ """ + digest = sha512_digest(contents) + return mozpath.join(digest, name) + + +def get_generated_sources(): + """ + Yield tuples of `(objdir-rel-path, file)` for generated source files + in this objdir, where `file` is either an absolute path to the file or + a `mozpack.File` instance. + """ + import buildconfig + + # First, get the list of generated sources produced by the build backend. + gen_sources = os.path.join(buildconfig.topobjdir, "generated-sources.json") + with open(gen_sources, "r") as f: + data = json.load(f) + for f in data["sources"]: + # Exclute symverscript + if mozpath.basename(f) != "symverscript": + yield f, mozpath.join(buildconfig.topobjdir, f) + # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders. + base = "ipc/ipdl/_ipdlheaders" + finder = FileFinder(mozpath.join(buildconfig.topobjdir, base)) + for p, f in finder.find("**/*.h"): + yield mozpath.join(base, p), f + # Next, return any source files that were generated into the Rust + # object directory. + rust_build_kind = "debug" if buildconfig.substs.get("MOZ_DEBUG_RUST") else "release" + base = mozpath.join(buildconfig.substs["RUST_TARGET"], rust_build_kind, "build") + finder = FileFinder(mozpath.join(buildconfig.topobjdir, base)) + for p, f in finder: + if p.endswith(GENERATED_SOURCE_EXTS): + yield mozpath.join(base, p), f + + +def get_s3_region_and_bucket(): + """ + Return a tuple of (region, bucket) giving the AWS region and S3 + bucket to which generated sources should be uploaded. + """ + region = "us-west-2" + level = os.environ.get("MOZ_SCM_LEVEL", "1") + bucket = { + "1": "gecko-generated-sources-l1", + "2": "gecko-generated-sources-l2", + "3": "gecko-generated-sources", + }[level] + return (region, bucket) diff --git a/python/mozbuild/mozbuild/gn_processor.py b/python/mozbuild/mozbuild/gn_processor.py new file mode 100644 index 0000000000..b6c51ee010 --- /dev/null +++ b/python/mozbuild/mozbuild/gn_processor.py @@ -0,0 +1,788 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import json +import os +import subprocess +import sys +import tempfile +from collections import defaultdict, deque +from copy import deepcopy +from pathlib import Path +from shutil import which + +import mozpack.path as mozpath +import six + +from mozbuild.bootstrap import bootstrap_toolchain +from mozbuild.frontend.sandbox import alphabetical_sorted +from mozbuild.util import mkdir + +license_header = """# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" + +generated_header = """ + ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### + ### DO NOT edit it by hand. 
### +""" + + +class MozbuildWriter(object): + def __init__(self, fh): + self._fh = fh + self.indent = "" + self._indent_increment = 4 + + # We need to correlate a small amount of state here to figure out + # which library template to use ("Library()" or "SharedLibrary()") + self._library_name = None + self._shared_library = None + + def mb_serialize(self, v): + if isinstance(v, list): + if len(v) <= 1: + return repr(v) + # Pretty print a list + raw = json.dumps(v, indent=self._indent_increment) + # Add the indent of the current indentation level + return raw.replace("\n", "\n" + self.indent) + if isinstance(v, bool): + return repr(v) + return '"%s"' % v + + def finalize(self): + if self._library_name: + self.write("\n") + if self._shared_library: + self.write_ln( + "SharedLibrary(%s)" % self.mb_serialize(self._library_name) + ) + else: + self.write_ln("Library(%s)" % self.mb_serialize(self._library_name)) + + def write(self, content): + self._fh.write(content) + + def write_ln(self, line): + self.write(self.indent) + self.write(line) + self.write("\n") + + def write_attrs(self, context_attrs): + for k in sorted(context_attrs.keys()): + v = context_attrs[k] + if isinstance(v, (list, set)): + self.write_mozbuild_list(k, v) + elif isinstance(v, dict): + self.write_mozbuild_dict(k, v) + else: + self.write_mozbuild_value(k, v) + + def write_mozbuild_list(self, key, value): + if value: + self.write("\n") + self.write(self.indent + key) + self.write(" += [\n " + self.indent) + self.write( + (",\n " + self.indent).join( + alphabetical_sorted(self.mb_serialize(v) for v in value) + ) + ) + self.write("\n") + self.write_ln("]") + + def write_mozbuild_value(self, key, value): + if value: + if key == "LIBRARY_NAME": + self._library_name = value + elif key == "FORCE_SHARED_LIB": + self._shared_library = True + else: + self.write("\n") + self.write_ln("%s = %s" % (key, self.mb_serialize(value))) + self.write("\n") + + def write_mozbuild_dict(self, key, value): + # Templates we need to use instead of certain values. 
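A small usage sketch of the writer above (hypothetical attribute values; io.StringIO stands in for the output file):

    import io

    fh = io.StringIO()
    mb = MozbuildWriter(fh)
    mb.write_attrs(
        {
            "DEFINES": {"UNICODE": True},
            "LIBRARY_NAME": "foo_gn",
            "SOURCES": ["/foo/a.cpp"],
        }
    )
    mb.finalize()  # emits Library("foo_gn") since FORCE_SHARED_LIB was not set
    print(fh.getvalue())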
+ replacements = ( + ( + ("COMPILE_FLAGS", '"WARNINGS_AS_ERRORS"', "[]"), + "AllowCompilerWarnings()", + ), + ) + if value: + self.write("\n") + if key == "GeneratedFile": + self.write_ln("GeneratedFile(") + self.indent += " " * self._indent_increment + for o in value["outputs"]: + self.write_ln("%s," % (self.mb_serialize(o))) + for k, v in sorted(value.items()): + if k == "outputs": + continue + self.write_ln("%s=%s," % (k, self.mb_serialize(v))) + self.indent = self.indent[self._indent_increment :] + self.write_ln(")") + return + for k in sorted(value.keys()): + v = value[k] + subst_vals = key, self.mb_serialize(k), self.mb_serialize(v) + wrote_ln = False + for flags, tmpl in replacements: + if subst_vals == flags: + self.write_ln(tmpl) + wrote_ln = True + + if not wrote_ln: + self.write_ln("%s[%s] = %s" % subst_vals) + + def write_condition(self, values): + def mk_condition(k, v): + if not v: + return 'not CONFIG["%s"]' % k + return 'CONFIG["%s"] == %s' % (k, self.mb_serialize(v)) + + self.write("\n") + self.write("if ") + self.write( + " and ".join(mk_condition(k, values[k]) for k in sorted(values.keys())) + ) + self.write(":\n") + self.indent += " " * self._indent_increment + + def terminate_condition(self): + assert len(self.indent) >= self._indent_increment + self.indent = self.indent[self._indent_increment :] + + +def find_deps(all_targets, target): + all_deps = set() + queue = deque([target]) + while queue: + item = queue.popleft() + all_deps.add(item) + for dep in all_targets[item]["deps"]: + if dep not in all_deps: + queue.append(dep) + return all_deps + + +def filter_gn_config(path, gn_result, sandbox_vars, input_vars, gn_target): + gen_path = path / "gen" + # Translates the raw output of gn into just what we'll need to generate a + # mozbuild configuration. + gn_out = {"targets": {}, "sandbox_vars": sandbox_vars} + + cpus = { + "arm64": "aarch64", + "x64": "x86_64", + "mipsel": "mips32", + "mips64el": "mips64", + } + oses = { + "android": "Android", + "linux": "Linux", + "mac": "Darwin", + "openbsd": "OpenBSD", + "win": "WINNT", + } + + mozbuild_args = { + "MOZ_DEBUG": "1" if input_vars.get("is_debug") else None, + "OS_TARGET": oses[input_vars["target_os"]], + "CPU_ARCH": cpus.get(input_vars["target_cpu"], input_vars["target_cpu"]), + } + if "use_x11" in input_vars: + mozbuild_args["MOZ_X11"] = "1" if input_vars["use_x11"] else None + + gn_out["mozbuild_args"] = mozbuild_args + all_deps = find_deps(gn_result["targets"], gn_target) + + for target_fullname in all_deps: + raw_spec = gn_result["targets"][target_fullname] + + if raw_spec["type"] == "action": + # Special handling for the action type to avoid putting empty + # arrays of args, script and outputs on all other types in `spec`. + spec = {} + for spec_attr in ( + "type", + "args", + "script", + "outputs", + ): + spec[spec_attr] = raw_spec.get(spec_attr, []) + if spec_attr == "outputs": + # Rebase outputs from an absolute path in the temp dir to a + # path relative to the target dir. + spec[spec_attr] = [ + mozpath.relpath(d, path) for d in spec[spec_attr] + ] + gn_out["targets"][target_fullname] = spec + + # TODO: 'executable' will need to be handled here at some point as well. 
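find_deps() above is a plain breadth-first closure over the "deps" edges; a toy illustration (made-up target names):

    toy_targets = {
        "//a:a": {"deps": ["//b:b"]},
        "//b:b": {"deps": ["//c:c"]},
        "//c:c": {"deps": []},
    }
    # The result includes the starting target itself.
    assert find_deps(toy_targets, "//a:a") == {"//a:a", "//b:b", "//c:c"}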
+ if raw_spec["type"] not in ("static_library", "shared_library", "source_set"): + continue + + spec = {} + for spec_attr in ( + "type", + "sources", + "defines", + "include_dirs", + "cflags", + "cflags_c", + "cflags_cc", + "cflags_objc", + "cflags_objcc", + "deps", + "libs", + ): + spec[spec_attr] = raw_spec.get(spec_attr, []) + if spec_attr == "defines": + spec[spec_attr] = [ + d + for d in spec[spec_attr] + if "CR_XCODE_VERSION" not in d + and "CR_SYSROOT_HASH" not in d + and "_FORTIFY_SOURCE" not in d + ] + if spec_attr == "include_dirs": + # Rebase outputs from an absolute path in the temp dir to a path + # relative to the target dir. + spec[spec_attr] = [ + d if gen_path != Path(d) else "!//gen" for d in spec[spec_attr] + ] + + gn_out["targets"][target_fullname] = spec + + return gn_out + + +def process_gn_config( + gn_config, topsrcdir, srcdir, non_unified_sources, sandbox_vars, mozilla_flags +): + # Translates a json gn config into attributes that can be used to write out + # moz.build files for this configuration. + + # Much of this code is based on similar functionality in `gyp_reader.py`. + + mozbuild_attrs = {"mozbuild_args": gn_config.get("mozbuild_args", None), "dirs": {}} + + targets = gn_config["targets"] + + project_relsrcdir = mozpath.relpath(srcdir, topsrcdir) + + non_unified_sources = set([mozpath.normpath(s) for s in non_unified_sources]) + + def target_info(fullname): + path, name = target_fullname.split(":") + # Stripping '//' gives us a path relative to the project root, + # adding a suffix avoids name collisions with libraries already + # in the tree (like "webrtc"). + return path.lstrip("//"), name + "_gn" + + def resolve_path(path): + # GN will have resolved all these paths relative to the root of the + # project indicated by "//". + if path.startswith("//"): + path = path[2:] + if not path.startswith("/"): + path = "/%s/%s" % (project_relsrcdir, path) + return path + + # Process all targets from the given gn project and its dependencies. + for target_fullname, spec in six.iteritems(targets): + + target_path, target_name = target_info(target_fullname) + context_attrs = {} + + # Remove leading 'lib' from the target_name if any, and use as + # library name. + name = target_name + if spec["type"] in ("static_library", "shared_library", "source_set", "action"): + if name.startswith("lib"): + name = name[3:] + context_attrs["LIBRARY_NAME"] = six.ensure_text(name) + else: + raise Exception( + "The following GN target type is not currently " + 'consumed by moz.build: "%s". It may need to be ' + "added, or you may need to re-run the " + "`GnConfigGen` step." 
% spec["type"] + ) + + if spec["type"] == "shared_library": + context_attrs["FORCE_SHARED_LIB"] = True + + if spec["type"] == "action" and "script" in spec: + flags = [ + resolve_path(spec["script"]), + resolve_path(""), + ] + spec.get("args", []) + context_attrs["GeneratedFile"] = { + "script": "/python/mozbuild/mozbuild/action/file_generate_wrapper.py", + "entry_point": "action", + "outputs": [resolve_path(f) for f in spec["outputs"]], + "flags": flags, + } + + sources = [] + unified_sources = [] + extensions = set() + use_defines_in_asflags = False + + for f in spec.get("sources", []): + f = f.lstrip("//") + ext = mozpath.splitext(f)[-1] + extensions.add(ext) + src = "%s/%s" % (project_relsrcdir, f) + if ext == ".h" or ext == ".inc": + continue + elif ext == ".def": + context_attrs["SYMBOLS_FILE"] = src + elif ext != ".S" and src not in non_unified_sources: + unified_sources.append("/%s" % src) + else: + sources.append("/%s" % src) + # The Mozilla build system doesn't use DEFINES for building + # ASFILES. + if ext == ".s": + use_defines_in_asflags = True + + context_attrs["SOURCES"] = sources + context_attrs["UNIFIED_SOURCES"] = unified_sources + + context_attrs["DEFINES"] = {} + for define in spec.get("defines", []): + if "=" in define: + name, value = define.split("=", 1) + context_attrs["DEFINES"][name] = value + else: + context_attrs["DEFINES"][define] = True + + context_attrs["LOCAL_INCLUDES"] = [] + for include in spec.get("include_dirs", []): + if include.startswith("!"): + include = "!" + resolve_path(include[1:]) + else: + include = resolve_path(include) + # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do. + resolved = mozpath.abspath(mozpath.join(topsrcdir, include[1:])) + if not os.path.exists(resolved): + # GN files may refer to include dirs that are outside of the + # tree or we simply didn't vendor. Print a warning in this case. + if not resolved.endswith("gn-output/gen"): + print( + "Included path: '%s' does not exist, dropping include from GN " + "configuration." % resolved, + file=sys.stderr, + ) + continue + context_attrs["LOCAL_INCLUDES"] += [include] + + context_attrs["ASFLAGS"] = spec.get("asflags_mozilla", []) + if use_defines_in_asflags and context_attrs["DEFINES"]: + context_attrs["ASFLAGS"] += ["-D" + d for d in context_attrs["DEFINES"]] + suffix_map = { + ".c": ("CFLAGS", ["cflags", "cflags_c"]), + ".cpp": ("CXXFLAGS", ["cflags", "cflags_cc"]), + ".cc": ("CXXFLAGS", ["cflags", "cflags_cc"]), + ".m": ("CMFLAGS", ["cflags", "cflags_objc"]), + ".mm": ("CMMFLAGS", ["cflags", "cflags_objcc"]), + } + variables = (suffix_map[e] for e in extensions if e in suffix_map) + for (var, flag_keys) in variables: + flags = [ + _f for _k in flag_keys for _f in spec.get(_k, []) if _f in mozilla_flags + ] + for f in flags: + # the result may be a string or a list. + if isinstance(f, six.string_types): + context_attrs.setdefault(var, []).append(f) + else: + context_attrs.setdefault(var, []).extend(f) + + context_attrs["OS_LIBS"] = [] + for lib in spec.get("libs", []): + lib_name = os.path.splitext(lib)[0] + if lib.endswith(".framework"): + context_attrs["OS_LIBS"] += ["-framework " + lib_name] + else: + context_attrs["OS_LIBS"] += [lib_name] + + # Add some features to all contexts. Put here in case LOCAL_INCLUDES + # order matters. + context_attrs["LOCAL_INCLUDES"] += [ + "!/ipc/ipdl/_ipdlheaders", + "/ipc/chromium/src", + "/tools/profiler/public", + ] + # These get set via VC project file settings for normal GYP builds. 
+ # TODO: Determine if these defines are needed for GN builds. + if gn_config["mozbuild_args"]["OS_TARGET"] == "WINNT": + context_attrs["DEFINES"]["UNICODE"] = True + context_attrs["DEFINES"]["_UNICODE"] = True + + context_attrs["COMPILE_FLAGS"] = {"OS_INCLUDES": []} + + for key, value in sandbox_vars.items(): + if context_attrs.get(key) and isinstance(context_attrs[key], list): + # If we have a key from sandbox_vars that's also been + # populated here we use the value from sandbox_vars as our + # basis rather than overriding outright. + context_attrs[key] = value + context_attrs[key] + elif context_attrs.get(key) and isinstance(context_attrs[key], dict): + context_attrs[key].update(value) + else: + context_attrs[key] = value + + target_relsrcdir = mozpath.join(project_relsrcdir, target_path, target_name) + mozbuild_attrs["dirs"][target_relsrcdir] = context_attrs + + return mozbuild_attrs + + +def find_common_attrs(config_attributes): + # Returns the intersection of the given configs and prunes the inputs + # to no longer contain these common attributes. + + common_attrs = deepcopy(config_attributes[0]) + + def make_intersection(reference, input_attrs): + # Modifies `reference` so that after calling this function it only + # contains parts it had in common with in `input_attrs`. + + for k, input_value in input_attrs.items(): + # Anything in `input_attrs` must match what's already in + # `reference`. + common_value = reference.get(k) + if common_value: + if isinstance(input_value, list): + reference[k] = [ + i + for i in common_value + if input_value.count(i) == common_value.count(i) + ] + elif isinstance(input_value, dict): + reference[k] = { + key: value + for key, value in common_value.items() + if key in input_value and value == input_value[key] + } + elif input_value != common_value: + del reference[k] + elif k in reference: + del reference[k] + + # Additionally, any keys in `reference` that aren't in `input_attrs` + # must be deleted. + for k in set(reference.keys()) - set(input_attrs.keys()): + del reference[k] + + def make_difference(reference, input_attrs): + # Modifies `input_attrs` so that after calling this function it contains + # no parts it has in common with in `reference`. 
+ for k, input_value in list(six.iteritems(input_attrs)): + common_value = reference.get(k) + if common_value: + if isinstance(input_value, list): + input_attrs[k] = [ + i + for i in input_value + if common_value.count(i) != input_value.count(i) + ] + elif isinstance(input_value, dict): + input_attrs[k] = { + key: value + for key, value in input_value.items() + if key not in common_value + } + else: + del input_attrs[k] + + for config_attr_set in config_attributes[1:]: + make_intersection(common_attrs, config_attr_set) + + for config_attr_set in config_attributes: + make_difference(common_attrs, config_attr_set) + + return common_attrs + + +def write_mozbuild( + topsrcdir, + srcdir, + non_unified_sources, + gn_configs, + mozilla_flags, + write_mozbuild_variables, +): + + all_mozbuild_results = [] + + for gn_config in gn_configs: + mozbuild_attrs = process_gn_config( + gn_config, + topsrcdir, + srcdir, + non_unified_sources, + gn_config["sandbox_vars"], + mozilla_flags, + ) + all_mozbuild_results.append(mozbuild_attrs) + + # Translate {config -> {dirs -> build info}} into + # {dirs -> [(config, build_info)]} + configs_by_dir = defaultdict(list) + for config_attrs in all_mozbuild_results: + mozbuild_args = config_attrs["mozbuild_args"] + dirs = config_attrs["dirs"] + for d, build_data in dirs.items(): + configs_by_dir[d].append((mozbuild_args, build_data)) + + mozbuilds = set() + for relsrcdir, configs in sorted(configs_by_dir.items()): + target_srcdir = mozpath.join(topsrcdir, relsrcdir) + mkdir(target_srcdir) + + target_mozbuild = mozpath.join(target_srcdir, "moz.build") + mozbuilds.add(target_mozbuild) + with open(target_mozbuild, "w") as fh: + mb = MozbuildWriter(fh) + mb.write(license_header) + mb.write("\n") + mb.write(generated_header) + + try: + if relsrcdir in write_mozbuild_variables["INCLUDE_TK_CFLAGS_DIRS"]: + mb.write('if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk":\n') + mb.write(' CXXFLAGS += CONFIG["MOZ_GTK3_CFLAGS"]\n') + except KeyError: + pass + + all_args = [args for args, _ in configs] + + # Start with attributes that will be a part of the mozconfig + # for every configuration, then factor by other potentially useful + # combinations. + # FIXME: this is a time-bomb. See bug 1775202. + for attrs in ( + (), + ("MOZ_DEBUG",), + ("OS_TARGET",), + ("CPU_ARCH",), + ("MOZ_DEBUG", "OS_TARGET"), + ("OS_TARGET", "MOZ_X11"), + ("OS_TARGET", "CPU_ARCH"), + ("OS_TARGET", "CPU_ARCH", "MOZ_X11"), + ("OS_TARGET", "CPU_ARCH", "MOZ_DEBUG"), + ("OS_TARGET", "CPU_ARCH", "MOZ_DEBUG", "MOZ_X11"), + ): + conditions = set() + for args in all_args: + cond = tuple(((k, args.get(k) or "") for k in attrs)) + conditions.add(cond) + + for cond in sorted(conditions): + common_attrs = find_common_attrs( + [ + attrs + for args, attrs in configs + if all((args.get(k) or "") == v for k, v in cond) + ] + ) + if any(common_attrs.values()): + if cond: + mb.write_condition(dict(cond)) + mb.write_attrs(common_attrs) + if cond: + mb.terminate_condition() + + mb.finalize() + + dirs_mozbuild = mozpath.join(srcdir, "moz.build") + mozbuilds.add(dirs_mozbuild) + with open(dirs_mozbuild, "w") as fh: + mb = MozbuildWriter(fh) + mb.write(license_header) + mb.write("\n") + mb.write(generated_header) + + # Not every srcdir is present for every config, which needs to be + # reflected in the generated root moz.build. 
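The factoring done by find_common_attrs() above can be seen on a toy input; note that the per-config inputs are pruned in place:

    configs = [
        {"DEFINES": {"A": True, "B": True}, "SOURCES": ["x.cpp"]},
        {"DEFINES": {"A": True}, "SOURCES": ["x.cpp", "y.cpp"]},
    ]
    common = find_common_attrs(configs)
    # common     -> {"DEFINES": {"A": True}, "SOURCES": ["x.cpp"]}
    # configs[0] -> {"DEFINES": {"B": True}, "SOURCES": []}
    # configs[1] -> {"DEFINES": {}, "SOURCES": ["y.cpp"]}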
+ dirs_by_config = { + tuple(v["mozbuild_args"].items()): set(v["dirs"].keys()) + for v in all_mozbuild_results + } + + for attrs in ( + (), + ("OS_TARGET",), + ("OS_TARGET", "CPU_ARCH"), + ("OS_TARGET", "CPU_ARCH", "MOZ_X11"), + ): + + conditions = set() + for args in dirs_by_config.keys(): + cond = tuple(((k, dict(args).get(k) or "") for k in attrs)) + conditions.add(cond) + + for cond in sorted(conditions): + common_dirs = None + for args, dir_set in dirs_by_config.items(): + if all((dict(args).get(k) or "") == v for k, v in cond): + if common_dirs is None: + common_dirs = deepcopy(dir_set) + else: + common_dirs &= dir_set + + for args, dir_set in dirs_by_config.items(): + if all(dict(args).get(k) == v for k, v in cond): + dir_set -= common_dirs + + if common_dirs: + if cond: + mb.write_condition(dict(cond)) + mb.write_mozbuild_list("DIRS", ["/%s" % d for d in common_dirs]) + if cond: + mb.terminate_condition() + + # Remove possibly stale moz.builds + for root, dirs, files in os.walk(srcdir): + if "moz.build" in files: + file = os.path.join(root, "moz.build") + if file not in mozbuilds: + os.unlink(file) + + +def generate_gn_config( + srcdir, + gn_binary, + input_variables, + sandbox_variables, + gn_target, +): + def str_for_arg(v): + if v in (True, False): + return str(v).lower() + return '"%s"' % v + + input_variables = input_variables.copy() + input_variables.update( + { + "concurrent_links": 1, + "action_pool_depth": 1, + } + ) + + if input_variables["target_os"] == "win": + input_variables.update( + { + "visual_studio_path": "/", + "visual_studio_version": 2015, + "wdk_path": "/", + } + ) + if input_variables["target_os"] == "mac": + input_variables.update( + { + "mac_sdk_path": "/", + "enable_wmax_tokens": False, + } + ) + + gn_args = "--args=%s" % " ".join( + ["%s=%s" % (k, str_for_arg(v)) for k, v in six.iteritems(input_variables)] + ) + with tempfile.TemporaryDirectory() as tempdir: + # On Mac, `tempdir` starts with /var which is a symlink to /private/var. + # We resolve the symlinks in `tempdir` here so later usage with + # relpath() does not lead to unexpected results, should it be used + # together with another path that has symlinks resolved. 
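The symlink concern noted above comes from os.path.relpath() being purely lexical; a small demonstration (macOS, where /var is a symlink to /private/var):

    import os
    from pathlib import Path

    # Mixing a resolved path with an unresolved one gives a bogus result:
    os.path.relpath("/private/var/foo", "/var")  # "../private/var/foo"
    # Resolving first keeps both sides in the same namespace:
    Path("/var/foo").resolve()                   # PosixPath("/private/var/foo") on macOS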
+ resolved_tempdir = Path(tempdir).resolve() + gen_args = [gn_binary, "gen", str(resolved_tempdir), gn_args, "--ide=json"] + print('Running "%s"' % " ".join(gen_args), file=sys.stderr) + subprocess.check_call(gen_args, cwd=srcdir, stderr=subprocess.STDOUT) + + gn_config_file = resolved_tempdir / "project.json" + + with open(gn_config_file, "r") as fh: + gn_out = json.load(fh) + gn_out = filter_gn_config( + resolved_tempdir, gn_out, sandbox_variables, input_variables, gn_target + ) + return gn_out + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("config", help="configuration in json format") + args = parser.parse_args() + + gn_binary = bootstrap_toolchain("gn/gn") or which("gn") + if not gn_binary: + raise Exception("The GN program must be present to generate GN configs.") + + with open(args.config, "r") as fh: + config = json.load(fh) + + topsrcdir = Path(__file__).parent.parent.parent.parent.resolve() + + vars_set = [] + for is_debug in (True, False): + for target_os in ("android", "linux", "mac", "openbsd", "win"): + target_cpus = ["x64"] + if target_os in ("android", "linux", "mac", "win", "openbsd"): + target_cpus.append("arm64") + if target_os in ("android", "linux"): + target_cpus.append("arm") + if target_os in ("android", "linux", "win"): + target_cpus.append("x86") + if target_os == "linux": + target_cpus.extend(["ppc64", "riscv64", "mipsel", "mips64el"]) + for target_cpu in target_cpus: + vars = { + "host_cpu": "x64", + "is_debug": is_debug, + "target_cpu": target_cpu, + "target_os": target_os, + } + if target_os == "linux": + for use_x11 in (True, False): + vars["use_x11"] = use_x11 + vars_set.append(vars.copy()) + else: + if target_os == "openbsd": + vars["use_x11"] = True + vars_set.append(vars) + + gn_configs = [] + for vars in vars_set: + gn_configs.append( + generate_gn_config( + topsrcdir / config["target_dir"], + gn_binary, + vars, + config["gn_sandbox_variables"], + config["gn_target"], + ) + ) + + print("Writing moz.build files") + write_mozbuild( + topsrcdir, + topsrcdir / config["target_dir"], + config["non_unified_sources"], + gn_configs, + config["mozilla_flags"], + config["write_mozbuild_variables"], + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/html_build_viewer.py b/python/mozbuild/mozbuild/html_build_viewer.py new file mode 100644 index 0000000000..0582e6f1be --- /dev/null +++ b/python/mozbuild/mozbuild/html_build_viewer.py @@ -0,0 +1,118 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This module contains code for running an HTTP server to view build info. 
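A hypothetical usage of the BuildViewerServer class defined below (the JSON path is made up; port 0 asks the OS for a free port):

    server = BuildViewerServer()  # binds localhost on a free port
    server.add_resource_json_file("build", "/path/to/build_resources.json")  # hypothetical path
    print("viewer running at", server.url)
    server.run()  # serves until a POST to /shutdown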
+import http.server +import json +import os + +import requests + + +class HTTPHandler(http.server.BaseHTTPRequestHandler): + def do_GET(self): + s = self.server.wrapper + p = self.path + + if p == "/build_resources.json": + self.send_response(200) + self.send_header("Content-Type", "application/json; charset=utf-8") + self.end_headers() + + keys = sorted(s.json_files.keys()) + s = json.dumps({"files": ["resources/%s" % k for k in keys]}) + self.wfile.write(s.encode("utf-8")) + return + + if p.startswith("/resources/"): + key = p[len("/resources/") :] + + if key not in s.json_files: + self.send_error(404) + return + + self.send_response(200) + self.send_header("Content-Type", "application/json; charset=utf-8") + self.end_headers() + + self.wfile.write(s.json_files[key]) + return + + if p == "/": + p = "/build_resources.html" + + self.serve_docroot(s.doc_root, p[1:]) + + def do_POST(self): + if self.path == "/shutdown": + self.server.wrapper.do_shutdown = True + self.send_response(200) + return + + self.send_error(404) + + def serve_docroot(self, root, path): + local_path = os.path.normpath(os.path.join(root, path)) + + # Cheap security. This doesn't resolve symlinks, etc. But, it should be + # acceptable since this server only runs locally. + if not local_path.startswith(root): + self.send_error(404) + + if not os.path.exists(local_path): + self.send_error(404) + return + + if os.path.isdir(local_path): + self.send_error(500) + return + + self.send_response(200) + ct = "text/plain" + if path.endswith(".html"): + ct = "text/html" + + self.send_header("Content-Type", ct) + self.end_headers() + + with open(local_path, "rb") as fh: + self.wfile.write(fh.read()) + + +class BuildViewerServer(object): + def __init__(self, address="localhost", port=0): + # TODO use pkg_resources to obtain HTML resources. + pkg_dir = os.path.dirname(os.path.abspath(__file__)) + doc_root = os.path.join(pkg_dir, "resources", "html-build-viewer") + assert os.path.isdir(doc_root) + + self.doc_root = doc_root + self.json_files = {} + + self.server = http.server.HTTPServer((address, port), HTTPHandler) + self.server.wrapper = self + self.do_shutdown = False + + @property + def url(self): + hostname, port = self.server.server_address + return "http://%s:%d/" % (hostname, port) + + def add_resource_json_file(self, key, path): + """Register a resource JSON file with the server. + + The file will be made available under the name/key specified.""" + with open(path, "rb") as fh: + self.json_files[key] = fh.read() + + def add_resource_json_url(self, key, url): + """Register a resource JSON file at a URL.""" + r = requests.get(url) + if r.status_code != 200: + raise Exception("Non-200 HTTP response code") + self.json_files[key] = r.text + + def run(self): + while not self.do_shutdown: + self.server.handle_request() diff --git a/python/mozbuild/mozbuild/jar.py b/python/mozbuild/mozbuild/jar.py new file mode 100644 index 0000000000..f7d10f7fed --- /dev/null +++ b/python/mozbuild/mozbuild/jar.py @@ -0,0 +1,648 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +"""jarmaker.py provides a python class to package up chrome content by +processing jar.mn files. + +See the documentation for jar.mn on MDC for further details on the format. 
+""" + +import errno +import io +import logging +import os +import re +import sys +from time import localtime + +import mozpack.path as mozpath +import six +from mozpack.files import FileFinder +from MozZipFile import ZipFile +from six import BytesIO + +from mozbuild.action.buildlist import addEntriesToListFile +from mozbuild.preprocessor import Preprocessor +from mozbuild.util import ensure_bytes + +if sys.platform == "win32": + from ctypes import WinError, windll + + CreateHardLink = windll.kernel32.CreateHardLinkA + +__all__ = ["JarMaker"] + + +class ZipEntry(object): + """Helper class for jar output. + + This class defines a simple file-like object for a zipfile.ZipEntry + so that we can consecutively write to it and then close it. + This methods hooks into ZipFile.writestr on close(). + """ + + def __init__(self, name, zipfile): + self._zipfile = zipfile + self._name = name + self._inner = BytesIO() + + def write(self, content): + """Append the given content to this zip entry""" + + self._inner.write(ensure_bytes(content)) + return + + def close(self): + """The close method writes the content back to the zip file.""" + + self._zipfile.writestr(self._name, self._inner.getvalue()) + + +def getModTime(aPath): + if not os.path.isfile(aPath): + return localtime(0) + mtime = os.stat(aPath).st_mtime + return localtime(mtime) + + +class JarManifestEntry(object): + def __init__(self, output, source, is_locale=False, preprocess=False): + self.output = output + self.source = source + self.is_locale = is_locale + self.preprocess = preprocess + + +class JarInfo(object): + def __init__(self, base_or_jarinfo, name=None): + if name is None: + assert isinstance(base_or_jarinfo, JarInfo) + self.base = base_or_jarinfo.base + self.name = base_or_jarinfo.name + else: + assert not isinstance(base_or_jarinfo, JarInfo) + self.base = base_or_jarinfo or "" + self.name = name + # For compatibility with existing jar.mn files, if there is no + # base, the jar name is under chrome/ + if not self.base: + self.name = mozpath.join("chrome", self.name) + self.relativesrcdir = None + self.chrome_manifests = [] + self.entries = [] + + +class DeprecatedJarManifest(Exception): + pass + + +class JarManifestParser(object): + + ignore = re.compile("\s*(\#.*)?$") + jarline = re.compile( + """ + (?: + (?:\[(?P[\w\d.\-\_\\\/{}@]+)\]\s*)? # optional [base/path] + (?P[\w\d.\-\_\\\/{}]+).jar\: # filename.jar: + | + (?:\s*(\#.*)?) # comment + )\s*$ # whitespaces + """, + re.VERBOSE, + ) + relsrcline = re.compile("relativesrcdir\s+(?P.+?):") + regline = re.compile("\%\s+(.*)$") + entryre = "(?P\*)?(?P\+?)\s+" + entryline = re.compile( + entryre + + ( + "(?P[\w\d.\-\_\\\/\+\@]+)\s*" + "(\((?P\%?)(?P[\w\d.\-\_\\\/\@\*]+)\))?\s*$" + ) + ) + + def __init__(self): + self._current_jar = None + self._jars = [] + + def write(self, line): + # A Preprocessor instance feeds the parser through calls to this method. + + # Ignore comments and empty lines + if self.ignore.match(line): + return + + # A jar manifest file can declare several different sections, each of + # which applies to a given "jar file". Each of those sections starts + # with ".jar:", in which case the path is assumed relative to + # a "chrome" directory, or "[] .jar:", where + # a base directory is given (usually pointing at the root of the + # application or addon) and the jar path is given relative to the base + # directory. 
+        if self._current_jar is None:
+            m = self.jarline.match(line)
+            if not m:
+                raise RuntimeError(line)
+            if m.group("jarfile"):
+                self._current_jar = JarInfo(m.group("base"), m.group("jarfile"))
+                self._jars.append(self._current_jar)
+            return
+
+        # Within each section, there can be three different types of entries:
+
+        # - indications of the relative source directory we pretend to be in
+        #   when considering localization files, in the following form;
+        #   "relativesrcdir <relativesrcdir>:"
+        m = self.relsrcline.match(line)
+        if m:
+            if self._current_jar.chrome_manifests or self._current_jar.entries:
+                self._current_jar = JarInfo(self._current_jar)
+                self._jars.append(self._current_jar)
+            self._current_jar.relativesrcdir = m.group("relativesrcdir")
+            return
+
+        # - chrome manifest entries, prefixed with "%".
+        m = self.regline.match(line)
+        if m:
+            rline = " ".join(m.group(1).split())
+            if rline not in self._current_jar.chrome_manifests:
+                self._current_jar.chrome_manifests.append(rline)
+            return
+
+        # - entries indicating files to be part of the given jar. They are
+        #   formed thusly:
+        #   "<output>"
+        #   or
+        #   "<output> (<source>)"
+        #   The <output> is where the file(s) will be put in the chrome jar.
+        #   The <source> is where the file(s) can be found in the source
+        #   directory. The <source> may start with a "%" for files part
+        #   of a localization directory, in which case the "%" counts as the
+        #   locale.
+        #   Each entry can be prefixed with "*" for preprocessing.
+        m = self.entryline.match(line)
+        if m:
+            if m.group("optOverwrite"):
+                raise DeprecatedJarManifest('The "+" prefix is not supported anymore')
+            self._current_jar.entries.append(
+                JarManifestEntry(
+                    m.group("output"),
+                    m.group("source") or mozpath.basename(m.group("output")),
+                    is_locale=bool(m.group("locale")),
+                    preprocess=bool(m.group("optPreprocess")),
+                )
+            )
+            return
+
+        self._current_jar = None
+        self.write(line)
+
+    def __iter__(self):
+        return iter(self._jars)
+
+
+class JarMaker(object):
+    """JarMaker reads jar.mn files and processes them into jar files or
+    flat directories, along with chrome.manifest files.
+    """
+
+    def __init__(
+        self, outputFormat="flat", useJarfileManifest=True, useChromeManifest=False
+    ):
+
+        self.outputFormat = outputFormat
+        self.useJarfileManifest = useJarfileManifest
+        self.useChromeManifest = useChromeManifest
+        self.pp = Preprocessor()
+        self.topsourcedir = None
+        self.sourcedirs = []
+        self.localedirs = None
+        self.l10nbase = None
+        self.relativesrcdir = None
+        self.rootManifestAppId = None
+        self._seen_output = set()
+
+    def getCommandLineParser(self):
+        """Get an optparse.OptionParser for jarmaker.
+
+        This OptionParser has the options for jarmaker as well as
+        the options for the inner PreProcessor.
+ """ + + # HACK, we need to unescape the string variables we get, + # the perl versions didn't grok strings right + + p = self.pp.getCommandLineParser(unescapeDefines=True) + p.add_option( + "-f", + type="choice", + default="jar", + choices=("jar", "flat", "symlink"), + help="fileformat used for output", + metavar="[jar, flat, symlink]", + ) + p.add_option("-v", action="store_true", dest="verbose", help="verbose output") + p.add_option("-q", action="store_false", dest="verbose", help="verbose output") + p.add_option( + "-e", + action="store_true", + help="create chrome.manifest instead of jarfile.manifest", + ) + p.add_option( + "-s", type="string", action="append", default=[], help="source directory" + ) + p.add_option("-t", type="string", help="top source directory") + p.add_option( + "-c", + "--l10n-src", + type="string", + action="append", + help="localization directory", + ) + p.add_option( + "--l10n-base", + type="string", + action="store", + help="merged directory to be used for localization (requires relativesrcdir)", + ) + p.add_option( + "--relativesrcdir", + type="string", + help="relativesrcdir to be used for localization", + ) + p.add_option("-d", type="string", help="base directory") + p.add_option( + "--root-manifest-entry-appid", + type="string", + help="add an app id specific root chrome manifest entry.", + ) + return p + + def finalizeJar( + self, jardir, jarbase, jarname, chromebasepath, register, doZip=True + ): + """Helper method to write out the chrome registration entries to + jarfile.manifest or chrome.manifest, or both. + + The actual file processing is done in updateManifest. + """ + + # rewrite the manifest, if entries given + if not register: + return + + chromeManifest = os.path.join(jardir, jarbase, "chrome.manifest") + + if self.useJarfileManifest: + self.updateManifest( + os.path.join(jardir, jarbase, jarname + ".manifest"), + chromebasepath.format(""), + register, + ) + if jarname != "chrome": + addEntriesToListFile( + chromeManifest, ["manifest {0}.manifest".format(jarname)] + ) + if self.useChromeManifest: + chromebase = os.path.dirname(jarname) + "/" + self.updateManifest( + chromeManifest, chromebasepath.format(chromebase), register + ) + + # If requested, add a root chrome manifest entry (assumed to be in the parent directory + # of chromeManifest) with the application specific id. In cases where we're building + # lang packs, the root manifest must know about application sub directories. + + if self.rootManifestAppId: + rootChromeManifest = os.path.join( + os.path.normpath(os.path.dirname(chromeManifest)), + "..", + "chrome.manifest", + ) + rootChromeManifest = os.path.normpath(rootChromeManifest) + chromeDir = os.path.basename( + os.path.dirname(os.path.normpath(chromeManifest)) + ) + logging.info( + "adding '%s' entry to root chrome manifest appid=%s" + % (chromeDir, self.rootManifestAppId) + ) + addEntriesToListFile( + rootChromeManifest, + [ + "manifest %s/chrome.manifest application=%s" + % (chromeDir, self.rootManifestAppId) + ], + ) + + def updateManifest(self, manifestPath, chromebasepath, register): + """updateManifest replaces the % in the chrome registration entries + with the given chrome base path, and updates the given manifest file. + """ + myregister = dict.fromkeys( + map(lambda s: s.replace("%", chromebasepath), register) + ) + addEntriesToListFile(manifestPath, six.iterkeys(myregister)) + + def makeJar(self, infile, jardir): + """makeJar is the main entry point to JarMaker. 
+ + It takes the input file, the output directory, the source dirs and the + top source dir as argument, and optionally the l10n dirs. + """ + + # making paths absolute, guess srcdir if file and add to sourcedirs + def _normpath(p): + return os.path.normpath(os.path.abspath(p)) + + self.topsourcedir = _normpath(self.topsourcedir) + self.sourcedirs = [_normpath(p) for p in self.sourcedirs] + if self.localedirs: + self.localedirs = [_normpath(p) for p in self.localedirs] + elif self.relativesrcdir: + self.localedirs = self.generateLocaleDirs(self.relativesrcdir) + if isinstance(infile, six.text_type): + logging.info("processing " + infile) + self.sourcedirs.append(_normpath(os.path.dirname(infile))) + pp = self.pp.clone() + pp.out = JarManifestParser() + pp.do_include(infile) + + for info in pp.out: + self.processJarSection(info, jardir) + + def generateLocaleDirs(self, relativesrcdir): + if os.path.basename(relativesrcdir) == "locales": + # strip locales + l10nrelsrcdir = os.path.dirname(relativesrcdir) + else: + l10nrelsrcdir = relativesrcdir + locdirs = [] + + # generate locales merge or en-US + if self.l10nbase: + locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir)) + else: + # add en-US if it's not l10n + locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, "en-US")) + return locdirs + + def processJarSection(self, jarinfo, jardir): + """Internal method called by makeJar to actually process a section + of a jar.mn file. + """ + + # chromebasepath is used for chrome registration manifests + # {0} is getting replaced with chrome/ for chrome.manifest, and with + # an empty string for jarfile.manifest + + chromebasepath = "{0}" + os.path.basename(jarinfo.name) + if self.outputFormat == "jar": + chromebasepath = "jar:" + chromebasepath + ".jar!" + chromebasepath += "/" + + jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name) + jf = None + if self.outputFormat == "jar": + # jar + jarfilepath = jarfile + ".jar" + try: + os.makedirs(os.path.dirname(jarfilepath)) + except OSError as error: + if error.errno != errno.EEXIST: + raise + jf = ZipFile(jarfilepath, "a", lock=True) + outHelper = self.OutputHelper_jar(jf) + else: + outHelper = getattr(self, "OutputHelper_" + self.outputFormat)(jarfile) + + if jarinfo.relativesrcdir: + self.localedirs = self.generateLocaleDirs(jarinfo.relativesrcdir) + + for e in jarinfo.entries: + self._processEntryLine(e, outHelper, jf) + + self.finalizeJar( + jardir, jarinfo.base, jarinfo.name, chromebasepath, jarinfo.chrome_manifests + ) + if jf is not None: + jf.close() + + def _processEntryLine(self, e, outHelper, jf): + out = e.output + src = e.source + + # pick the right sourcedir -- l10n, topsrc or src + + if e.is_locale: + # If the file is a Fluent l10n resource, we want to skip the + # 'en-US' fallbacking. + # + # To achieve that, we're testing if we have more than one localedir, + # and if the last of those has 'en-US' in it. + # If that's the case, we're removing the last one. 
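+            # For example, with (hypothetical) localedirs of
+            # ["/obj/l10n-merge/browser/locales", "/src/browser/locales/en-US"],
+            # a "browser.ftl" entry is looked up only in the merge dir.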
+ if ( + e.source.endswith(".ftl") + and len(self.localedirs) > 1 + and "en-US" in self.localedirs[-1] + ): + src_base = self.localedirs[:-1] + else: + src_base = self.localedirs + elif src.startswith("/"): + # path/in/jar/file_name.xul (/path/in/sourcetree/file_name.xul) + # refers to a path relative to topsourcedir, use that as base + # and strip the leading '/' + src_base = [self.topsourcedir] + src = src[1:] + else: + # use srcdirs and the objdir (current working dir) for relative paths + src_base = self.sourcedirs + [os.getcwd()] + + if "*" in src: + + def _prefix(s): + for p in s.split("/"): + if "*" not in p: + yield p + "/" + + prefix = "".join(_prefix(src)) + emitted = set() + for _srcdir in src_base: + finder = FileFinder(_srcdir) + for path, _ in finder.find(src): + # If the path was already seen in one of the other source + # directories, skip it. That matches the non-wildcard case + # below, where we pick the first existing file. + reduced_path = path[len(prefix) :] + if reduced_path in emitted: + continue + emitted.add(reduced_path) + e = JarManifestEntry( + mozpath.join(out, reduced_path), + path, + is_locale=e.is_locale, + preprocess=e.preprocess, + ) + self._processEntryLine(e, outHelper, jf) + return + + # check if the source file exists + realsrc = None + for _srcdir in src_base: + if os.path.isfile(os.path.join(_srcdir, src)): + realsrc = os.path.join(_srcdir, src) + break + if realsrc is None: + if jf is not None: + jf.close() + raise RuntimeError( + 'File "{0}" not found in {1}'.format(src, ", ".join(src_base)) + ) + + if out in self._seen_output: + raise RuntimeError("%s already added" % out) + self._seen_output.add(out) + + if e.preprocess: + outf = outHelper.getOutput(out, mode="w") + inf = io.open(realsrc, encoding="utf-8") + pp = self.pp.clone() + if src[-4:] == ".css": + pp.setMarker("%") + pp.out = outf + pp.do_include(inf) + pp.failUnused(realsrc) + outf.close() + inf.close() + return + + # copy or symlink if newer + + if getModTime(realsrc) > outHelper.getDestModTime(e.output): + if self.outputFormat == "symlink": + outHelper.symlink(realsrc, out) + return + outf = outHelper.getOutput(out) + + # open in binary mode, this can be images etc + + inf = open(realsrc, "rb") + outf.write(inf.read()) + outf.close() + inf.close() + + class OutputHelper_jar(object): + """Provide getDestModTime and getOutput for a given jarfile.""" + + def __init__(self, jarfile): + self.jarfile = jarfile + + def getDestModTime(self, aPath): + try: + info = self.jarfile.getinfo(aPath) + return info.date_time + except Exception: + return localtime(0) + + def getOutput(self, name, mode="wb"): + return ZipEntry(name, self.jarfile) + + class OutputHelper_flat(object): + """Provide getDestModTime and getOutput for a given flat + output directory. The helper method ensureDirFor is used by + the symlink subclass. 
+ """ + + def __init__(self, basepath): + self.basepath = basepath + + def getDestModTime(self, aPath): + return getModTime(os.path.join(self.basepath, aPath)) + + def getOutput(self, name, mode="wb"): + out = self.ensureDirFor(name) + + # remove previous link or file + try: + os.remove(out) + except OSError as e: + if e.errno != errno.ENOENT: + raise + if "b" in mode: + return io.open(out, mode) + else: + return io.open(out, mode, encoding="utf-8", newline="\n") + + def ensureDirFor(self, name): + out = os.path.join(self.basepath, name) + outdir = os.path.dirname(out) + if not os.path.isdir(outdir): + try: + os.makedirs(outdir) + except OSError as error: + if error.errno != errno.EEXIST: + raise + return out + + class OutputHelper_symlink(OutputHelper_flat): + """Subclass of OutputHelper_flat that provides a helper for + creating a symlink including creating the parent directories. + """ + + def symlink(self, src, dest): + out = self.ensureDirFor(dest) + + # remove previous link or file + try: + os.remove(out) + except OSError as e: + if e.errno != errno.ENOENT: + raise + if sys.platform != "win32": + os.symlink(src, out) + else: + # On Win32, use ctypes to create a hardlink + rv = CreateHardLink(ensure_bytes(out), ensure_bytes(src), None) + if rv == 0: + raise WinError() + + +def main(args=None): + args = args or sys.argv + jm = JarMaker() + p = jm.getCommandLineParser() + (options, args) = p.parse_args(args) + jm.outputFormat = options.f + jm.sourcedirs = options.s + jm.topsourcedir = options.t + if options.e: + jm.useChromeManifest = True + jm.useJarfileManifest = False + if options.l10n_base: + if not options.relativesrcdir: + p.error("relativesrcdir required when using l10n-base") + if options.l10n_src: + p.error("both l10n-src and l10n-base are not supported") + jm.l10nbase = options.l10n_base + jm.relativesrcdir = options.relativesrcdir + jm.localedirs = options.l10n_src + if options.root_manifest_entry_appid: + jm.rootManifestAppId = options.root_manifest_entry_appid + noise = logging.INFO + if options.verbose is not None: + noise = options.verbose and logging.DEBUG or logging.WARN + if sys.version_info[:2] > (2, 3): + logging.basicConfig(format="%(message)s") + else: + logging.basicConfig() + logging.getLogger().setLevel(noise) + topsrc = options.t + topsrc = os.path.normpath(os.path.abspath(topsrc)) + if not args: + infile = sys.stdin + else: + (infile,) = args + infile = six.ensure_text(infile) + jm.makeJar(infile, options.d) diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py new file mode 100644 index 0000000000..2297d586b8 --- /dev/null +++ b/python/mozbuild/mozbuild/mach_commands.py @@ -0,0 +1,2941 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import argparse
+import errno
+import itertools
+import json
+import logging
+import operator
+import os
+import os.path
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+from os import path
+from pathlib import Path
+
+import mozpack.path as mozpath
+import yaml
+from mach.decorators import (
+    Command,
+    CommandArgument,
+    CommandArgumentGroup,
+    SettingsProvider,
+    SubCommand,
+)
+from voluptuous import All, Boolean, Required, Schema
+
+import mozbuild.settings  # noqa need @SettingsProvider hook to execute
+from mozbuild.base import (
+    BinaryNotFoundException,
+    BuildEnvironmentNotFoundException,
+    MozbuildObject,
+)
+from mozbuild.base import MachCommandConditions as conditions
+from mozbuild.util import MOZBUILD_METRICS_PATH
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+EXCESSIVE_SWAP_MESSAGE = """
+===================
+PERFORMANCE WARNING
+
+Your machine experienced a lot of swap activity during the build. This is
+possibly a sign that your machine doesn't have enough physical memory or
+not enough available memory to perform the build. It's also possible some
+other system activity during the build is to blame.
+
+If you feel this message is not appropriate for your machine configuration,
+please file a Firefox Build System :: General bug at
+https://bugzilla.mozilla.org/enter_bug.cgi?product=Firefox%20Build%20System&component=General
+and tell us about your machine and build configuration so we can adjust the
+warning heuristic.
+===================
+"""
+
+
+class StoreDebugParamsAndWarnAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+        sys.stderr.write(
+            "The --debugparams argument is deprecated. Please "
+            + "use --debugger-args instead.\n\n"
+        )
+        setattr(namespace, self.dest, values)
+
+
+@Command(
+    "watch",
+    category="post-build",
+    description="Watch and re-build (parts of) the tree.",
+    conditions=[conditions.is_firefox],
+    virtualenv_name="watch",
+)
+@CommandArgument(
+    "-v",
+    "--verbose",
+    action="store_true",
+    help="Verbose output for what commands the watcher is running.",
+)
+def watch(command_context, verbose=False):
+    """Watch and re-build (parts of) the source tree."""
+    if not conditions.is_artifact_build(command_context):
+        print(
+            "WARNING: mach watch only rebuilds the `mach build faster` parts of the tree!"
+        )
+
+    if not command_context.substs.get("WATCHMAN", None):
+        print(
+            "mach watch requires watchman to be installed and found at configure time. See "
+            "https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching"  # noqa
+        )
+        return 1
+
+    from mozbuild.faster_daemon import Daemon
+
+    daemon = Daemon(command_context.config_environment)
+
+    try:
+        return daemon.watch()
+    except KeyboardInterrupt:
+        # Suppress ugly stack trace when user hits Ctrl-C.
+        sys.exit(3)
+
+
+CARGO_CONFIG_NOT_FOUND_ERROR_MSG = """\
+The sub-command {subcommand} is not currently configured to be used with ./mach cargo.
+To do so, add the corresponding file in <topsrcdir>/build/cargo, following other examples in this directory"""
+
+
+def _cargo_config_yaml_schema():
+    def starts_with_cargo(s):
+        if s.startswith("cargo-"):
+            return s
+        else:
+            raise ValueError
+
+    return Schema(
+        {
+            # The name of the command (not checked for now, but maybe
+            # later)
+            Required("command"): All(str, starts_with_cargo),
+            # Whether `make` should stop immediately in case
+            # of error returned by the command.
Default: False + "continue_on_error": Boolean, + # Whether this command requires pre_export and export build + # targets to have run. Defaults to bool(cargo_build_flags). + "requires_export": Boolean, + # Build flags to use. If this variable is not + # defined here, the build flags are generated automatically and are + # the same as for `cargo build`. See available substitutions at the + # end. + "cargo_build_flags": [str], + # Extra build flags to use. These flags are added + # after the cargo_build_flags both when they are provided or + # automatically generated. See available substitutions at the end. + "cargo_extra_flags": [str], + # Available substitutions for `cargo_*_flags`: + # * {arch}: architecture target + # * {crate}: current crate name + # * {directory}: Directory of the current crate within the source tree + # * {features}: Rust features (for `--features`) + # * {manifest}: full path of `Cargo.toml` file + # * {target}: `--lib` for library, `--bin CRATE` for executables + # * {topsrcdir}: Top directory of sources + } + ) + + +@Command( + "cargo", + category="build", + description="Run `cargo ` on a given crate. Defaults to gkrust.", + metrics_path=MOZBUILD_METRICS_PATH, +) +@CommandArgument( + "cargo_command", + default=None, + help="Target to cargo, must be one of the commands in config/cargo/", +) +@CommandArgument( + "--all-crates", + action="store_true", + help="Check all of the crates in the tree.", +) +@CommandArgument( + "-p", "--package", default=None, help="The specific crate name to check." +) +@CommandArgument( + "--jobs", + "-j", + default="0", + nargs="?", + metavar="jobs", + type=int, + help="Run the tests in parallel using multiple processes.", +) +@CommandArgument("-v", "--verbose", action="store_true", help="Verbose output.") +@CommandArgument( + "--message-format-json", + action="store_true", + help="Emit error messages as JSON.", +) +@CommandArgument( + "--continue-on-error", + action="store_true", + help="Do not return an error exit code if the subcommands errors out.", +) +@CommandArgument( + "subcommand_args", + nargs=argparse.REMAINDER, + help="These arguments are passed as-is to the cargo subcommand.", +) +def cargo( + command_context, + cargo_command, + all_crates=None, + package=None, + jobs=0, + verbose=False, + message_format_json=False, + continue_on_error=False, + subcommand_args=[], +): + + from mozbuild.controller.building import BuildDriver + + command_context.log_manager.enable_all_structured_loggers() + + topsrcdir = Path(mozpath.normpath(command_context.topsrcdir)) + cargodir = Path(topsrcdir / "build" / "cargo") + + cargo_command_basename = "cargo-" + cargo_command + ".yaml" + cargo_command_fullname = Path(cargodir / cargo_command_basename) + if path.exists(cargo_command_fullname): + with open(cargo_command_fullname) as fh: + yaml_config = yaml.load(fh, Loader=yaml.FullLoader) + schema = _cargo_config_yaml_schema() + schema(yaml_config) + if not yaml_config: + yaml_config = {} + else: + print(CARGO_CONFIG_NOT_FOUND_ERROR_MSG.format(subcommand=cargo_command)) + return 1 + + # print("yaml_config = ", yaml_config) + + yaml_config.setdefault("continue_on_error", False) + continue_on_error = continue_on_error or yaml_config["continue_on_error"] is True + + cargo_build_flags = yaml_config.get("cargo_build_flags") + if cargo_build_flags is not None: + cargo_build_flags = " ".join(cargo_build_flags) + cargo_extra_flags = yaml_config.get("cargo_extra_flags") + if cargo_extra_flags is not None: + cargo_extra_flags = " ".join(cargo_extra_flags) + 
requires_export = yaml_config.get("requires_export", bool(cargo_build_flags)) + + ret = 0 + if requires_export: + # This directory is created during export. If it's not there, + # export hasn't run already. + deps = Path(command_context.topobjdir) / ".deps" + if not deps.exists(): + build = command_context._spawn(BuildDriver) + ret = build.build( + command_context.metrics, + what=["pre-export", "export"], + jobs=jobs, + verbose=verbose, + mach_context=command_context._mach_context, + ) + else: + try: + command_context.config_environment + except BuildEnvironmentNotFoundException: + build = command_context._spawn(BuildDriver) + ret = build.configure( + command_context.metrics, + buildstatus_messages=False, + ) + if ret != 0: + return ret + + # XXX duplication with `mach vendor rust` + crates_and_roots = { + "gkrust": {"directory": "toolkit/library/rust", "library": True}, + "gkrust-gtest": {"directory": "toolkit/library/gtest/rust", "library": True}, + "geckodriver": {"directory": "testing/geckodriver", "library": False}, + } + + if all_crates: + crates = crates_and_roots.keys() + elif package: + crates = [package] + else: + crates = ["gkrust"] + + if subcommand_args: + subcommand_args = " ".join(subcommand_args) + + for crate in crates: + crate_info = crates_and_roots.get(crate, None) + if not crate_info: + print( + "Cannot locate crate %s. Please check your spelling or " + "add the crate information to the list." % crate + ) + return 1 + + targets = [ + "force-cargo-library-%s" % cargo_command, + "force-cargo-host-library-%s" % cargo_command, + "force-cargo-program-%s" % cargo_command, + "force-cargo-host-program-%s" % cargo_command, + ] + + directory = crate_info["directory"] + # you can use these variables in 'cargo_build_flags' + subst = { + "arch": '"$(RUST_TARGET)"', + "crate": crate, + "directory": directory, + "features": '"$(RUST_LIBRARY_FEATURES)"', + "manifest": str(Path(topsrcdir / directory / "Cargo.toml")), + "target": "--lib" if crate_info["library"] else "--bin " + crate, + "topsrcdir": str(topsrcdir), + } + + if subcommand_args: + targets = targets + [ + "cargo_extra_cli_flags=%s" % (subcommand_args.format(**subst)) + ] + if cargo_build_flags: + targets = targets + [ + "cargo_build_flags=%s" % (cargo_build_flags.format(**subst)) + ] + + append_env = {} + if cargo_extra_flags: + append_env["CARGO_EXTRA_FLAGS"] = cargo_extra_flags.format(**subst) + if message_format_json: + append_env["USE_CARGO_JSON_MESSAGE_FORMAT"] = "1" + if continue_on_error: + append_env["CARGO_CONTINUE_ON_ERROR"] = "1" + if cargo_build_flags: + append_env["CARGO_NO_AUTO_ARG"] = "1" + else: + append_env[ + "ADD_RUST_LTOABLE" + ] = "force-cargo-library-{s:s} force-cargo-program-{s:s}".format( + s=cargo_command + ) + + ret = command_context._run_make( + srcdir=False, + directory=directory, + ensure_exit_code=0, + silent=not verbose, + print_directory=False, + target=targets, + num_jobs=jobs, + append_env=append_env, + ) + if ret != 0: + return ret + + return 0 + + +@SubCommand( + "cargo", + "vet", + description="Run `cargo vet`.", +) +@CommandArgument("arguments", nargs=argparse.REMAINDER) +def cargo_vet(command_context, arguments, stdout=None, env=os.environ): + from mozbuild.bootstrap import bootstrap_toolchain + + # Logging of commands enables logging from `bootstrap_toolchain` that we + # don't want to expose. Disable them temporarily. 
+ logger = logging.getLogger("gecko_taskgraph.generator") + level = logger.getEffectiveLevel() + logger.setLevel(logging.ERROR) + + env = env.copy() + cargo_vet = bootstrap_toolchain("cargo-vet") + if cargo_vet: + env["PATH"] = os.pathsep.join([cargo_vet, env["PATH"]]) + logger.setLevel(level) + try: + cargo = command_context.substs["CARGO"] + except (BuildEnvironmentNotFoundException, KeyError): + # Default if this tree isn't configured. + from mozfile import which + + cargo = which("cargo", path=env["PATH"]) + if not cargo: + raise OSError( + errno.ENOENT, + ( + "Could not find 'cargo' on your $PATH. " + "Hint: have you run `mach build` or `mach configure`?" + ), + ) + + locked = "--locked" in arguments + if locked: + # The use of --locked requires .cargo/config to exist, but other things, + # like cargo update, don't want it there, so remove it once we're done. + topsrcdir = Path(command_context.topsrcdir) + shutil.copyfile( + topsrcdir / ".cargo" / "config.in", topsrcdir / ".cargo" / "config" + ) + + try: + res = subprocess.run( + [cargo, "vet"] + arguments, + cwd=command_context.topsrcdir, + stdout=stdout, + env=env, + ) + finally: + if locked: + (topsrcdir / ".cargo" / "config").unlink() + + # When the function is invoked without stdout set (the default when running + # as a mach subcommand), exit with the returncode from cargo vet. + # When the function is invoked with stdout (direct function call), return + # the full result from subprocess.run. + return res if stdout else res.returncode + + +@Command( + "doctor", + category="devenv", + description="Diagnose and fix common development environment issues.", +) +@CommandArgument( + "--fix", + default=False, + action="store_true", + help="Attempt to fix found problems.", +) +@CommandArgument( + "--verbose", + default=False, + action="store_true", + help="Print verbose information found by checks.", +) +def doctor(command_context, fix=False, verbose=False): + """Diagnose common build environment problems""" + from mozbuild.doctor import run_doctor + + return run_doctor( + topsrcdir=command_context.topsrcdir, + topobjdir=command_context.topobjdir, + configure_args=command_context.mozconfig["configure_args"], + fix=fix, + verbose=verbose, + ) + + +CLOBBER_CHOICES = {"objdir", "python", "gradle"} + + +@Command( + "clobber", + category="build", + description="Clobber the tree (delete the object directory).", + no_auto_log=True, +) +@CommandArgument( + "what", + default=["objdir", "python"], + nargs="*", + help="Target to clobber, must be one of {{{}}} (default " + "objdir and python).".format(", ".join(CLOBBER_CHOICES)), +) +@CommandArgument("--full", action="store_true", help="Perform a full clobber") +def clobber(command_context, what, full=False): + """Clean up the source and object directories. + + Performing builds and running various commands generate various files. + + Sometimes it is necessary to clean up these files in order to make + things work again. This command can be used to perform that cleanup. + + The `objdir` target removes most files in the current object directory + (where build output is stored). Some files (like Visual Studio project + files) are not removed by default. If you would like to remove the + object directory in its entirety, run with `--full`. + + The `python` target will clean up Python's generated files (virtualenvs, + ".pyc", "__pycache__", etc). + + The `gradle` target will remove the "gradle" subdirectory of the object + directory. 
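+
+    For example, `./mach clobber objdir --full` removes the object directory
+    in its entirety, while `./mach clobber python gradle` cleans up only the
+    Python and Gradle state.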
+ + By default, the command clobbers the `objdir` and `python` targets. + """ + what = set(what) + invalid = what - CLOBBER_CHOICES + if invalid: + print( + "Unknown clobber target(s): {}. Choose from {{{}}}".format( + ", ".join(invalid), ", ".join(CLOBBER_CHOICES) + ) + ) + return 1 + + ret = 0 + if "objdir" in what: + from mozbuild.controller.clobber import Clobberer + + try: + substs = command_context.substs + except BuildEnvironmentNotFoundException: + substs = {} + + try: + Clobberer( + command_context.topsrcdir, command_context.topobjdir, substs + ).remove_objdir(full) + except OSError as e: + if sys.platform.startswith("win"): + if isinstance(e, WindowsError) and e.winerror in (5, 32): + command_context.log( + logging.ERROR, + "file_access_error", + {"error": e}, + "Could not clobber because a file was in use. If the " + "application is running, try closing it. {error}", + ) + return 1 + raise + + if "python" in what: + if conditions.is_hg(command_context): + cmd = [ + "hg", + "--config", + "extensions.purge=", + "purge", + "--all", + "-I", + "glob:**.py[cdo]", + "-I", + "glob:**/__pycache__", + ] + elif conditions.is_git(command_context): + cmd = ["git", "clean", "-d", "-f", "-x", "*.py[cdo]", "*/__pycache__/*"] + else: + cmd = ["find", ".", "-type", "f", "-name", "*.py[cdo]", "-delete"] + subprocess.call(cmd, cwd=command_context.topsrcdir) + cmd = [ + "find", + ".", + "-type", + "d", + "-name", + "__pycache__", + "-empty", + "-delete", + ] + ret = subprocess.call(cmd, cwd=command_context.topsrcdir) + shutil.rmtree( + mozpath.join(command_context.topobjdir, "_virtualenvs"), + ignore_errors=True, + ) + + if "gradle" in what: + shutil.rmtree( + mozpath.join(command_context.topobjdir, "gradle"), ignore_errors=True + ) + + return ret + + +@Command( + "show-log", category="post-build", description="Display mach logs", no_auto_log=True +) +@CommandArgument( + "log_file", + nargs="?", + type=argparse.FileType("rb"), + help="Filename to read log data from. Defaults to the log of the last " + "mach command.", +) +def show_log(command_context, log_file=None): + """Show mach logs + If we're in a terminal context, the log is piped to 'less' + for more convenient viewing. + (https://man7.org/linux/man-pages/man1/less.1.html) + """ + if not log_file: + path = command_context._get_state_filename("last_log.json") + log_file = open(path, "rb") + + if os.isatty(sys.stdout.fileno()): + env = dict(os.environ) + if "LESS" not in env: + # Sensible default flags if none have been set in the user environment. 
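+            # F: quit if the output fits on one screen, R: pass through ANSI
+            # color escapes, X: don't clear the screen on exit.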
+ env["LESS"] = "FRX" + less = subprocess.Popen( + ["less"], stdin=subprocess.PIPE, env=env, encoding="UTF-8" + ) + + try: + # Create a new logger handler with the stream being the stdin of our 'less' + # process so that we can pipe the logger output into 'less' + less_handler = logging.StreamHandler(stream=less.stdin) + less_handler.setFormatter( + command_context.log_manager.terminal_handler.formatter + ) + less_handler.setLevel(command_context.log_manager.terminal_handler.level) + + # replace the existing terminal handler with the new one for 'less' while + # still keeping the original one to set back later + original_handler = command_context.log_manager.replace_terminal_handler( + less_handler + ) + + # Save this value so we can set it back to the original value later + original_logging_raise_exceptions = logging.raiseExceptions + + # We need to explicitly disable raising exceptions inside logging so + # that we can catch them here ourselves to ignore the ones we want + logging.raiseExceptions = False + + # Parses the log file line by line and streams + # (to less.stdin) the relevant records we want + handle_log_file(command_context, log_file) + + # At this point we've piped the entire log file to + # 'less', so we can close the input stream + less.stdin.close() + + # Wait for the user to manually terminate `less` + less.wait() + except OSError as os_error: + # (POSIX) errno.EPIPE: BrokenPipeError: [Errno 32] Broken pipe + # (Windows) errno.EINVAL: OSError: [Errno 22] Invalid argument + if os_error.errno == errno.EPIPE or os_error.errno == errno.EINVAL: + # If the user manually terminates 'less' before the entire log file + # is piped (without scrolling close enough to the bottom) we will get + # one of these errors (depends on the OS) because the logger will still + # attempt to stream to the now invalid less.stdin. To prevent a bunch + # of errors being shown after a user terminates 'less', we just catch + # the first of those exceptions here, and stop parsing the log file. + pass + else: + raise + except Exception: + raise + finally: + # Ensure these values are changed back to the originals, regardless of outcome + command_context.log_manager.replace_terminal_handler(original_handler) + logging.raiseExceptions = original_logging_raise_exceptions + else: + # Not in a terminal context, so just handle the log file with the + # default stream without piping it to a pager (less) + handle_log_file(command_context, log_file) + + +def handle_log_file(command_context, log_file): + start_time = 0 + for line in log_file: + created, action, params = json.loads(line) + if not start_time: + start_time = created + command_context.log_manager.terminal_handler.formatter.start_time = created + if "line" in params: + record = logging.makeLogRecord( + { + "created": created, + "name": command_context._logger.name, + "levelno": logging.INFO, + "msg": "{line}", + "params": params, + "action": action, + } + ) + command_context._logger.handle(record) + + +# Provide commands for inspecting warnings. 
+ + +def database_path(command_context): + return command_context._get_state_filename("warnings.json") + + +def get_warnings_database(command_context): + from mozbuild.compilation.warnings import WarningsDatabase + + path = database_path(command_context) + + database = WarningsDatabase() + + if os.path.exists(path): + database.load_from_file(path) + + return database + + +@Command( + "warnings-summary", + category="post-build", + description="Show a summary of compiler warnings.", +) +@CommandArgument( + "-C", + "--directory", + default=None, + help="Change to a subdirectory of the build directory first.", +) +@CommandArgument( + "report", + default=None, + nargs="?", + help="Warnings report to display. If not defined, show the most recent report.", +) +def summary(command_context, directory=None, report=None): + database = get_warnings_database(command_context) + + if directory: + dirpath = join_ensure_dir(command_context.topsrcdir, directory) + if not dirpath: + return 1 + else: + dirpath = None + + type_counts = database.type_counts(dirpath) + sorted_counts = sorted(type_counts.items(), key=operator.itemgetter(1)) + + total = 0 + for k, v in sorted_counts: + print("%d\t%s" % (v, k)) + total += v + + print("%d\tTotal" % total) + + +@Command( + "warnings-list", + category="post-build", + description="Show a list of compiler warnings.", +) +@CommandArgument( + "-C", + "--directory", + default=None, + help="Change to a subdirectory of the build directory first.", +) +@CommandArgument( + "--flags", default=None, nargs="+", help="Which warnings flags to match." +) +@CommandArgument( + "report", + default=None, + nargs="?", + help="Warnings report to display. If not defined, show the most recent report.", +) +def list_warnings(command_context, directory=None, flags=None, report=None): + database = get_warnings_database(command_context) + + by_name = sorted(database.warnings) + + topsrcdir = mozpath.normpath(command_context.topsrcdir) + + if directory: + directory = mozpath.normsep(directory) + dirpath = join_ensure_dir(topsrcdir, directory) + if not dirpath: + return 1 + + if flags: + # Flatten lists of flags. + flags = set(itertools.chain(*[flaglist.split(",") for flaglist in flags])) + + for warning in by_name: + filename = mozpath.normsep(warning["filename"]) + + if filename.startswith(topsrcdir): + filename = filename[len(topsrcdir) + 1 :] + + if directory and not filename.startswith(directory): + continue + + if flags and warning["flag"] not in flags: + continue + + if warning["column"] is not None: + print( + "%s:%d:%d [%s] %s" + % ( + filename, + warning["line"], + warning["column"], + warning["flag"], + warning["message"], + ) + ) + else: + print( + "%s:%d [%s] %s" + % (filename, warning["line"], warning["flag"], warning["message"]) + ) + + +def join_ensure_dir(dir1, dir2): + dir1 = mozpath.normpath(dir1) + dir2 = mozpath.normsep(dir2) + joined_path = mozpath.join(dir1, dir2) + if os.path.isdir(joined_path): + return joined_path + print("Specified directory not found.") + return None + + +@Command("gtest", category="testing", description="Run GTest unit tests (C++ tests).") +@CommandArgument( + "gtest_filter", + default="*", + nargs="?", + metavar="gtest_filter", + help="test_filter is a ':'-separated list of wildcard patterns " + "(called the positive patterns), optionally followed by a '-' " + "and another ':'-separated pattern list (called the negative patterns)." + "Test names are of the format SUITE.NAME. 
Use --list-tests to see all.", +) +@CommandArgument("--list-tests", action="store_true", help="list all available tests") +@CommandArgument( + "--jobs", + "-j", + default="1", + nargs="?", + metavar="jobs", + type=int, + help="Run the tests in parallel using multiple processes.", +) +@CommandArgument( + "--tbpl-parser", + "-t", + action="store_true", + help="Output test results in a format that can be parsed by TBPL.", +) +@CommandArgument( + "--shuffle", + "-s", + action="store_true", + help="Randomize the execution order of tests.", +) +@CommandArgument( + "--enable-webrender", + action="store_true", + default=False, + dest="enable_webrender", + help="Enable the WebRender compositor in Gecko.", +) +@CommandArgumentGroup("Android") +@CommandArgument( + "--package", + default="org.mozilla.geckoview.test_runner", + group="Android", + help="Package name of test app.", +) +@CommandArgument( + "--adbpath", dest="adb_path", group="Android", help="Path to adb binary." +) +@CommandArgument( + "--deviceSerial", + dest="device_serial", + group="Android", + help="adb serial number of remote device. " + "Required when more than one device is connected to the host. " + "Use 'adb devices' to see connected devices.", +) +@CommandArgument( + "--remoteTestRoot", + dest="remote_test_root", + group="Android", + help="Remote directory to use as test root (eg. /data/local/tmp/test_root).", +) +@CommandArgument( + "--libxul", dest="libxul_path", group="Android", help="Path to gtest libxul.so." +) +@CommandArgument( + "--no-install", + action="store_true", + default=False, + group="Android", + help="Skip the installation of the APK.", +) +@CommandArgumentGroup("debugging") +@CommandArgument( + "--debug", + action="store_true", + group="debugging", + help="Enable the debugger. 
Not specifying a --debugger option will result in "
+    "the default debugger being used.",
+)
+@CommandArgument(
+    "--debugger",
+    default=None,
+    type=str,
+    group="debugging",
+    help="Name of debugger to use.",
+)
+@CommandArgument(
+    "--debugger-args",
+    default=None,
+    metavar="params",
+    type=str,
+    group="debugging",
+    help="Command-line arguments to pass to the debugger itself; "
+    "split as the Bourne shell would.",
+)
+def gtest(
+    command_context,
+    shuffle,
+    jobs,
+    gtest_filter,
+    list_tests,
+    tbpl_parser,
+    enable_webrender,
+    package,
+    adb_path,
+    device_serial,
+    remote_test_root,
+    libxul_path,
+    no_install,
+    debug,
+    debugger,
+    debugger_args,
+):
+
+    # We lazily build gtest because it's slow to link
+    try:
+        command_context.config_environment
+    except Exception:
+        print("Please run |./mach build| before |./mach gtest|.")
+        return 1
+
+    res = command_context._mach_context.commands.dispatch(
+        "build", command_context._mach_context, what=["recurse_gtest"]
+    )
+    if res:
+        print("Could not build xul-gtest")
+        return res
+
+    if command_context.substs.get("MOZ_WIDGET_TOOLKIT") == "cocoa":
+        command_context._run_make(
+            directory="browser/app", target="repackage", ensure_exit_code=True
+        )
+
+    cwd = os.path.join(command_context.topobjdir, "_tests", "gtest")
+
+    if not os.path.isdir(cwd):
+        os.makedirs(cwd)
+
+    if conditions.is_android(command_context):
+        if jobs != 1:
+            print("--jobs is not supported on Android and will be ignored")
+        if debug or debugger or debugger_args:
+            print("--debug options are not supported on Android and will be ignored")
+        from mozrunner.devices.android_device import InstallIntent
+
+        return android_gtest(
+            command_context,
+            cwd,
+            shuffle,
+            gtest_filter,
+            package,
+            adb_path,
+            device_serial,
+            remote_test_root,
+            libxul_path,
+            InstallIntent.NO if no_install else InstallIntent.YES,
+        )
+
+    if (
+        package
+        or adb_path
+        or device_serial
+        or remote_test_root
+        or libxul_path
+        or no_install
+    ):
+        print("One or more Android-only options will be ignored")
+
+    app_path = command_context.get_binary_path("app")
+    args = [app_path, "-unittest", "--gtest_death_test_style=threadsafe"]
+
+    if (
+        sys.platform.startswith("win")
+        and "MOZ_LAUNCHER_PROCESS" in command_context.defines
+    ):
+        args.append("--wait-for-browser")
+
+    if list_tests:
+        args.append("--gtest_list_tests")
+
+    if debug or debugger or debugger_args:
+        args = _prepend_debugger_args(args, debugger, debugger_args)
+        if not args:
+            return 1
+
+    # Use GTest environment variable to control test execution
+    # For details see:
+    # https://google.github.io/googletest/advanced.html#running-test-programs-advanced-options
+    gtest_env = {"GTEST_FILTER": gtest_filter}
+
+    # Note: we must normalize the path here so that gtest on Windows sees
+    # a MOZ_GMP_PATH which has only Windows dir separators, because
+    # nsIFile cannot open the paths with non-Windows dir separators.
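+    # For example, a topobjdir of "z:/build/obj" needs to be normalized to
+    # "z:\build\obj" before being put in MOZ_GMP_PATH.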
+ xre_path = os.path.join(os.path.normpath(command_context.topobjdir), "dist", "bin") + gtest_env["MOZ_XRE_DIR"] = xre_path + gtest_env["MOZ_GMP_PATH"] = os.pathsep.join( + os.path.join(xre_path, p, "1.0") for p in ("gmp-fake", "gmp-fakeopenh264") + ) + + gtest_env["MOZ_RUN_GTEST"] = "True" + + if shuffle: + gtest_env["GTEST_SHUFFLE"] = "True" + + if tbpl_parser: + gtest_env["MOZ_TBPL_PARSER"] = "True" + + if enable_webrender: + gtest_env["MOZ_WEBRENDER"] = "1" + gtest_env["MOZ_ACCELERATED"] = "1" + else: + gtest_env["MOZ_WEBRENDER"] = "0" + + if jobs == 1: + return command_context.run_process( + args=args, + append_env=gtest_env, + cwd=cwd, + ensure_exit_code=False, + pass_thru=True, + ) + + import functools + + from mozprocess import ProcessHandlerMixin + + def handle_line(job_id, line): + # Prepend the jobId + line = "[%d] %s" % (job_id + 1, line.strip()) + command_context.log(logging.INFO, "GTest", {"line": line}, "{line}") + + gtest_env["GTEST_TOTAL_SHARDS"] = str(jobs) + processes = {} + for i in range(0, jobs): + gtest_env["GTEST_SHARD_INDEX"] = str(i) + processes[i] = ProcessHandlerMixin( + [app_path, "-unittest"], + cwd=cwd, + env=gtest_env, + processOutputLine=[functools.partial(handle_line, i)], + universal_newlines=True, + ) + processes[i].run() + + exit_code = 0 + for process in processes.values(): + status = process.wait() + if status: + exit_code = status + + # Clamp error code to 255 to prevent overflowing multiple of + # 256 into 0 + if exit_code > 255: + exit_code = 255 + + return exit_code + + +def android_gtest( + command_context, + test_dir, + shuffle, + gtest_filter, + package, + adb_path, + device_serial, + remote_test_root, + libxul_path, + install, +): + # setup logging for mozrunner + from mozlog.commandline import setup_logging + + format_args = {"level": command_context._mach_context.settings["test"]["level"]} + default_format = command_context._mach_context.settings["test"]["format"] + setup_logging("mach-gtest", {}, {default_format: sys.stdout}, format_args) + + # ensure that a device is available and test app is installed + from mozrunner.devices.android_device import get_adb_path, verify_android_device + + verify_android_device( + command_context, install=install, app=package, device_serial=device_serial + ) + + if not adb_path: + adb_path = get_adb_path(command_context) + if not libxul_path: + libxul_path = os.path.join( + command_context.topobjdir, "dist", "bin", "gtest", "libxul.so" + ) + + # run gtest via remotegtests.py + exit_code = 0 + import imp + + path = os.path.join("testing", "gtest", "remotegtests.py") + with open(path, "r") as fh: + imp.load_module("remotegtests", fh, path, (".py", "r", imp.PY_SOURCE)) + import remotegtests + + tester = remotegtests.RemoteGTests() + if not tester.run_gtest( + test_dir, + shuffle, + gtest_filter, + package, + adb_path, + device_serial, + remote_test_root, + libxul_path, + None, + ): + exit_code = 1 + tester.cleanup() + + return exit_code + + +@Command( + "package", + category="post-build", + description="Package the built product for distribution as an APK, DMG, etc.", +) +@CommandArgument( + "-v", + "--verbose", + action="store_true", + help="Verbose output for what commands the packaging process is running.", +) +def package(command_context, verbose=False): + """Package the built product for distribution.""" + ret = command_context._run_make( + directory=".", target="package", silent=not verbose, ensure_exit_code=False + ) + if ret == 0: + command_context.notify("Packaging complete") + return ret + + +def 
_get_android_install_parser(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--app", + default="org.mozilla.geckoview_example", + help="Android package to install (default: org.mozilla.geckoview_example)", + ) + parser.add_argument( + "--verbose", + "-v", + action="store_true", + help="Print verbose output when installing.", + ) + parser.add_argument( + "--aab", + action="store_true", + help="Install as AAB (Android App Bundle)", + ) + return parser + + +def setup_install_parser(): + build = MozbuildObject.from_environment(cwd=here) + if conditions.is_android(build): + return _get_android_install_parser() + return argparse.ArgumentParser() + + +@Command( + "install", + category="post-build", + conditions=[conditions.has_build], + parser=setup_install_parser, + description="Install the package on the machine (or device in the case of Android).", +) +def install(command_context, **kwargs): + """Install a package.""" + if conditions.is_android(command_context): + from mozrunner.devices.android_device import ( + InstallIntent, + verify_android_device, + ) + + ret = ( + verify_android_device(command_context, install=InstallIntent.YES, **kwargs) + == 0 + ) + else: + ret = command_context._run_make( + directory=".", target="install", ensure_exit_code=False + ) + + if ret == 0: + command_context.notify("Install complete") + return ret + + +@SettingsProvider +class RunSettings: + config_settings = [ + ( + "runprefs.*", + "string", + """ +Pass a pref into Firefox when using `mach run`, of the form `foo.bar=value`. +Prefs will automatically be cast into the appropriate type. Integers can be +single quoted to force them to be strings. +""".strip(), + ) + ] + + +def _get_android_run_parser(): + parser = argparse.ArgumentParser() + group = parser.add_argument_group("The compiled program") + group.add_argument( + "--app", + default="org.mozilla.geckoview_example", + help="Android package to run (default: org.mozilla.geckoview_example)", + ) + group.add_argument( + "--intent", + default="android.intent.action.VIEW", + help="Android intent action to launch with " + "(default: android.intent.action.VIEW)", + ) + group.add_argument( + "--setenv", + dest="env", + action="append", + default=[], + help="Set target environment variable, like FOO=BAR", + ) + group.add_argument( + "--profile", + "-P", + default=None, + help="Path to Gecko profile, like /path/to/host/profile " + "or /path/to/target/profile", + ) + group.add_argument("--url", default=None, help="URL to open") + group.add_argument( + "--aab", + action="store_true", + default=False, + help="Install app ass App Bundle (AAB).", + ) + group.add_argument( + "--no-install", + action="store_true", + default=False, + help="Do not try to install application on device before running " + "(default: False)", + ) + group.add_argument( + "--no-wait", + action="store_true", + default=False, + help="Do not wait for application to start before returning " + "(default: False)", + ) + group.add_argument( + "--enable-fission", + action="store_true", + help="Run the program with Fission (site isolation) enabled.", + ) + group.add_argument( + "--fail-if-running", + action="store_true", + default=False, + help="Fail if application is already running (default: False)", + ) + group.add_argument( + "--restart", + action="store_true", + default=False, + help="Stop the application if it is already running (default: False)", + ) + + group = parser.add_argument_group("Debugging") + group.add_argument("--debug", action="store_true", help="Enable the lldb 
debugger.") + group.add_argument( + "--debugger", + default=None, + type=str, + help="Name of lldb compatible debugger to use.", + ) + group.add_argument( + "--debugger-args", + default=None, + metavar="params", + type=str, + help="Command-line arguments to pass to the debugger itself; " + "split as the Bourne shell would.", + ) + group.add_argument( + "--no-attach", + action="store_true", + default=False, + help="Start the debugging servers on the device but do not " + "attach any debuggers.", + ) + group.add_argument( + "--use-existing-process", + action="store_true", + default=False, + help="Select an existing process to debug.", + ) + return parser + + +def _get_jsshell_run_parser(): + parser = argparse.ArgumentParser() + group = parser.add_argument_group("the compiled program") + group.add_argument( + "params", + nargs="...", + default=[], + help="Command-line arguments to be passed through to the program. Not " + "specifying a --profile or -P option will result in a temporary profile " + "being used.", + ) + + group = parser.add_argument_group("debugging") + group.add_argument( + "--debug", + action="store_true", + help="Enable the debugger. Not specifying a --debugger option will result " + "in the default debugger being used.", + ) + group.add_argument( + "--debugger", default=None, type=str, help="Name of debugger to use." + ) + group.add_argument( + "--debugger-args", + default=None, + metavar="params", + type=str, + help="Command-line arguments to pass to the debugger itself; " + "split as the Bourne shell would.", + ) + group.add_argument( + "--debugparams", + action=StoreDebugParamsAndWarnAction, + default=None, + type=str, + dest="debugger_args", + help=argparse.SUPPRESS, + ) + + return parser + + +def _get_desktop_run_parser(): + parser = argparse.ArgumentParser() + group = parser.add_argument_group("the compiled program") + group.add_argument( + "params", + nargs="...", + default=[], + help="Command-line arguments to be passed through to the program. Not " + "specifying a --profile or -P option will result in a temporary profile " + "being used.", + ) + group.add_argument("--packaged", action="store_true", help="Run a packaged build.") + group.add_argument( + "--app", help="Path to executable to run (default: output of ./mach build)" + ) + group.add_argument( + "--remote", + "-r", + action="store_true", + help="Do not pass the --no-remote argument by default.", + ) + group.add_argument( + "--background", + "-b", + action="store_true", + help="Do not pass the --foreground argument by default on Mac.", + ) + group.add_argument( + "--noprofile", + "-n", + action="store_true", + help="Do not pass the --profile argument by default.", + ) + group.add_argument( + "--disable-e10s", + action="store_true", + help="Run the program with electrolysis disabled.", + ) + group.add_argument( + "--enable-crash-reporter", + action="store_true", + help="Run the program with the crash reporter enabled.", + ) + group.add_argument( + "--disable-fission", + action="store_true", + help="Run the program with Fission (site isolation) disabled.", + ) + group.add_argument( + "--setpref", + action="append", + default=[], + help="Set the specified pref before starting the program. Can be set " + "multiple times. 
Prefs can also be set in ~/.mozbuild/machrc in the " + "[runprefs] section - see `./mach settings` for more information.", + ) + group.add_argument( + "--temp-profile", + action="store_true", + help="Run the program using a new temporary profile created inside " + "the objdir.", + ) + group.add_argument( + "--macos-open", + action="store_true", + help="On macOS, run the program using the open(1) command. Per open(1), " + "the browser is launched \"just as if you had double-clicked the file's " + 'icon". The browser can not be launched under a debugger with this ' + "option.", + ) + + group = parser.add_argument_group("debugging") + group.add_argument( + "--debug", + action="store_true", + help="Enable the debugger. Not specifying a --debugger option will result " + "in the default debugger being used.", + ) + group.add_argument( + "--debugger", default=None, type=str, help="Name of debugger to use." + ) + group.add_argument( + "--debugger-args", + default=None, + metavar="params", + type=str, + help="Command-line arguments to pass to the debugger itself; " + "split as the Bourne shell would.", + ) + group.add_argument( + "--debugparams", + action=StoreDebugParamsAndWarnAction, + default=None, + type=str, + dest="debugger_args", + help=argparse.SUPPRESS, + ) + + group = parser.add_argument_group("DMD") + group.add_argument( + "--dmd", + action="store_true", + help="Enable DMD. The following arguments have no effect without this.", + ) + group.add_argument( + "--mode", + choices=["live", "dark-matter", "cumulative", "scan"], + help="Profiling mode. The default is 'dark-matter'.", + ) + group.add_argument( + "--stacks", + choices=["partial", "full"], + help="Allocation stack trace coverage. The default is 'partial'.", + ) + group.add_argument( + "--show-dump-stats", action="store_true", help="Show stats when doing dumps." 
+ ) + + return parser + + +def setup_run_parser(): + build = MozbuildObject.from_environment(cwd=here) + if conditions.is_android(build): + return _get_android_run_parser() + if conditions.is_jsshell(build): + return _get_jsshell_run_parser() + return _get_desktop_run_parser() + + +@Command( + "run", + category="post-build", + conditions=[conditions.has_build_or_shell], + parser=setup_run_parser, + description="Run the compiled program, possibly under a debugger or DMD.", +) +def run(command_context, **kwargs): + """Run the compiled program.""" + if conditions.is_android(command_context): + return _run_android(command_context, **kwargs) + if conditions.is_jsshell(command_context): + return _run_jsshell(command_context, **kwargs) + return _run_desktop(command_context, **kwargs) + + +def _run_android( + command_context, + app="org.mozilla.geckoview_example", + intent=None, + env=[], + profile=None, + url=None, + aab=False, + no_install=None, + no_wait=None, + fail_if_running=None, + restart=None, + enable_fission=False, + debug=False, + debugger=None, + debugger_args=None, + no_attach=False, + use_existing_process=False, +): + from mozrunner.devices.android_device import ( + InstallIntent, + _get_device, + verify_android_device, + ) + from six.moves import shlex_quote + + if app == "org.mozilla.geckoview_example": + activity_name = "org.mozilla.geckoview_example.GeckoViewActivity" + elif app == "org.mozilla.geckoview.test_runner": + activity_name = "org.mozilla.geckoview.test_runner.TestRunnerActivity" + elif "fennec" in app or "firefox" in app: + activity_name = "org.mozilla.gecko.BrowserApp" + else: + raise RuntimeError("Application not recognized: {}".format(app)) + + # If we want to debug an existing process, we implicitly do not want + # to kill it and pave over its installation with a new one. + if debug and use_existing_process: + no_install = True + + # `verify_android_device` respects `DEVICE_SERIAL` if it is set and sets it otherwise. + verify_android_device( + command_context, + app=app, + aab=aab, + debugger=debug, + install=InstallIntent.NO if no_install else InstallIntent.YES, + ) + device_serial = os.environ.get("DEVICE_SERIAL") + if not device_serial: + print("No ADB devices connected.") + return 1 + + device = _get_device(command_context.substs, device_serial=device_serial) + + if debug: + # This will terminate any existing processes, so we skip it when we + # want to attach to an existing one. + if not use_existing_process: + command_context.log( + logging.INFO, + "run", + {"app": app}, + "Setting {app} as the device debug app", + ) + device.shell("am set-debug-app -w --persistent %s" % app) + else: + # Make sure that the app doesn't block waiting for jdb + device.shell("am clear-debug-app") + + if not debug or not use_existing_process: + args = [] + if profile: + if os.path.isdir(profile): + host_profile = profile + # Always /data/local/tmp, rather than `device.test_root`, because + # GeckoView only takes its configuration file from /data/local/tmp, + # and we want to follow suit. 
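+                # With the default app this is e.g.
+                # "/data/local/tmp/org.mozilla.geckoview_example-profile".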
+ target_profile = "/data/local/tmp/{}-profile".format(app) + device.rm(target_profile, recursive=True, force=True) + device.push(host_profile, target_profile) + command_context.log( + logging.INFO, + "run", + { + "host_profile": host_profile, + "target_profile": target_profile, + }, + 'Pushed profile from host "{host_profile}" to ' + 'target "{target_profile}"', + ) + else: + target_profile = profile + command_context.log( + logging.INFO, + "run", + {"target_profile": target_profile}, + 'Using profile from target "{target_profile}"', + ) + + args = ["--profile", shlex_quote(target_profile)] + + # FIXME: When android switches to using Fission by default, + # MOZ_FORCE_DISABLE_FISSION will need to be configured correctly. + if enable_fission: + env.append("MOZ_FORCE_ENABLE_FISSION=1") + + extras = {} + for i, e in enumerate(env): + extras["env{}".format(i)] = e + if args: + extras["args"] = " ".join(args) + + if env or args: + restart = True + + if restart: + fail_if_running = False + command_context.log( + logging.INFO, + "run", + {"app": app}, + "Stopping {app} to ensure clean restart.", + ) + device.stop_application(app) + + # We'd prefer to log the actual `am start ...` command, but it's not trivial + # to wire the device's logger to mach's logger. + command_context.log( + logging.INFO, + "run", + {"app": app, "activity_name": activity_name}, + "Starting {app}/{activity_name}.", + ) + + device.launch_application( + app_name=app, + activity_name=activity_name, + intent=intent, + extras=extras, + url=url, + wait=not no_wait, + fail_if_running=fail_if_running, + ) + + if not debug: + return 0 + + from mozrunner.devices.android_device import run_lldb_server + + socket_file = run_lldb_server(app, command_context.substs, device_serial) + if not socket_file: + command_context.log( + logging.ERROR, + "run", + {"msg": "Failed to obtain a socket file!"}, + "{msg}", + ) + return 1 + + # Give lldb-server a chance to start + command_context.log( + logging.INFO, + "run", + {"msg": "Pausing to ensure lldb-server has started..."}, + "{msg}", + ) + time.sleep(1) + + if use_existing_process: + + def _is_geckoview_process(proc_name, pkg_name): + if not proc_name.startswith(pkg_name): + # Definitely not our package + return False + if len(proc_name) == len(pkg_name): + # Parent process from our package + return True + if proc_name[len(pkg_name)] == ":": + # Child process from our package + return True + # Process name is a prefix of our package name + return False + + # If we're going to attach to an existing process, we need to know + # who we're attaching to. Obtain a list of all processes associated + # with our desired app. 
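+        # For example, with app "org.mozilla.geckoview_example" this keeps the
+        # parent process "org.mozilla.geckoview_example" and children such as
+        # "org.mozilla.geckoview_example:tab", but drops unrelated packages
+        # that merely share the prefix.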
+ proc_list = [ + proc[:-1] + for proc in device.get_process_list() + if _is_geckoview_process(proc[1], app) + ] + + if not proc_list: + command_context.log( + logging.ERROR, + "run", + {"app": app}, + "No existing {app} processes found", + ) + return 1 + elif len(proc_list) == 1: + pid = proc_list[0][0] + else: + # Prompt the user to determine which process we should use + entries = [ + "%2d: %6d %s" % (n, p[0], p[1]) + for n, p in enumerate(proc_list, start=1) + ] + prompt = "\n".join(["\nPlease select a process:\n"] + entries) + "\n\n" + valid_range = range(1, len(proc_list) + 1) + + while True: + response = int(input(prompt).strip()) + if response in valid_range: + break + command_context.log( + logging.ERROR, "run", {"msg": "Invalid response"}, "{msg}" + ) + pid = proc_list[response - 1][0] + else: + # We're not using an existing process, so there should only be our + # parent process at this time. + pids = device.pidof(app_name=app) + if len(pids) != 1: + command_context.log( + logging.ERROR, + "run", + {"msg": "Not sure which pid to attach to!"}, + "{msg}", + ) + return 1 + pid = pids[0] + + command_context.log( + logging.INFO, "run", {"pid": str(pid)}, "Debuggee pid set to {pid}..." + ) + + lldb_connect_url = "unix-abstract-connect://" + socket_file + local_jdb_port = device.forward("tcp:0", "jdwp:%d" % pid) + + if no_attach: + command_context.log( + logging.INFO, + "run", + {"pid": str(pid), "url": lldb_connect_url}, + "To debug native code, connect lldb to {url} and attach to pid {pid}", + ) + command_context.log( + logging.INFO, + "run", + {"port": str(local_jdb_port)}, + "To debug Java code, connect jdb using tcp to localhost:{port}", + ) + return 0 + + # Beyond this point we want to be able to automatically clean up after ourselves, + # so we enter the following try block. + try: + command_context.log( + logging.INFO, "run", {"msg": "Starting debugger..."}, "{msg}" + ) + + if not use_existing_process: + # The app is waiting for jdb to attach and will not continue running + # until we do so. + def _jdb_ping(local_jdb_port): + jdb_process = subprocess.Popen( + ["jdb", "-attach", "localhost:%d" % local_jdb_port], + stdin=subprocess.PIPE, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + encoding="utf-8", + ) + # Wait a bit to provide enough time for jdb and lldb to connect + # to the debuggee + time.sleep(5) + # NOTE: jdb cannot detach while the debuggee is frozen in lldb, + # so its process might not necessarily exit immediately once the + # quit command has been issued. 
+ jdb_process.communicate(input="quit\n") + + # We run this in the background while lldb attaches in the foreground + from threading import Thread + + jdb_thread = Thread(target=_jdb_ping, args=[local_jdb_port]) + jdb_thread.start() + + LLDBINIT = """ +settings set target.inline-breakpoint-strategy always +settings append target.exec-search-paths {obj_xul} +settings append target.exec-search-paths {obj_mozglue} +settings append target.exec-search-paths {obj_nss} +platform select remote-android +platform connect {connect_url} +process attach {continue_flag}-p {pid!s} +""".lstrip() + + obj_xul = os.path.join(command_context.topobjdir, "toolkit", "library", "build") + obj_mozglue = os.path.join(command_context.topobjdir, "mozglue", "build") + obj_nss = os.path.join(command_context.topobjdir, "security") + + if use_existing_process: + continue_flag = "" + else: + # Tell lldb to continue after attaching; instead we'll break at + # the initial SEGVHandler, similarly to how things work when we + # attach using Android Studio. Doing this gives Android a chance + # to dismiss the "Waiting for Debugger" dialog. + continue_flag = "-c " + + try: + # Write out our lldb startup commands to a temp file. We'll pass its + # name to lldb on its command line. + with tempfile.NamedTemporaryFile( + mode="wt", encoding="utf-8", newline="\n", delete=False + ) as tmp: + tmp_lldb_start_script = tmp.name + tmp.write( + LLDBINIT.format( + obj_xul=obj_xul, + obj_mozglue=obj_mozglue, + obj_nss=obj_nss, + connect_url=lldb_connect_url, + continue_flag=continue_flag, + pid=pid, + ) + ) + + our_debugger_args = "-s %s" % tmp_lldb_start_script + if debugger_args: + full_debugger_args = " ".join([debugger_args, our_debugger_args]) + else: + full_debugger_args = our_debugger_args + + args = _prepend_debugger_args([], debugger, full_debugger_args) + if not args: + return 1 + + return command_context.run_process( + args=args, ensure_exit_code=False, pass_thru=True + ) + finally: + os.remove(tmp_lldb_start_script) + finally: + device.remove_forwards("tcp:%d" % local_jdb_port) + device.shell("pkill -f lldb-server", enable_run_as=True) + if not use_existing_process: + device.shell("am clear-debug-app") + + +def _run_jsshell(command_context, params, debug, debugger, debugger_args): + try: + binpath = command_context.get_binary_path("app") + except BinaryNotFoundException as e: + command_context.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}") + command_context.log(logging.INFO, "run", {"help": e.help()}, "{help}") + return 1 + + args = [binpath] + + if params: + args.extend(params) + + extra_env = {"RUST_BACKTRACE": "full"} + + if debug or debugger or debugger_args: + if "INSIDE_EMACS" in os.environ: + command_context.log_manager.terminal_handler.setLevel(logging.WARNING) + + import mozdebug + + if not debugger: + # No debugger name was provided. Look for the default ones on + # current OS. + debugger = mozdebug.get_default_debugger_name( + mozdebug.DebuggerSearch.KeepLooking + ) + + if debugger: + debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args) + + if not debugger or not debuggerInfo: + print("Could not find a suitable debugger in your PATH.") + return 1 + + # Prepend the debugger args. 
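+        # The resulting command line has the shape
+        # [debugger, debugger args..., js shell binary, params...].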
+        args = [debuggerInfo.path] + debuggerInfo.args + args
+
+    return command_context.run_process(
+        args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env
+    )
+
+
+def _run_desktop(
+    command_context,
+    params,
+    packaged,
+    app,
+    remote,
+    background,
+    noprofile,
+    disable_e10s,
+    enable_crash_reporter,
+    disable_fission,
+    setpref,
+    temp_profile,
+    macos_open,
+    debug,
+    debugger,
+    debugger_args,
+    dmd,
+    mode,
+    stacks,
+    show_dump_stats,
+):
+    from mozprofile import Preferences, Profile
+
+    try:
+        if packaged:
+            binpath = command_context.get_binary_path(where="staged-package")
+        else:
+            binpath = app or command_context.get_binary_path("app")
+    except BinaryNotFoundException as e:
+        command_context.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}")
+        if packaged:
+            command_context.log(
+                logging.INFO,
+                "run",
+                {
+                    "help": "It looks like your build isn't packaged. "
+                    "You can run |./mach package| to package it."
+                },
+                "{help}",
+            )
+        else:
+            command_context.log(logging.INFO, "run", {"help": e.help()}, "{help}")
+        return 1
+
+    args = []
+    if macos_open:
+        if debug:
+            print(
+                "The browser cannot be launched in the debugger "
+                "when using the macOS open command."
+            )
+            return 1
+        try:
+            m = re.search(r"^.+\.app", binpath)
+            apppath = m.group(0)
+            args = ["open", apppath, "--args"]
+        except Exception as e:
+            print(
+                "Couldn't get the .app path from the binary path. "
+                "The macOS open option can only be used on macOS"
+            )
+            print(e)
+            return 1
+    else:
+        args = [binpath]
+
+    if params:
+        args.extend(params)
+
+    if not remote:
+        args.append("-no-remote")
+
+    if not background and sys.platform == "darwin":
+        args.append("-foreground")
+
+    if (
+        sys.platform.startswith("win")
+        and "MOZ_LAUNCHER_PROCESS" in command_context.defines
+    ):
+        args.append("-wait-for-browser")
+
+    no_profile_option_given = all(
+        p not in params for p in ["-profile", "--profile", "-P"]
+    )
+    no_backgroundtask_mode_option_given = all(
+        p not in params for p in ["-backgroundtask", "--backgroundtask"]
+    )
+    if (
+        no_profile_option_given
+        and no_backgroundtask_mode_option_given
+        and not noprofile
+    ):
+        prefs = {
+            "browser.aboutConfig.showWarning": False,
+            "browser.shell.checkDefaultBrowser": False,
+            "general.warnOnAboutConfig": False,
+        }
+        prefs.update(command_context._mach_context.settings.runprefs)
+        prefs.update([p.split("=", 1) for p in setpref])
+        for pref in prefs:
+            prefs[pref] = Preferences.cast(prefs[pref])
+
+        tmpdir = os.path.join(command_context.topobjdir, "tmp")
+        if not os.path.exists(tmpdir):
+            os.makedirs(tmpdir)
+
+        if temp_profile:
+            path = tempfile.mkdtemp(dir=tmpdir, prefix="profile-")
+        else:
+            path = os.path.join(tmpdir, "profile-default")
+
+        profile = Profile(path, preferences=prefs)
+        args.append("-profile")
+        args.append(profile.profile)
+
+    if not no_profile_option_given and setpref:
+        print("setpref is only supported if a profile is not specified")
+        return 1
+
+    some_debugging_option = debug or debugger or debugger_args
+
+    # By default, because Firefox is a GUI app, on Windows it will not
+    # 'create' a console to which stdout/stderr is printed. This means
+    # printf/dump debugging is invisible. We default to adding the
+    # -attach-console argument to fix this. We avoid this if we're launched
+    # under a debugger (which can do its own picking up of stdout/stderr).
+    # We also check for both the -console and -attach-console flags:
+    # -console causes Firefox to create a separate window;
+    # -attach-console just leaves us with output that gets relayed via mach.
+    # We shouldn't override the user using -console. For more info, see
+    # https://bugzilla.mozilla.org/show_bug.cgi?id=1257155
+    if (
+        sys.platform.startswith("win")
+        and not some_debugging_option
+        and "-console" not in args
+        and "--console" not in args
+        and "-attach-console" not in args
+        and "--attach-console" not in args
+    ):
+        args.append("-attach-console")
+
+    extra_env = {
+        "MOZ_DEVELOPER_REPO_DIR": command_context.topsrcdir,
+        "MOZ_DEVELOPER_OBJ_DIR": command_context.topobjdir,
+        "RUST_BACKTRACE": "full",
+    }
+
+    if not enable_crash_reporter:
+        extra_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
+    else:
+        extra_env["MOZ_CRASHREPORTER"] = "1"
+
+    if disable_e10s:
+        version_file = os.path.join(
+            command_context.topsrcdir, "browser", "config", "version.txt"
+        )
+        with open(version_file, "r") as f:
+            extra_env["MOZ_FORCE_DISABLE_E10S"] = f.read().strip()
+
+    if disable_fission:
+        extra_env["MOZ_FORCE_DISABLE_FISSION"] = "1"
+
+    if some_debugging_option:
+        if "INSIDE_EMACS" in os.environ:
+            command_context.log_manager.terminal_handler.setLevel(logging.WARNING)
+
+        import mozdebug
+
+        if not debugger:
+            # No debugger name was provided. Look for the default ones on
+            # current OS.
+            debugger = mozdebug.get_default_debugger_name(
+                mozdebug.DebuggerSearch.KeepLooking
+            )
+
+        if debugger:
+            debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)
+
+        if not debugger or not debuggerInfo:
+            print("Could not find a suitable debugger in your PATH.")
+            return 1
+
+        # Parameters come from the CLI. We need to convert them before
+        # their use.
+        if debugger_args:
+            from mozbuild import shellutil
+
+            try:
+                debugger_args = shellutil.split(debugger_args)
+            except shellutil.MetaCharacterException as e:
+                print(
+                    "The --debugger-args you passed require a real shell to parse them."
+                )
+                print("(We can't handle the %r character.)" % e.char)
+                return 1
+
+        # Prepend the debugger args.
+ args = [debuggerInfo.path] + debuggerInfo.args + args + + if dmd: + dmd_params = [] + + if mode: + dmd_params.append("--mode=" + mode) + if stacks: + dmd_params.append("--stacks=" + stacks) + if show_dump_stats: + dmd_params.append("--show-dump-stats=yes") + + if dmd_params: + extra_env["DMD"] = " ".join(dmd_params) + else: + extra_env["DMD"] = "1" + + return command_context.run_process( + args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env + ) + + +@Command( + "buildsymbols", + category="post-build", + description="Produce a package of Breakpad-format symbols.", +) +def buildsymbols(command_context): + """Produce a package of debug symbols suitable for use with Breakpad.""" + return command_context._run_make( + directory=".", target="buildsymbols", ensure_exit_code=False + ) + + +@Command( + "environment", + category="build-dev", + description="Show info about the mach and build environment.", +) +@CommandArgument( + "--format", + default="pretty", + choices=["pretty", "json"], + help="Print data in the given format.", +) +@CommandArgument("--output", "-o", type=str, help="Output to the given file.") +@CommandArgument("--verbose", "-v", action="store_true", help="Print verbose output.") +def environment(command_context, format, output=None, verbose=False): + func = {"pretty": _environment_pretty, "json": _environment_json}[ + format.replace(".", "_") + ] + + if output: + # We want to preserve mtimes if the output file already exists + # and the content hasn't changed. + from mozbuild.util import FileAvoidWrite + + with FileAvoidWrite(output) as out: + return func(command_context, out, verbose) + return func(command_context, sys.stdout, verbose) + + +def _environment_pretty(command_context, out, verbose): + state_dir = command_context._mach_context.state_dir + + print("platform:\n\t%s" % platform.platform(), file=out) + print("python version:\n\t%s" % sys.version, file=out) + print("python prefix:\n\t%s" % sys.prefix, file=out) + print("mach cwd:\n\t%s" % command_context._mach_context.cwd, file=out) + print("os cwd:\n\t%s" % os.getcwd(), file=out) + print("mach directory:\n\t%s" % command_context._mach_context.topdir, file=out) + print("state directory:\n\t%s" % state_dir, file=out) + + print("object directory:\n\t%s" % command_context.topobjdir, file=out) + + if command_context.mozconfig["path"]: + print("mozconfig path:\n\t%s" % command_context.mozconfig["path"], file=out) + if command_context.mozconfig["configure_args"]: + print("mozconfig configure args:", file=out) + for arg in command_context.mozconfig["configure_args"]: + print("\t%s" % arg, file=out) + + if command_context.mozconfig["make_extra"]: + print("mozconfig extra make args:", file=out) + for arg in command_context.mozconfig["make_extra"]: + print("\t%s" % arg, file=out) + + if command_context.mozconfig["make_flags"]: + print("mozconfig make flags:", file=out) + for arg in command_context.mozconfig["make_flags"]: + print("\t%s" % arg, file=out) + + config = None + + try: + config = command_context.config_environment + + except Exception: + pass + + if config: + print("config topsrcdir:\n\t%s" % config.topsrcdir, file=out) + print("config topobjdir:\n\t%s" % config.topobjdir, file=out) + + if verbose: + print("config substitutions:", file=out) + for k in sorted(config.substs): + print("\t%s: %s" % (k, config.substs[k]), file=out) + + print("config defines:", file=out) + for k in sorted(config.defines): + print("\t%s" % k, file=out) + + +def _environment_json(command_context, out, verbose): + import json 
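+
+    # MozbuildObject and set instances are not JSON-serializable by default;
+    # the encoder defined below converts them to plain dicts and lists first.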
+ + class EnvironmentEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, MozbuildObject): + result = { + "topsrcdir": obj.topsrcdir, + "topobjdir": obj.topobjdir, + "mozconfig": obj.mozconfig, + } + if verbose: + result["substs"] = obj.substs + result["defines"] = obj.defines + return result + elif isinstance(obj, set): + return list(obj) + return json.JSONEncoder.default(self, obj) + + json.dump(command_context, cls=EnvironmentEncoder, sort_keys=True, fp=out) + + +@Command( + "repackage", + category="misc", + description="Repackage artifacts into different formats.", +) +def repackage(command_context): + """Repackages artifacts into different formats. + + This is generally used after packages are signed by the signing + scriptworkers in order to bundle things up into shippable formats, such as a + .dmg on OSX or an installer exe on Windows. + """ + print("Usage: ./mach repackage [dmg|pkg|installer|mar] [args...]") + + +@SubCommand( + "repackage", + "deb", + description="Repackage a tar file into a .deb for Linux", + virtualenv_name="repackage-deb", +) +@CommandArgument( + "--input", "-i", type=str, required=True, help="Input tarfile filename" +) +@CommandArgument("--output", "-o", type=str, required=True, help="Output .deb filename") +@CommandArgument("--arch", type=str, required=True, help="One of ['x86', 'x86_64']") +@CommandArgument( + "--version", + type=str, + required=True, + help="The Firefox version used to create the installer", +) +@CommandArgument( + "--build-number", + type=str, + required=True, + help="The release's build number", +) +@CommandArgument( + "--templates", + type=str, + required=True, + help="Location of the templates used to generate the debian/ directory files", +) +@CommandArgument( + "--release-product", + type=str, + required=True, + help="The product being shipped. Used to disambiguate beta/devedition etc.", +) +@CommandArgument( + "--release-type", + type=str, + required=True, + help="The release being shipped. 
Used to disambiguate nightly/try etc.", +) +def repackage_deb( + command_context, + input, + output, + arch, + version, + build_number, + templates, + release_product, + release_type, +): + if not os.path.exists(input): + print("Input file does not exist: %s" % input) + return 1 + + template_dir = os.path.join( + command_context.topsrcdir, + templates, + ) + + from fluent.runtime.fallback import FluentLocalization, FluentResourceLoader + + from mozbuild.repackaging.deb import repackage_deb + + repackage_deb( + command_context.log, + input, + output, + template_dir, + arch, + version, + build_number, + release_product, + release_type, + FluentLocalization, + FluentResourceLoader, + ) + + +@SubCommand( + "repackage", + "deb-l10n", + description="Repackage a .xpi langpack file into a .deb for Linux", +) +@CommandArgument( + "--input-xpi-file", type=str, required=True, help="Path to the XPI file" +) +@CommandArgument( + "--input-tar-file", + type=str, + required=True, + help="Path to tar archive that contains application.ini", +) +@CommandArgument( + "--version", + type=str, + required=True, + help="The Firefox version used to create the installer", +) +@CommandArgument( + "--build-number", + type=str, + required=True, + help="The release's build number", +) +@CommandArgument("--output", "-o", type=str, required=True, help="Output filename") +@CommandArgument( + "--templates", + type=str, + required=True, + help="Location of the templates used to generate the debian/ directory files", +) +def repackage_deb_l10n( + command_context, + input_xpi_file, + input_tar_file, + output, + version, + build_number, + templates, +): + for input_file in (input_xpi_file, input_tar_file): + if not os.path.exists(input_file): + print("Input file does not exist: %s" % input_file) + return 1 + + template_dir = os.path.join( + command_context.topsrcdir, + templates, + ) + + from mozbuild.repackaging.deb import repackage_deb_l10n + + repackage_deb_l10n( + input_xpi_file, input_tar_file, output, template_dir, version, build_number + ) + + +@SubCommand("repackage", "dmg", description="Repackage a tar file into a .dmg for OSX") +@CommandArgument("--input", "-i", type=str, required=True, help="Input filename") +@CommandArgument("--output", "-o", type=str, required=True, help="Output filename") +def repackage_dmg(command_context, input, output): + if not os.path.exists(input): + print("Input file does not exist: %s" % input) + return 1 + + from mozbuild.repackaging.dmg import repackage_dmg + + repackage_dmg(input, output) + + +@SubCommand("repackage", "pkg", description="Repackage a tar file into a .pkg for OSX") +@CommandArgument("--input", "-i", type=str, required=True, help="Input filename") +@CommandArgument("--output", "-o", type=str, required=True, help="Output filename") +def repackage_pkg(command_context, input, output): + if not os.path.exists(input): + print("Input file does not exist: %s" % input) + return 1 + + from mozbuild.repackaging.pkg import repackage_pkg + + repackage_pkg(input, output) + + +@SubCommand( + "repackage", "installer", description="Repackage into a Windows installer exe" +) +@CommandArgument( + "--tag", + type=str, + required=True, + help="The .tag file used to build the installer", +) +@CommandArgument( + "--setupexe", + type=str, + required=True, + help="setup.exe file inside the installer", +) +@CommandArgument( + "--package", + type=str, + required=False, + help="Optional package .zip for building a full installer", +) +@CommandArgument("--output", "-o", type=str, required=True, 
help="Output filename") +@CommandArgument( + "--package-name", + type=str, + required=False, + help="Name of the package being rebuilt", +) +@CommandArgument( + "--sfx-stub", type=str, required=True, help="Path to the self-extraction stub." +) +@CommandArgument( + "--use-upx", + required=False, + action="store_true", + help="Run UPX on the self-extraction stub.", +) +def repackage_installer( + command_context, + tag, + setupexe, + package, + output, + package_name, + sfx_stub, + use_upx, +): + from mozbuild.repackaging.installer import repackage_installer + + repackage_installer( + topsrcdir=command_context.topsrcdir, + tag=tag, + setupexe=setupexe, + package=package, + output=output, + package_name=package_name, + sfx_stub=sfx_stub, + use_upx=use_upx, + ) + + +@SubCommand("repackage", "msi", description="Repackage into a MSI") +@CommandArgument( + "--wsx", + type=str, + required=True, + help="The wsx file used to build the installer", +) +@CommandArgument( + "--version", + type=str, + required=True, + help="The Firefox version used to create the installer", +) +@CommandArgument( + "--locale", type=str, required=True, help="The locale of the installer" +) +@CommandArgument( + "--arch", type=str, required=True, help="The architecture you are building." +) +@CommandArgument("--setupexe", type=str, required=True, help="setup.exe installer") +@CommandArgument("--candle", type=str, required=False, help="location of candle binary") +@CommandArgument("--light", type=str, required=False, help="location of light binary") +@CommandArgument("--output", "-o", type=str, required=True, help="Output filename") +def repackage_msi( + command_context, + wsx, + version, + locale, + arch, + setupexe, + candle, + light, + output, +): + from mozbuild.repackaging.msi import repackage_msi + + repackage_msi( + topsrcdir=command_context.topsrcdir, + wsx=wsx, + version=version, + locale=locale, + arch=arch, + setupexe=setupexe, + candle=candle, + light=light, + output=output, + ) + + +@SubCommand("repackage", "msix", description="Repackage into an MSIX") +@CommandArgument( + "--input", + type=str, + help="Package (ZIP) or directory to repackage. Defaults to $OBJDIR/dist/bin", +) +@CommandArgument( + "--version", + type=str, + help="The Firefox version used to create the package " + "(Default: generated from package 'application.ini')", +) +@CommandArgument( + "--channel", + type=str, + choices=["official", "beta", "aurora", "nightly", "unofficial"], + help="Release channel.", +) +@CommandArgument( + "--distribution-dir", + metavar="DISTRIBUTION", + nargs="*", + dest="distribution_dirs", + default=[], + help="List of distribution directories to include.", +) +@CommandArgument( + "--arch", + type=str, + choices=["x86", "x86_64", "aarch64"], + help="The architecture you are building.", +) +@CommandArgument( + "--vendor", + type=str, + default="Mozilla", + required=False, + help="The vendor to use in the Package/Identity/Name string to use in the App Manifest." + + " Defaults to 'Mozilla'.", +) +@CommandArgument( + "--identity-name", + type=str, + default=None, + required=False, + help="The Package/Identity/Name string to use in the App Manifest." + + " Defaults to '.Firefox', '.FirefoxBeta', etc.", +) +@CommandArgument( + "--publisher", + type=str, + # This default is baked into enough places under `browser/` that we need + # not extract a constant. + default="CN=Mozilla Corporation, OU=MSIX Packaging", + required=False, + help="The Package/Identity/Publisher string to use in the App Manifest." 
+ + " It must match the subject on the certificate used for signing.", +) +@CommandArgument( + "--publisher-display-name", + type=str, + default="Mozilla Corporation", + required=False, + help="The Package/Properties/PublisherDisplayName string to use in the App Manifest. " + + " Defaults to 'Mozilla Corporation'.", +) +@CommandArgument( + "--makeappx", + type=str, + default=None, + help="makeappx/makemsix binary name (required if you haven't run configure)", +) +@CommandArgument( + "--verbose", + default=False, + action="store_true", + help="Be verbose. (Default: false)", +) +@CommandArgument( + "--output", "-o", type=str, help="Output filename (Default: auto-generated)" +) +@CommandArgument( + "--sign", + default=False, + action="store_true", + help="Sign repackaged MSIX with self-signed certificate for local testing. " + "(Default: false)", +) +def repackage_msix( + command_context, + input, + version=None, + channel=None, + distribution_dirs=[], + arch=None, + identity_name=None, + vendor=None, + publisher=None, + publisher_display_name=None, + verbose=False, + output=None, + makeappx=None, + sign=False, +): + from mozbuild.repackaging.msix import repackage_msix + + command_context._set_log_level(verbose) + + firefox_to_msix_channel = { + "release": "official", + "beta": "beta", + "aurora": "aurora", + "nightly": "nightly", + } + + if not input: + if os.path.exists(command_context.bindir): + input = command_context.bindir + else: + command_context.log( + logging.ERROR, + "repackage-msix-no-input", + {}, + "No build found in objdir, please run ./mach build or pass --input", + ) + return 1 + + if not os.path.exists(input): + command_context.log( + logging.ERROR, + "repackage-msix-invalid-input", + {"input": input}, + "Input file or directory for msix repackaging does not exist: {input}", + ) + return 1 + + if not channel: + # Only try to guess the channel when this is clearly a local build. + if input.endswith("bin"): + channel = firefox_to_msix_channel.get( + command_context.defines.get("MOZ_UPDATE_CHANNEL"), "unofficial" + ) + else: + command_context.log( + logging.ERROR, + "repackage-msix-invalid-channel", + {}, + "Could not determine channel, please set --channel", + ) + return 1 + + if not arch: + # Only try to guess the arch when this is clearly a local build. + if input.endswith("bin"): + if command_context.substs["TARGET_CPU"] in ("i686", "x86_64", "aarch64"): + arch = command_context.substs["TARGET_CPU"].replace("i686", "x86") + + if not arch: + command_context.log( + logging.ERROR, + "repackage-msix-couldnt-detect-arch", + {}, + "Could not automatically detect architecture for msix repackaging. " + "Please pass --arch", + ) + return 1 + + output = repackage_msix( + input, + command_context.topsrcdir, + channel=channel, + arch=arch, + displayname=identity_name, + vendor=vendor, + publisher=publisher, + publisher_display_name=publisher_display_name, + version=version, + distribution_dirs=distribution_dirs, + # Configure this run. 
+ force=True, + verbose=verbose, + log=command_context.log, + output=output, + makeappx=makeappx, + ) + + if sign: + repackage_sign_msix(command_context, output, force=False, verbose=verbose) + + command_context.log( + logging.INFO, + "msix", + {"output": output}, + "Wrote MSIX: {output}", + ) + + +@SubCommand("repackage", "sign-msix", description="Sign an MSIX for local testing") +@CommandArgument("--input", type=str, required=True, help="MSIX to sign.") +@CommandArgument( + "--force", + default=False, + action="store_true", + help="Force recreating self-signed certificate. (Default: false)", +) +@CommandArgument( + "--verbose", + default=False, + action="store_true", + help="Be verbose. (Default: false)", +) +def repackage_sign_msix(command_context, input, force=False, verbose=False): + from mozbuild.repackaging.msix import sign_msix + + command_context._set_log_level(verbose) + + sign_msix(input, force=force, log=command_context.log, verbose=verbose) + + return 0 + + +@SubCommand("repackage", "mar", description="Repackage into complete MAR file") +@CommandArgument("--input", "-i", type=str, required=True, help="Input filename") +@CommandArgument("--mar", type=str, required=True, help="Mar binary path") +@CommandArgument("--output", "-o", type=str, required=True, help="Output filename") +@CommandArgument( + "--arch", type=str, required=True, help="The architecture you are building." +) +@CommandArgument("--mar-channel-id", type=str, help="Mar channel id") +def repackage_mar(command_context, input, mar, output, arch, mar_channel_id): + from mozbuild.repackaging.mar import repackage_mar + + repackage_mar( + command_context.topsrcdir, + input, + mar, + output, + arch=arch, + mar_channel_id=mar_channel_id, + ) + + +@Command( + "package-multi-locale", + category="post-build", + description="Package a multi-locale version of the built product " + "for distribution as an APK, DMG, etc.", +) +@CommandArgument( + "--locales", + metavar="LOCALES", + nargs="+", + required=True, + help="List of locales to package", +) +@CommandArgument( + "--verbose", action="store_true", help="Log informative status messages." +) +def package_l10n(command_context, verbose=False, locales=[]): + if "RecursiveMake" not in command_context.substs["BUILD_BACKENDS"]: + print( + "Artifact builds do not support localization. " + "If you know what you are doing, you can use:\n" + "ac_add_options --disable-compile-environment\n" + "export BUILD_BACKENDS=FasterMake,RecursiveMake\n" + "in your mozconfig." + ) + return 1 + + locales = sorted(locale for locale in locales if locale != "en-US") + + append_env = { + # We are only (re-)packaging, we don't want to (re-)build + # anything inside Gradle. 
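+        # MOZ_CHROME_MULTILOCALE is the space-separated locale list (e.g.
+        # "de fr ja"); the note printed at the end of this command explains
+        # why it must stay set for follow-up Android commands.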
+ "GRADLE_INVOKED_WITHIN_MACH_BUILD": "1", + "MOZ_CHROME_MULTILOCALE": " ".join(locales), + } + + command_context.log( + logging.INFO, + "package-multi-locale", + {"locales": locales}, + "Processing chrome Gecko resources for locales {locales}", + ) + command_context._run_make( + directory=command_context.topobjdir, + target=["chrome-{}".format(locale) for locale in locales], + append_env=append_env, + pass_thru=False, + print_directory=False, + ensure_exit_code=True, + ) + + if command_context.substs["MOZ_BUILD_APP"] == "mobile/android": + command_context.log( + logging.INFO, + "package-multi-locale", + {}, + "Invoking `mach android assemble-app`", + ) + command_context.run_process( + [ + mozpath.join(command_context.topsrcdir, "mach"), + "android", + "assemble-app", + ], + append_env=append_env, + pass_thru=True, + ensure_exit_code=True, + cwd=mozpath.join(command_context.topsrcdir), + ) + + if command_context.substs["MOZ_BUILD_APP"] == "browser": + command_context.log( + logging.INFO, "package-multi-locale", {}, "Repackaging browser" + ) + command_context._run_make( + directory=mozpath.join(command_context.topobjdir, "browser", "app"), + target=["tools"], + append_env=append_env, + pass_thru=True, + ensure_exit_code=True, + ) + + command_context.log( + logging.INFO, + "package-multi-locale", + {}, + "Invoking multi-locale `mach package`", + ) + target = ["package"] + if command_context.substs["MOZ_BUILD_APP"] == "mobile/android": + target.append("AB_CD=multi") + + command_context._run_make( + directory=command_context.topobjdir, + target=target, + append_env=append_env, + pass_thru=True, + ensure_exit_code=True, + ) + + if command_context.substs["MOZ_BUILD_APP"] == "mobile/android": + command_context.log( + logging.INFO, + "package-multi-locale", + {}, + "Invoking `mach android archive-geckoview`", + ) + command_context.run_process( + [ + mozpath.join(command_context.topsrcdir, "mach"), + "android", + "archive-geckoview", + ], + append_env=append_env, + pass_thru=True, + ensure_exit_code=True, + cwd=mozpath.join(command_context.topsrcdir), + ) + + # This is tricky: most Android build commands will regenerate the + # omnijar, producing a `res/multilocale.txt` that does not contain the + # set of locales packaged by this command. To avoid regenerating, we + # set a special environment variable. + print( + "Execute `env MOZ_CHROME_MULTILOCALE='{}' ".format( + append_env["MOZ_CHROME_MULTILOCALE"] + ) + + "mach android install-geckoview_example` " + + "to install the multi-locale geckoview_example and test APKs." + ) + + return 0 + + +def _prepend_debugger_args(args, debugger, debugger_args): + """ + Given an array with program arguments, prepend arguments to run it under a + debugger. + + :param args: The executable and arguments used to run the process normally. + :param debugger: The debugger to use, or empty to use the default debugger. + :param debugger_args: Any additional parameters to pass to the debugger. + """ + + import mozdebug + + if not debugger: + # No debugger name was provided. Look for the default ones on + # current OS. + debugger = mozdebug.get_default_debugger_name( + mozdebug.DebuggerSearch.KeepLooking + ) + + if debugger: + debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args) + + if not debugger or not debuggerInfo: + print("Could not find a suitable debugger in your PATH.") + return None + + # Parameters come from the CLI. We need to convert them before + # their use. 
+    if debugger_args:
+        from mozbuild import shellutil
+
+        try:
+            debugger_args = shellutil.split(debugger_args)
+        except shellutil.MetaCharacterException as e:
+            print("The --debugger-args you passed require a real shell to parse them.")
+            print("(We can't handle the %r character.)" % e.char)
+            return None
+
+    # Prepend the debugger args.
+    args = [debuggerInfo.path] + debuggerInfo.args + args
+    return args
diff --git a/python/mozbuild/mozbuild/makeutil.py b/python/mozbuild/mozbuild/makeutil.py
new file mode 100644
index 0000000000..76691c5fa1
--- /dev/null
+++ b/python/mozbuild/mozbuild/makeutil.py
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+from collections.abc import Iterable
+
+import six
+
+
+class Makefile(object):
+    """Provides an interface for writing simple makefiles
+
+    Instances of this class are created, populated with rules, then
+    written.
+    """
+
+    def __init__(self):
+        self._statements = []
+
+    def create_rule(self, targets=()):
+        """
+        Create a new rule in the makefile for the given targets.
+        Returns the corresponding Rule instance.
+        """
+        targets = list(targets)
+        for target in targets:
+            assert isinstance(target, six.text_type)
+        rule = Rule(targets)
+        self._statements.append(rule)
+        return rule
+
+    def add_statement(self, statement):
+        """
+        Add a raw statement in the makefile. Meant to be used for
+        simple variable assignments.
+        """
+        assert isinstance(statement, six.text_type)
+        self._statements.append(statement)
+
+    def dump(self, fh, removal_guard=True):
+        """
+        Dump all the rules to the given file handle. Optionally (and by
+        default), add guard rules for file removals (empty rules for other
+        rules' dependencies)
+        """
+        all_deps = set()
+        all_targets = set()
+        for statement in self._statements:
+            if isinstance(statement, Rule):
+                statement.dump(fh)
+                all_deps.update(statement.dependencies())
+                all_targets.update(statement.targets())
+            else:
+                fh.write("%s\n" % statement)
+        if removal_guard:
+            guard = Rule(sorted(all_deps - all_targets))
+            guard.dump(fh)
+
+
+class _SimpleOrderedSet(object):
+    """
+    Simple ordered set, specialized for use in Rule below only.
+    It doesn't expose a complete API, and normalizes path separators
+    at insertion.
+    """
+
+    def __init__(self):
+        self._list = []
+        self._set = set()
+
+    def __nonzero__(self):
+        return bool(self._set)
+
+    def __bool__(self):
+        return bool(self._set)
+
+    def __iter__(self):
+        return iter(self._list)
+
+    def __contains__(self, key):
+        return key in self._set
+
+    def update(self, iterable):
+        def _add(iterable):
+            emitted = set()
+            for i in iterable:
+                i = i.replace(os.sep, "/")
+                if i not in self._set and i not in emitted:
+                    yield i
+                    emitted.add(i)
+
+        added = list(_add(iterable))
+        self._set.update(added)
+        self._list.extend(added)
+
+
+class Rule(object):
+    """Class handling simple rules in the form:
+    target1 target2 ... : dep1 dep2 ...
+        command1 command2 ...
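+
+    For example (a hypothetical sketch of the API):
+
+        rule = Rule(["foo"])
+        rule.add_dependencies(["bar", "baz"])
+        rule.add_commands(["touch $@"])
+        rule.dump(sys.stdout)
+
+    writes "foo: bar baz" followed by a tab-indented "touch $@" command line.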
+ """ + + def __init__(self, targets=()): + self._targets = _SimpleOrderedSet() + self._dependencies = _SimpleOrderedSet() + self._commands = [] + self.add_targets(targets) + + def add_targets(self, targets): + """Add additional targets to the rule.""" + assert isinstance(targets, Iterable) and not isinstance( + targets, six.string_types + ) + targets = list(targets) + for target in targets: + assert isinstance(target, six.text_type) + self._targets.update(targets) + return self + + def add_dependencies(self, deps): + """Add dependencies to the rule.""" + assert isinstance(deps, Iterable) and not isinstance(deps, six.string_types) + deps = list(deps) + for dep in deps: + assert isinstance(dep, six.text_type) + self._dependencies.update(deps) + return self + + def add_commands(self, commands): + """Add commands to the rule.""" + assert isinstance(commands, Iterable) and not isinstance( + commands, six.string_types + ) + commands = list(commands) + for command in commands: + assert isinstance(command, six.text_type) + self._commands.extend(commands) + return self + + def targets(self): + """Return an iterator on the rule targets.""" + # Ensure the returned iterator is actually just that, an iterator. + # Avoids caller fiddling with the set itself. + return iter(self._targets) + + def dependencies(self): + """Return an iterator on the rule dependencies.""" + return iter(d for d in self._dependencies if d not in self._targets) + + def commands(self): + """Return an iterator on the rule commands.""" + return iter(self._commands) + + def dump(self, fh): + """ + Dump the rule to the given file handle. + """ + if not self._targets: + return + fh.write("%s:" % " ".join(self._targets)) + if self._dependencies: + fh.write(" %s" % " ".join(self.dependencies())) + fh.write("\n") + for cmd in self._commands: + fh.write("\t%s\n" % cmd) + + +# colon followed by anything except a slash (Windows path detection) +_depfilesplitter = re.compile(r":(?![\\/])") + + +def read_dep_makefile(fh): + """ + Read the file handler containing a dep makefile (simple makefile only + containing dependencies) and returns an iterator of the corresponding Rules + it contains. Ignores removal guard rules. + """ + + rule = "" + for line in fh.readlines(): + line = six.ensure_text(line) + assert not line.startswith("\t") + line = line.strip() + if line.endswith("\\"): + rule += line[:-1] + else: + rule += line + split_rule = _depfilesplitter.split(rule, 1) + if len(split_rule) > 1 and split_rule[1].strip(): + yield Rule(split_rule[0].strip().split()).add_dependencies( + split_rule[1].strip().split() + ) + rule = "" + + if rule: + raise Exception("Makefile finishes with a backslash. Expected more input.") + + +def write_dep_makefile(fh, target, deps): + """ + Write a Makefile containing only target's dependencies to the file handle + specified. + """ + mk = Makefile() + rule = mk.create_rule(targets=[target]) + rule.add_dependencies(deps) + mk.dump(fh, removal_guard=True) diff --git a/python/mozbuild/mozbuild/mozconfig.py b/python/mozbuild/mozbuild/mozconfig.py new file mode 100644 index 0000000000..5cb5a5e859 --- /dev/null +++ b/python/mozbuild/mozbuild/mozconfig.py @@ -0,0 +1,403 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import os
+import re
+import subprocess
+import sys
+import traceback
+from pathlib import Path
+from textwrap import dedent
+
+import six
+from mozboot.mozconfig import find_mozconfig
+from mozpack import path as mozpath
+
+MOZCONFIG_BAD_EXIT_CODE = """
+Evaluation of your mozconfig exited with an error. This could be triggered
+by a command inside your mozconfig failing. Please change your mozconfig
+to not error and/or to catch errors in executed commands.
+""".strip()
+
+MOZCONFIG_BAD_OUTPUT = """
+Evaluation of your mozconfig produced unexpected output. This could be
+triggered by a command inside your mozconfig failing or producing some warnings
+or error messages. Please change your mozconfig to not error and/or to catch
+errors in executed commands.
+""".strip()
+
+
+class MozconfigLoadException(Exception):
+    """Raised when a mozconfig could not be loaded properly.
+
+    This typically indicates a malformed or misbehaving mozconfig file.
+    """
+
+    def __init__(self, path, message, output=None):
+        self.path = path
+        self.output = output
+
+        message = (
+            dedent(
+                """
+            Error loading mozconfig: {path}
+
+            {message}
+            """
+            )
+            .format(path=self.path, message=message)
+            .lstrip()
+        )
+
+        if self.output:
+            message += dedent(
+                """
+            mozconfig output:
+
+            {output}
+            """
+            ).format(output="\n".join([six.ensure_text(s) for s in self.output]))
+
+        Exception.__init__(self, message)
+
+
+class MozconfigLoader(object):
+    """Handles loading and parsing of mozconfig files."""
+
+    RE_MAKE_VARIABLE = re.compile(
+        r"""
+        ^\s*                    # Leading whitespace
+        (?P<var>[a-zA-Z_0-9]+)  # Variable name
+        \s* [?:]?= \s*          # Assignment operator surrounded by optional
+                                # spaces
+        (?P<value>.*$)""",      # Everything else (likely the value)
+        re.VERBOSE,
+    )
+
+    IGNORE_SHELL_VARIABLES = {"_", "BASH_ARGV", "BASH_ARGV0", "BASH_ARGC"}
+
+    ENVIRONMENT_VARIABLES = {"CC", "CXX", "CFLAGS", "CXXFLAGS", "LDFLAGS", "MOZ_OBJDIR"}
+
+    AUTODETECT = object()
+
+    def __init__(self, topsrcdir):
+        self.topsrcdir = topsrcdir
+
+    @property
+    def _loader_script(self):
+        our_dir = os.path.abspath(os.path.dirname(__file__))
+
+        return os.path.join(our_dir, "mozconfig_loader")
+
+    def read_mozconfig(self, path=None):
+        """Read the contents of a mozconfig into a data structure.
+
+        This takes the path to a mozconfig to load. If the given path is
+        AUTODETECT, this will try to find a mozconfig from the environment
+        using find_mozconfig().
+
+        mozconfig files are shell scripts. So, we can't just parse them.
+        Instead, we run the shell script in a wrapper which allows us to record
+        state from execution. Thus, the output from a mozconfig is a friendly
+        static data structure.
+        """
+        if path is self.AUTODETECT:
+            path = find_mozconfig(self.topsrcdir)
+        if isinstance(path, Path):
+            path = str(path)
+
+        result = {
+            "path": path,
+            "topobjdir": None,
+            "configure_args": None,
+            "make_flags": None,
+            "make_extra": None,
+            "env": None,
+            "vars": None,
+        }
+
+        if path is None:
+            if "MOZ_OBJDIR" in os.environ:
+                result["topobjdir"] = os.environ["MOZ_OBJDIR"]
+            return result
+
+        path = mozpath.normsep(path)
+
+        result["configure_args"] = []
+        result["make_extra"] = []
+        result["make_flags"] = []
+
+        # Since mozconfig_loader is a shell script, running it "normally"
+        # actually leads to two shell executions on Windows. Avoid this by
+        # directly calling sh mozconfig_loader.
+        shell = "sh"
+        env = dict(os.environ)
+        env["PYTHONIOENCODING"] = "utf-8"
+
+        if "MOZILLABUILD" in os.environ:
+            mozillabuild = os.environ["MOZILLABUILD"]
+            if (Path(mozillabuild) / "msys2").exists():
+                shell = mozillabuild + "/msys2/usr/bin/sh"
+            else:
+                shell = mozillabuild + "/msys/bin/sh"
+            prefer_mozillabuild_path = [
+                os.path.dirname(shell),
+                str(Path(mozillabuild) / "bin"),
+                env["PATH"],
+            ]
+            env["PATH"] = os.pathsep.join(prefer_mozillabuild_path)
+        if sys.platform == "win32":
+            shell = shell + ".exe"
+
+        command = [
+            mozpath.normsep(shell),
+            mozpath.normsep(self._loader_script),
+            mozpath.normsep(self.topsrcdir),
+            mozpath.normsep(path),
+            mozpath.normsep(sys.executable),
+            mozpath.join(mozpath.dirname(self._loader_script), "action", "dump_env.py"),
+        ]
+
+        try:
+            # We need to capture stderr because that's where the shell sends
+            # errors if execution fails.
+            output = six.ensure_text(
+                subprocess.check_output(
+                    command,
+                    stderr=subprocess.STDOUT,
+                    cwd=self.topsrcdir,
+                    env=env,
+                    universal_newlines=True,
+                    encoding="utf-8",
+                )
+            )
+        except subprocess.CalledProcessError as e:
+            lines = e.output.splitlines()
+
+            # Output before actual execution shouldn't be relevant.
+            try:
+                index = lines.index("------END_BEFORE_SOURCE")
+                lines = lines[index + 1 :]
+            except ValueError:
+                pass
+
+            raise MozconfigLoadException(path, MOZCONFIG_BAD_EXIT_CODE, lines)
+
+        try:
+            parsed = self._parse_loader_output(output)
+        except AssertionError:
+            # _parse_loader_output uses assertions to verify the
+            # well-formedness of the shell output; when these fail, it
+            # generally means there was a problem with the output, but we
+            # include the assertion traceback just to be sure.
+            print("Assertion failed in _parse_loader_output:")
+            traceback.print_exc()
+            raise MozconfigLoadException(
+                path, MOZCONFIG_BAD_OUTPUT, output.splitlines()
+            )
+
+        def diff_vars(vars_before, vars_after):
+            set1 = set(vars_before.keys()) - self.IGNORE_SHELL_VARIABLES
+            set2 = set(vars_after.keys()) - self.IGNORE_SHELL_VARIABLES
+            added = set2 - set1
+            removed = set1 - set2
+            maybe_modified = set1 & set2
+            changed = {"added": {}, "removed": {}, "modified": {}, "unmodified": {}}
+
+            for key in added:
+                changed["added"][key] = vars_after[key]
+
+            for key in removed:
+                changed["removed"][key] = vars_before[key]
+
+            for key in maybe_modified:
+                if vars_before[key] != vars_after[key]:
+                    changed["modified"][key] = (vars_before[key], vars_after[key])
+                elif key in self.ENVIRONMENT_VARIABLES:
+                    # So that irrelevant environment variable changes don't
+                    # trigger re-running configure, only a fixed set of
+                    # environment variables is stored when unmodified.
+                    # Otherwise, changes such as using a different terminal
+                    # window, or even rebooting, would trigger reconfigures.
+                    changed["unmodified"][key] = vars_after[key]
+
+            return changed
+
+        result["env"] = diff_vars(parsed["env_before"], parsed["env_after"])
+
+        # Environment variables also appear as shell variables, but that's
+        # uninteresting duplication of information. Filter them out.
+ def filt(x, y): + return {k: v for k, v in x.items() if k not in y} + + result["vars"] = diff_vars( + filt(parsed["vars_before"], parsed["env_before"]), + filt(parsed["vars_after"], parsed["env_after"]), + ) + + result["configure_args"] = [self._expand(o) for o in parsed["ac"]] + + if "MOZ_OBJDIR" in parsed["env_before"]: + result["topobjdir"] = parsed["env_before"]["MOZ_OBJDIR"] + + mk = [self._expand(o) for o in parsed["mk"]] + + for o in mk: + match = self.RE_MAKE_VARIABLE.match(o) + + if match is None: + result["make_extra"].append(o) + continue + + name, value = match.group("var"), match.group("value") + + if name == "MOZ_MAKE_FLAGS": + result["make_flags"] = value.split() + continue + + if name == "MOZ_OBJDIR": + result["topobjdir"] = value + if parsed["env_before"].get("MOZ_PROFILE_GENERATE") == "1": + # If MOZ_OBJDIR is specified in the mozconfig, we need to + # make sure that the '/instrumented' directory gets appended + # for the first build to avoid an objdir mismatch when + # running 'mach package' on Windows. + result["topobjdir"] = mozpath.join( + result["topobjdir"], "instrumented" + ) + continue + + result["make_extra"].append(o) + + return result + + def _parse_loader_output(self, output): + mk_options = [] + ac_options = [] + before_source = {} + after_source = {} + env_before_source = {} + env_after_source = {} + + current = None + current_type = None + in_variable = None + + for line in output.splitlines(): + + if not line: + continue + + if line.startswith("------BEGIN_"): + assert current_type is None + assert current is None + assert not in_variable + current_type = line[len("------BEGIN_") :] + current = [] + continue + + if line.startswith("------END_"): + assert not in_variable + section = line[len("------END_") :] + assert current_type == section + + if current_type == "AC_OPTION": + ac_options.append("\n".join(current)) + elif current_type == "MK_OPTION": + mk_options.append("\n".join(current)) + + current = None + current_type = None + continue + + assert current_type is not None + + vars_mapping = { + "BEFORE_SOURCE": before_source, + "AFTER_SOURCE": after_source, + "ENV_BEFORE_SOURCE": env_before_source, + "ENV_AFTER_SOURCE": env_after_source, + } + + if current_type in vars_mapping: + # mozconfigs are sourced using the Bourne shell (or at least + # in Bourne shell mode). This means |set| simply lists + # variables from the current shell (not functions). (Note that + # if Bash is installed in /bin/sh it acts like regular Bourne + # and doesn't print functions.) So, lines should have the + # form: + # + # key='value' + # key=value + # + # The only complication is multi-line variables. Those have the + # form: + # + # key='first + # second' + + # TODO Bug 818377 Properly handle multi-line variables of form: + # $ foo="a='b' + # c='d'" + # $ set + # foo='a='"'"'b'"'"' + # c='"'"'d'"'" + + name = in_variable + value = None + if in_variable: + # Reached the end of a multi-line variable. + if line.endswith("'") and not line.endswith("\\'"): + current.append(line[:-1]) + value = "\n".join(current) + in_variable = None + else: + current.append(line) + continue + else: + equal_pos = line.find("=") + + if equal_pos < 1: + # TODO log warning? + continue + + name = line[0:equal_pos] + value = line[equal_pos + 1 :] + + if len(value): + has_quote = value[0] == "'" + + if has_quote: + value = value[1:] + + # Lines with a quote not ending in a quote are multi-line. 
+ if has_quote and not value.endswith("'"): + in_variable = name + current.append(value) + continue + else: + value = value[:-1] if has_quote else value + + assert name is not None + + vars_mapping[current_type][name] = value + + current = [] + + continue + + current.append(line) + + return { + "mk": mk_options, + "ac": ac_options, + "vars_before": before_source, + "vars_after": after_source, + "env_before": env_before_source, + "env_after": env_after_source, + } + + def _expand(self, s): + return s.replace("@TOPSRCDIR@", self.topsrcdir) diff --git a/python/mozbuild/mozbuild/mozconfig_loader b/python/mozbuild/mozbuild/mozconfig_loader new file mode 100755 index 0000000000..29355c69a2 --- /dev/null +++ b/python/mozbuild/mozbuild/mozconfig_loader @@ -0,0 +1,48 @@ +#!/bin/sh +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script provides an execution environment for mozconfig scripts. +# This script is not meant to be called by users. Instead, some +# higher-level driver invokes it and parses the machine-tailored output. + +set -e + +ac_add_options() { + for _mozconfig_opt; do + echo "------BEGIN_AC_OPTION" + echo $_mozconfig_opt + echo "------END_AC_OPTION" + done +} + +mk_add_options() { + for _mozconfig_opt; do + echo "------BEGIN_MK_OPTION" + echo $_mozconfig_opt + echo "------END_MK_OPTION" + done +} + +echo "------BEGIN_ENV_BEFORE_SOURCE" +"$3" "$4" +echo "------END_ENV_BEFORE_SOURCE" + +echo "------BEGIN_BEFORE_SOURCE" +set +echo "------END_BEFORE_SOURCE" + +topsrcdir="$1" + +. "$2" + +unset topsrcdir + +echo "------BEGIN_AFTER_SOURCE" +set +echo "------END_AFTER_SOURCE" + +echo "------BEGIN_ENV_AFTER_SOURCE" +"$3" "$4" +echo "------END_ENV_AFTER_SOURCE" diff --git a/python/mozbuild/mozbuild/mozinfo.py b/python/mozbuild/mozbuild/mozinfo.py new file mode 100644 index 0000000000..42edabe32d --- /dev/null +++ b/python/mozbuild/mozbuild/mozinfo.py @@ -0,0 +1,163 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This module produces a JSON file that provides basic build info and +# configuration metadata. + +import json +import os +import platform +import re + +import six + + +def build_dict(config, env=os.environ): + """ + Build a dict containing data about the build configuration from + the environment. + """ + substs = config.substs + + # Check that all required variables are present first. + required = ["TARGET_CPU", "OS_TARGET"] + missing = [r for r in required if r not in substs] + if missing: + raise Exception( + "Missing required environment variables: %s" % ", ".join(missing) + ) + + d = {} + d["topsrcdir"] = config.topsrcdir + + if config.mozconfig: + d["mozconfig"] = config.mozconfig + + # os + o = substs["OS_TARGET"] + known_os = {"Linux": "linux", "WINNT": "win", "Darwin": "mac", "Android": "android"} + if o in known_os: + d["os"] = known_os[o] + else: + # Allow unknown values, just lowercase them. + d["os"] = o.lower() + + # Widget toolkit, just pass the value directly through. 
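+    # (typical values include, e.g., "gtk", "cocoa", "windows", and "android")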
+ d["toolkit"] = substs.get("MOZ_WIDGET_TOOLKIT") + + # Application name + if "MOZ_APP_NAME" in substs: + d["appname"] = substs["MOZ_APP_NAME"] + + # Build app name + if "MOZ_BUILD_APP" in substs: + d["buildapp"] = substs["MOZ_BUILD_APP"] + + # processor + p = substs["TARGET_CPU"] + # do some slight massaging for some values + # TODO: retain specific values in case someone wants them? + if p.startswith("arm"): + p = "arm" + elif re.match("i[3-9]86", p): + p = "x86" + d["processor"] = p + # hardcoded list of 64-bit CPUs + if p in ["x86_64", "ppc64", "aarch64"]: + d["bits"] = 64 + # hardcoded list of known 32-bit CPUs + elif p in ["x86", "arm", "ppc"]: + d["bits"] = 32 + # other CPUs will wind up with unknown bits + + d["debug"] = substs.get("MOZ_DEBUG") == "1" + d["nightly_build"] = substs.get("NIGHTLY_BUILD") == "1" + d["early_beta_or_earlier"] = substs.get("EARLY_BETA_OR_EARLIER") == "1" + d["release_or_beta"] = substs.get("RELEASE_OR_BETA") == "1" + d["devedition"] = substs.get("MOZ_DEV_EDITION") == "1" + d["pgo"] = substs.get("MOZ_PGO") == "1" + d["crashreporter"] = bool(substs.get("MOZ_CRASHREPORTER")) + d["normandy"] = substs.get("MOZ_NORMANDY") == "1" + d["datareporting"] = bool(substs.get("MOZ_DATA_REPORTING")) + d["healthreport"] = substs.get("MOZ_SERVICES_HEALTHREPORT") == "1" + d["sync"] = substs.get("MOZ_SERVICES_SYNC") == "1" + # FIXME(emilio): We need to update a lot of WPT expectations before removing this. + d["stylo"] = True + d["asan"] = substs.get("MOZ_ASAN") == "1" + d["tsan"] = substs.get("MOZ_TSAN") == "1" + d["ubsan"] = substs.get("MOZ_UBSAN") == "1" + d["telemetry"] = substs.get("MOZ_TELEMETRY_REPORTING") == "1" + d["tests_enabled"] = substs.get("ENABLE_TESTS") == "1" + d["bin_suffix"] = substs.get("BIN_SUFFIX", "") + d["require_signing"] = substs.get("MOZ_REQUIRE_SIGNING") == "1" + d["official"] = bool(substs.get("MOZILLA_OFFICIAL")) + d["updater"] = substs.get("MOZ_UPDATER") == "1" + d["artifact"] = substs.get("MOZ_ARTIFACT_BUILDS") == "1" + d["ccov"] = substs.get("MOZ_CODE_COVERAGE") == "1" + d["cc_type"] = substs.get("CC_TYPE") + d["domstreams"] = substs.get("MOZ_DOM_STREAMS") == "1" + d["isolated_process"] = ( + substs.get("MOZ_ANDROID_CONTENT_SERVICE_ISOLATED_PROCESS") == "1" + ) + + def guess_platform(): + if d["buildapp"] == "browser": + p = d["os"] + if p == "mac": + p = "macosx64" + elif d["bits"] == 64: + p = "{}64".format(p) + elif p in ("win",): + p = "{}32".format(p) + + if d["asan"]: + p = "{}-asan".format(p) + + return p + + if d["buildapp"] == "mobile/android": + if d["processor"] == "x86": + return "android-x86" + if d["processor"] == "x86_64": + return "android-x86_64" + if d["processor"] == "aarch64": + return "android-aarch64" + return "android-arm" + + def guess_buildtype(): + if d["debug"]: + return "debug" + if d["pgo"]: + return "pgo" + return "opt" + + # if buildapp or bits are unknown, we don't have a configuration similar to + # any in automation and the guesses are useless. 
+ if "buildapp" in d and (d["os"] == "mac" or "bits" in d): + d["platform_guess"] = guess_platform() + d["buildtype_guess"] = guess_buildtype() + + if ( + d.get("buildapp", "") == "mobile/android" + and "MOZ_ANDROID_MIN_SDK_VERSION" in substs + ): + d["android_min_sdk"] = substs["MOZ_ANDROID_MIN_SDK_VERSION"] + + d["is_ubuntu"] = "Ubuntu" in platform.version() + + return d + + +def write_mozinfo(file, config, env=os.environ): + """Write JSON data about the configuration specified in config and an + environment variable dict to ``|file|``, which may be a filename or file-like + object. + See build_dict for information about what environment variables are used, + and what keys are produced. + """ + build_conf = build_dict(config, env) + if isinstance(file, six.text_type): + file = open(file, "wt") + + json.dump(build_conf, file, sort_keys=True, indent=4) diff --git a/python/mozbuild/mozbuild/nodeutil.py b/python/mozbuild/mozbuild/nodeutil.py new file mode 100644 index 0000000000..8ec724ab89 --- /dev/null +++ b/python/mozbuild/mozbuild/nodeutil.py @@ -0,0 +1,126 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import platform +import subprocess +from distutils.version import StrictVersion + +from mozboot.util import get_tools_dir +from mozfile import which +from six import PY3 + +NODE_MIN_VERSION = StrictVersion("12.22.12") +NPM_MIN_VERSION = StrictVersion("6.14.16") + + +def find_node_paths(): + """Determines the possible paths for node executables. + + Returns a list of paths, which includes the build state directory. + """ + mozbuild_tools_dir = get_tools_dir() + + if platform.system() == "Windows": + mozbuild_node_path = os.path.join(mozbuild_tools_dir, "node") + else: + mozbuild_node_path = os.path.join(mozbuild_tools_dir, "node", "bin") + + # We still fallback to the PATH, since on OSes that don't have toolchain + # artifacts available to download, Node may be coming from $PATH. + paths = [mozbuild_node_path] + os.environ.get("PATH").split(os.pathsep) + + if platform.system() == "Windows": + paths += [ + "%s\\nodejs" % os.environ.get("SystemDrive"), + os.path.join(os.environ.get("ProgramFiles"), "nodejs"), + os.path.join(os.environ.get("PROGRAMW6432"), "nodejs"), + os.path.join(os.environ.get("PROGRAMFILES"), "nodejs"), + ] + + return paths + + +def check_executable_version(exe, wrap_call_with_node=False): + """Determine the version of a Node executable by invoking it. + + May raise ``subprocess.CalledProcessError`` or ``ValueError`` on failure. + """ + out = None + # npm may be a script (Except on Windows), so we must call it with node. + if wrap_call_with_node and platform.system() != "Windows": + binary, _ = find_node_executable() + if binary: + out = ( + subprocess.check_output( + [binary, exe, "--version"], universal_newlines=PY3 + ) + .lstrip("v") + .rstrip() + ) + + # If we can't find node, or we don't need to wrap it, fallback to calling + # direct. + if not out: + out = ( + subprocess.check_output([exe, "--version"], universal_newlines=PY3) + .lstrip("v") + .rstrip() + ) + return StrictVersion(out) + + +def find_node_executable( + nodejs_exe=os.environ.get("NODEJS"), min_version=NODE_MIN_VERSION +): + """Find a Node executable from the mozbuild directory. + + Returns a tuple containing the the path to an executable binary and a + version tuple. 
+    Both tuple entries will be None if a Node executable
+    could not be resolved.
+    """
+    if nodejs_exe:
+        try:
+            version = check_executable_version(nodejs_exe)
+        except (subprocess.CalledProcessError, ValueError):
+            return None, None
+
+        if version >= min_version:
+            return nodejs_exe, version.version
+
+        return None, None
+
+    # "nodejs" is first in the tuple on the assumption that it's only likely to
+    # exist on systems (probably linux distros) where there is a program in the path
+    # called "node" that does something else.
+    return find_executable("node", min_version)
+
+
+def find_npm_executable(min_version=NPM_MIN_VERSION):
+    """Find an npm executable from the mozbuild directory.
+
+    Returns a tuple containing the path to an executable binary and a
+    version tuple. Both tuple entries will be None if an npm executable
+    could not be resolved.
+    """
+    return find_executable("npm", min_version, True)
+
+
+def find_executable(name, min_version, use_node_for_version_check=False):
+    paths = find_node_paths()
+    exe = which(name, path=paths)
+
+    if not exe:
+        return None, None
+
+    # Verify we can invoke the executable and its version is acceptable.
+    try:
+        version = check_executable_version(exe, use_node_for_version_check)
+    except (subprocess.CalledProcessError, ValueError):
+        return None, None
+
+    if version < min_version:
+        return None, None
+
+    return exe, version.version
diff --git a/python/mozbuild/mozbuild/preprocessor.py b/python/mozbuild/mozbuild/preprocessor.py
new file mode 100644
index 0000000000..193eb58475
--- /dev/null
+++ b/python/mozbuild/mozbuild/preprocessor.py
@@ -0,0 +1,938 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This is a very primitive line-based preprocessor, for times when using
+a C preprocessor isn't an option.
+
+It currently supports the following grammar for expressions; whitespace
+is ignored:
+
+expression :
+    and_cond ( '||' expression ) ? ;
+and_cond:
+    test ( '&&' and_cond ) ? ;
+test:
+    unary ( ( '==' | '!=' ) unary ) ? ;
+unary :
+    '!'? value ;
+value :
+    [0-9]+ # integer
+    | 'defined(' \w+ ')'
+    | \w+ # string identifier or value;
+"""
+
+import errno
+import io
+import os
+import re
+import sys
+from optparse import OptionParser
+
+import six
+from mozpack.path import normsep
+
+from mozbuild.makeutil import Makefile
+
+# hack around win32 mangling our line endings
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65443
+if sys.platform == "win32":
+    import msvcrt
+
+    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+    os.linesep = "\n"
+
+
+__all__ = ["Context", "Expression", "Preprocessor", "preprocess"]
+
+
+def _to_text(a):
+    # We end up converting a lot of different types (text_type, binary_type,
+    # int, etc.) to Unicode in this script. This function handles all of those
+    # possibilities.
+    if isinstance(a, (six.text_type, six.binary_type)):
+        return six.ensure_text(a)
+    return six.text_type(a)
+
+
+def path_starts_with(path, prefix):
+    if os.altsep:
+        prefix = prefix.replace(os.altsep, os.sep)
+        path = path.replace(os.altsep, os.sep)
+    prefix = [os.path.normcase(p) for p in prefix.split(os.sep)]
+    path = [os.path.normcase(p) for p in path.split(os.sep)]
+    return path[: len(prefix)] == prefix
+
+
+class Expression:
+    def __init__(self, expression_string):
+        """
+        Create a new expression with this string.
+ The expression will already be parsed into an Abstract Syntax Tree. + """ + self.content = expression_string + self.offset = 0 + self.__ignore_whitespace() + self.e = self.__get_logical_or() + if self.content: + raise Expression.ParseError(self) + + def __get_logical_or(self): + """ + Production: and_cond ( '||' expression ) ? + """ + if not len(self.content): + return None + rv = Expression.__AST("logical_op") + # test + rv.append(self.__get_logical_and()) + self.__ignore_whitespace() + if self.content[:2] != "||": + # no logical op needed, short cut to our prime element + return rv[0] + # append operator + rv.append(Expression.__ASTLeaf("op", self.content[:2])) + self.__strip(2) + self.__ignore_whitespace() + rv.append(self.__get_logical_or()) + self.__ignore_whitespace() + return rv + + def __get_logical_and(self): + """ + Production: test ( '&&' and_cond ) ? + """ + if not len(self.content): + return None + rv = Expression.__AST("logical_op") + # test + rv.append(self.__get_equality()) + self.__ignore_whitespace() + if self.content[:2] != "&&": + # no logical op needed, short cut to our prime element + return rv[0] + # append operator + rv.append(Expression.__ASTLeaf("op", self.content[:2])) + self.__strip(2) + self.__ignore_whitespace() + rv.append(self.__get_logical_and()) + self.__ignore_whitespace() + return rv + + def __get_equality(self): + """ + Production: unary ( ( '==' | '!=' ) unary ) ? + """ + if not len(self.content): + return None + rv = Expression.__AST("equality") + # unary + rv.append(self.__get_unary()) + self.__ignore_whitespace() + if not re.match("[=!]=", self.content): + # no equality needed, short cut to our prime unary + return rv[0] + # append operator + rv.append(Expression.__ASTLeaf("op", self.content[:2])) + self.__strip(2) + self.__ignore_whitespace() + rv.append(self.__get_unary()) + self.__ignore_whitespace() + return rv + + def __get_unary(self): + """ + Production: '!'? value + """ + # eat whitespace right away, too + not_ws = re.match("!\s*", self.content) + if not not_ws: + return self.__get_value() + rv = Expression.__AST("not") + self.__strip(not_ws.end()) + rv.append(self.__get_value()) + self.__ignore_whitespace() + return rv + + def __get_value(self): + """ + Production: ( [0-9]+ | 'defined(' \w+ ')' | \w+ ) + Note that the order is important, and the expression is kind-of + ambiguous as \w includes 0-9. One could make it unambiguous by + removing 0-9 from the first char of a string literal. + """ + rv = None + m = re.match("defined\s*\(\s*(\w+)\s*\)", self.content) + if m: + word_len = m.end() + rv = Expression.__ASTLeaf("defined", m.group(1)) + else: + word_len = re.match("[0-9]*", self.content).end() + if word_len: + value = int(self.content[:word_len]) + rv = Expression.__ASTLeaf("int", value) + else: + word_len = re.match("\w*", self.content).end() + if word_len: + rv = Expression.__ASTLeaf("string", self.content[:word_len]) + else: + raise Expression.ParseError(self) + self.__strip(word_len) + self.__ignore_whitespace() + return rv + + def __ignore_whitespace(self): + ws_len = re.match("\s*", self.content).end() + self.__strip(ws_len) + return + + def __strip(self, length): + """ + Remove a given amount of chars from the input and update + the offset. 
+ """ + self.content = self.content[length:] + self.offset += length + + def evaluate(self, context): + """ + Evaluate the expression with the given context + """ + + # Helper function to evaluate __get_equality results + def eval_equality(tok): + left = opmap[tok[0].type](tok[0]) + right = opmap[tok[2].type](tok[2]) + rv = left == right + if tok[1].value == "!=": + rv = not rv + return rv + + # Helper function to evaluate __get_logical_and and __get_logical_or results + def eval_logical_op(tok): + left = opmap[tok[0].type](tok[0]) + right = opmap[tok[2].type](tok[2]) + if tok[1].value == "&&": + return left and right + elif tok[1].value == "||": + return left or right + raise Expression.ParseError(self) + + # Mapping from token types to evaluator functions + # Apart from (non-)equality, all these can be simple lambda forms. + opmap = { + "logical_op": eval_logical_op, + "equality": eval_equality, + "not": lambda tok: not opmap[tok[0].type](tok[0]), + "string": lambda tok: context[tok.value], + "defined": lambda tok: tok.value in context, + "int": lambda tok: tok.value, + } + + return opmap[self.e.type](self.e) + + class __AST(list): + """ + Internal class implementing Abstract Syntax Tree nodes + """ + + def __init__(self, type): + self.type = type + super(self.__class__, self).__init__(self) + + class __ASTLeaf: + """ + Internal class implementing Abstract Syntax Tree leafs + """ + + def __init__(self, type, value): + self.value = value + self.type = type + + def __str__(self): + return self.value.__str__() + + def __repr__(self): + return self.value.__repr__() + + class ParseError(Exception): + """ + Error raised when parsing fails. + It has two members, offset and content, which give the offset of the + error and the offending content. + """ + + def __init__(self, expression): + self.offset = expression.offset + self.content = expression.content[:3] + + def __str__(self): + return 'Unexpected content at offset {0}, "{1}"'.format( + self.offset, self.content + ) + + +class Context(dict): + """ + This class holds variable values by subclassing dict, and while it + truthfully reports True and False on + + name in context + + it returns the variable name itself on + + context["name"] + + to reflect the ambiguity between string literals and preprocessor + variables. + """ + + def __getitem__(self, key): + if key in self: + return super(self.__class__, self).__getitem__(key) + return key + + +class Preprocessor: + """ + Class for preprocessing text files. + """ + + class Error(RuntimeError): + def __init__(self, cpp, MSG, context): + self.file = cpp.context["FILE"] + self.line = cpp.context["LINE"] + self.key = MSG + RuntimeError.__init__(self, (self.file, self.line, self.key, context)) + + def __init__(self, defines=None, marker="#"): + self.context = Context() + self.context.update({"FILE": "", "LINE": 0, "DIRECTORY": os.path.abspath(".")}) + try: + # Can import globally because of bootstrapping issues. + from buildconfig import topobjdir, topsrcdir + except ImportError: + # Allow this script to still work independently of a configured objdir. + topsrcdir = topobjdir = None + self.topsrcdir = topsrcdir + self.topobjdir = topobjdir + self.curdir = "." + self.actionLevel = 0 + self.disableLevel = 0 + # ifStates can be + # 0: hadTrue + # 1: wantsTrue + # 2: #else found + self.ifStates = [] + self.checkLineNumbers = False + + # A list of (filter_name, filter_function) pairs. 
+        self.filters = []
+
+        self.cmds = {}
+        for cmd, level in (
+            ("define", 0),
+            ("undef", 0),
+            ("if", sys.maxsize),
+            ("ifdef", sys.maxsize),
+            ("ifndef", sys.maxsize),
+            ("else", 1),
+            ("elif", 1),
+            ("elifdef", 1),
+            ("elifndef", 1),
+            ("endif", sys.maxsize),
+            ("expand", 0),
+            ("literal", 0),
+            ("filter", 0),
+            ("unfilter", 0),
+            ("include", 0),
+            ("includesubst", 0),
+            ("error", 0),
+        ):
+            self.cmds[cmd] = (level, getattr(self, "do_" + cmd))
+        self.out = sys.stdout
+        self.setMarker(marker)
+        self.varsubst = re.compile("@(?P<VAR>\w+)@", re.U)
+        self.includes = set()
+        self.silenceMissingDirectiveWarnings = False
+        if defines:
+            self.context.update(defines)
+
+    def failUnused(self, file):
+        msg = None
+        if self.actionLevel == 0 and not self.silenceMissingDirectiveWarnings:
+            msg = "no preprocessor directives found"
+        elif self.actionLevel == 1:
+            msg = "no useful preprocessor directives found"
+        if msg:
+
+            class Fake(object):
+                pass
+
+            fake = Fake()
+            fake.context = {
+                "FILE": file,
+                "LINE": None,
+            }
+            raise Preprocessor.Error(fake, msg, None)
+
+    def setMarker(self, aMarker):
+        """
+        Set the marker to be used for processing directives.
+        Used for handling CSS files, with pp.setMarker('%'), for example.
+        The given marker may be None, in which case no markers are processed.
+        """
+        self.marker = aMarker
+        if aMarker:
+            instruction_prefix = "\s*{0}"
+            instruction_cmd = "(?P<cmd>[a-z]+)(?:\s+(?P<args>.*?))?\s*$"
+            instruction_fmt = instruction_prefix + instruction_cmd
+            ambiguous_fmt = instruction_prefix + "\s+" + instruction_cmd
+
+            self.instruction = re.compile(instruction_fmt.format(aMarker))
+            self.comment = re.compile(aMarker, re.U)
+            self.ambiguous_comment = re.compile(ambiguous_fmt.format(aMarker))
+        else:
+
+            class NoMatch(object):
+                def match(self, *args):
+                    return False
+
+            self.instruction = self.comment = NoMatch()
+
+    def setSilenceDirectiveWarnings(self, value):
+        """
+        Sets whether missing directive warnings are silenced, according to
+        ``value``. The default behavior of the preprocessor is to emit
+        such warnings.
+        """
+        self.silenceMissingDirectiveWarnings = value
+
+    def addDefines(self, defines):
+        """
+        Adds the specified defines to the preprocessor.
+        ``defines`` may be a dictionary object or an iterable of key/value pairs
+        (as tuples or other iterables of length two)
+        """
+        self.context.update(defines)
+
+    def clone(self):
+        """
+        Create a clone of the current processor, including line ending
+        settings, marker, variable definitions, output stream.
+        """
+        rv = Preprocessor()
+        rv.context.update(self.context)
+        rv.setMarker(self.marker)
+        rv.out = self.out
+        return rv
+
+    def processFile(self, input, output, depfile=None):
+        """
+        Preprocesses the contents of the ``input`` stream and writes the result
+        to the ``output`` stream. If ``depfile`` is set, the dependencies of
+        ``output`` file are written to ``depfile`` in Makefile format.
+        """
+        self.out = output
+
+        self.do_include(input, False)
+        self.failUnused(input.name)
+
+        if depfile:
+            mk = Makefile()
+            mk.create_rule([output.name]).add_dependencies(self.includes)
+            mk.dump(depfile)
+
+    def computeDependencies(self, input):
+        """
+        Reads the ``input`` stream, and computes the dependencies for that input.
+        """
+        try:
+            old_out = self.out
+            self.out = None
+            self.do_include(input, False)
+
+            return self.includes
+        finally:
+            self.out = old_out
+
+    def applyFilters(self, aLine):
+        for f in self.filters:
+            aLine = f[1](aLine)
+        return aLine
+
+    def noteLineInfo(self):
+        # Record the current line and file.
Called once before transitioning + # into or out of an included file and after writing each line. + self.line_info = self.context["FILE"], self.context["LINE"] + + def write(self, aLine): + """ + Internal method for handling output. + """ + if not self.out: + return + + next_line, next_file = self.context["LINE"], self.context["FILE"] + if self.checkLineNumbers: + expected_file, expected_line = self.line_info + expected_line += 1 + if ( + expected_line != next_line + or expected_file + and expected_file != next_file + ): + self.out.write( + '//@line {line} "{file}"\n'.format(line=next_line, file=next_file) + ) + self.noteLineInfo() + + filteredLine = self.applyFilters(aLine) + if filteredLine != aLine: + self.actionLevel = 2 + self.out.write(filteredLine) + + def handleCommandLine(self, args, defaultToStdin=False): + """ + Parse a commandline into this parser. + Uses OptionParser internally, no args mean sys.argv[1:]. + """ + + def get_output_file(path, encoding=None): + if encoding is None: + encoding = "utf-8" + dir = os.path.dirname(path) + if dir: + try: + os.makedirs(dir) + except OSError as error: + if error.errno != errno.EEXIST: + raise + return io.open(path, "w", encoding=encoding, newline="\n") + + p = self.getCommandLineParser() + options, args = p.parse_args(args=args) + out = self.out + depfile = None + + if options.output: + out = get_output_file(options.output, options.output_encoding) + elif options.output_encoding: + raise Preprocessor.Error( + self, "--output-encoding doesn't work without --output", None + ) + if defaultToStdin and len(args) == 0: + args = [sys.stdin] + if options.depend: + raise Preprocessor.Error(self, "--depend doesn't work with stdin", None) + if options.depend: + if not options.output: + raise Preprocessor.Error( + self, "--depend doesn't work with stdout", None + ) + depfile = get_output_file(options.depend) + + if args: + for f in args: + if not isinstance(f, io.TextIOBase): + f = io.open(f, "r", encoding="utf-8") + with f as input_: + self.processFile(input=input_, output=out) + if depfile: + mk = Makefile() + mk.create_rule([six.ensure_text(options.output)]).add_dependencies( + self.includes + ) + mk.dump(depfile) + depfile.close() + + if options.output: + out.close() + + def getCommandLineParser(self, unescapeDefines=False): + escapedValue = re.compile('".*"$') + numberValue = re.compile("\d+$") + + def handleD(option, opt, value, parser): + vals = value.split("=", 1) + if len(vals) == 1: + vals.append(1) + elif unescapeDefines and escapedValue.match(vals[1]): + # strip escaped string values + vals[1] = vals[1][1:-1] + elif numberValue.match(vals[1]): + vals[1] = int(vals[1]) + self.context[vals[0]] = vals[1] + + def handleU(option, opt, value, parser): + del self.context[value] + + def handleF(option, opt, value, parser): + self.do_filter(value) + + def handleMarker(option, opt, value, parser): + self.setMarker(value) + + def handleSilenceDirectiveWarnings(option, opt, value, parse): + self.setSilenceDirectiveWarnings(True) + + p = OptionParser() + p.add_option( + "-D", + action="callback", + callback=handleD, + type="string", + metavar="VAR[=VAL]", + help="Define a variable", + ) + p.add_option( + "-U", + action="callback", + callback=handleU, + type="string", + metavar="VAR", + help="Undefine a variable", + ) + p.add_option( + "-F", + action="callback", + callback=handleF, + type="string", + metavar="FILTER", + help="Enable the specified filter", + ) + p.add_option( + "-o", + "--output", + type="string", + default=None, + metavar="FILENAME", 
+ help="Output to the specified file instead of stdout", + ) + p.add_option( + "--depend", + type="string", + default=None, + metavar="FILENAME", + help="Generate dependencies in the given file", + ) + p.add_option( + "--marker", + action="callback", + callback=handleMarker, + type="string", + help="Use the specified marker instead of #", + ) + p.add_option( + "--silence-missing-directive-warnings", + action="callback", + callback=handleSilenceDirectiveWarnings, + help="Don't emit warnings about missing directives", + ) + p.add_option( + "--output-encoding", + type="string", + default=None, + metavar="ENCODING", + help="Encoding to use for the output", + ) + return p + + def handleLine(self, aLine): + """ + Handle a single line of input (internal). + """ + if self.actionLevel == 0 and self.comment.match(aLine): + self.actionLevel = 1 + m = self.instruction.match(aLine) + if m: + args = None + cmd = m.group("cmd") + try: + args = m.group("args") + except IndexError: + pass + if cmd not in self.cmds: + raise Preprocessor.Error(self, "INVALID_CMD", aLine) + level, cmd = self.cmds[cmd] + if level >= self.disableLevel: + cmd(args) + if cmd != "literal": + self.actionLevel = 2 + elif self.disableLevel == 0: + if self.comment.match(aLine): + # make sure the comment is not ambiguous with a command + m = self.ambiguous_comment.match(aLine) + if m: + cmd = m.group("cmd") + if cmd in self.cmds: + raise Preprocessor.Error(self, "AMBIGUOUS_COMMENT", aLine) + else: + self.write(aLine) + + # Instruction handlers + # These are named do_'instruction name' and take one argument + + # Variables + def do_define(self, args): + m = re.match("(?P\w+)(?:\s(?P.*))?", args, re.U) + if not m: + raise Preprocessor.Error(self, "SYNTAX_DEF", args) + val = "" + if m.group("value"): + val = self.applyFilters(m.group("value")) + try: + val = int(val) + except Exception: + pass + self.context[m.group("name")] = val + + def do_undef(self, args): + m = re.match("(?P\w+)$", args, re.U) + if not m: + raise Preprocessor.Error(self, "SYNTAX_DEF", args) + if args in self.context: + del self.context[args] + + # Logic + def ensure_not_else(self): + if len(self.ifStates) == 0 or self.ifStates[-1] == 2: + sys.stderr.write( + "WARNING: bad nesting of #else in %s\n" % self.context["FILE"] + ) + + def do_if(self, args, replace=False): + if self.disableLevel and not replace: + self.disableLevel += 1 + return + val = None + try: + e = Expression(args) + val = e.evaluate(self.context) + except Exception: + # XXX do real error reporting + raise Preprocessor.Error(self, "SYNTAX_ERR", args) + if isinstance(val, six.text_type) or isinstance(val, six.binary_type): + # we're looking for a number value, strings are false + val = False + if not val: + self.disableLevel = 1 + if replace: + if val: + self.disableLevel = 0 + self.ifStates[-1] = self.disableLevel + else: + self.ifStates.append(self.disableLevel) + + def do_ifdef(self, args, replace=False): + if self.disableLevel and not replace: + self.disableLevel += 1 + return + if re.search("\W", args, re.U): + raise Preprocessor.Error(self, "INVALID_VAR", args) + if args not in self.context: + self.disableLevel = 1 + if replace: + if args in self.context: + self.disableLevel = 0 + self.ifStates[-1] = self.disableLevel + else: + self.ifStates.append(self.disableLevel) + + def do_ifndef(self, args, replace=False): + if self.disableLevel and not replace: + self.disableLevel += 1 + return + if re.search("\W", args, re.U): + raise Preprocessor.Error(self, "INVALID_VAR", args) + if args in self.context: 
+            self.disableLevel = 1
+        if replace:
+            if args not in self.context:
+                self.disableLevel = 0
+            self.ifStates[-1] = self.disableLevel
+        else:
+            self.ifStates.append(self.disableLevel)
+
+    def do_else(self, args, ifState=2):
+        self.ensure_not_else()
+        hadTrue = self.ifStates[-1] == 0
+        self.ifStates[-1] = ifState  # in-else
+        if hadTrue:
+            self.disableLevel = 1
+            return
+        self.disableLevel = 0
+
+    def do_elif(self, args):
+        if self.disableLevel == 1:
+            if self.ifStates[-1] == 1:
+                self.do_if(args, replace=True)
+        else:
+            self.do_else(None, self.ifStates[-1])
+
+    def do_elifdef(self, args):
+        if self.disableLevel == 1:
+            if self.ifStates[-1] == 1:
+                self.do_ifdef(args, replace=True)
+        else:
+            self.do_else(None, self.ifStates[-1])
+
+    def do_elifndef(self, args):
+        if self.disableLevel == 1:
+            if self.ifStates[-1] == 1:
+                self.do_ifndef(args, replace=True)
+        else:
+            self.do_else(None, self.ifStates[-1])
+
+    def do_endif(self, args):
+        if self.disableLevel > 0:
+            self.disableLevel -= 1
+        if self.disableLevel == 0:
+            self.ifStates.pop()
+
+    # output processing
+    def do_expand(self, args):
+        lst = re.split("__(\w+)__", args, flags=re.U)
+
+        def vsubst(v):
+            if v in self.context:
+                return _to_text(self.context[v])
+            return ""
+
+        for i in range(1, len(lst), 2):
+            lst[i] = vsubst(lst[i])
+        lst.append("\n")  # add back the newline
+        self.write(six.moves.reduce(lambda x, y: x + y, lst, ""))
+
+    def do_literal(self, args):
+        self.write(args + "\n")
+
+    def do_filter(self, args):
+        filters = [f for f in args.split(" ") if hasattr(self, "filter_" + f)]
+        if len(filters) == 0:
+            return
+        current = dict(self.filters)
+        for f in filters:
+            current[f] = getattr(self, "filter_" + f)
+        self.filters = [(fn, current[fn]) for fn in sorted(current.keys())]
+        return
+
+    def do_unfilter(self, args):
+        filters = args.split(" ")
+        current = dict(self.filters)
+        for f in filters:
+            if f in current:
+                del current[f]
+        self.filters = [(fn, current[fn]) for fn in sorted(current.keys())]
+        return
+
+    # Filters
+    #
+    # emptyLines: Strips blank lines from the output.
+    def filter_emptyLines(self, aLine):
+        if aLine == "\n":
+            return ""
+        return aLine
+
+    # dumbComments: Empties out lines that consist of optional whitespace
+    # followed by a `//`.
+    def filter_dumbComments(self, aLine):
+        return re.sub("^\s*//.*", "", aLine)
+
+    # substitution: variables wrapped in @ are replaced with their value.
+    def filter_substitution(self, aLine, fatal=True):
+        def repl(matchobj):
+            varname = matchobj.group("VAR")
+            if varname in self.context:
+                return _to_text(self.context[varname])
+            if fatal:
+                raise Preprocessor.Error(self, "UNDEFINED_VAR", varname)
+            return matchobj.group(0)
+
+        return self.varsubst.sub(repl, aLine)
+
+    # attemptSubstitution: variables wrapped in @ are replaced with their
+    # value, or an empty string if the variable is not defined.
+    def filter_attemptSubstitution(self, aLine):
+        return self.filter_substitution(aLine, fatal=False)
+
+    # File ops
+    def do_include(self, args, filters=True):
+        """
+        Preprocess a given file.
+        args can either be a file name, or a file-like object.
+        Files should be opened, and will be closed after processing.
+ """ + isName = isinstance(args, six.string_types) + oldCheckLineNumbers = self.checkLineNumbers + self.checkLineNumbers = False + if isName: + try: + args = _to_text(args) + if filters: + args = self.applyFilters(args) + if not os.path.isabs(args): + args = os.path.join(self.curdir, args) + args = io.open(args, "r", encoding="utf-8") + except Preprocessor.Error: + raise + except Exception: + raise Preprocessor.Error(self, "FILE_NOT_FOUND", _to_text(args)) + self.checkLineNumbers = bool( + re.search("\.(js|jsm|java|webidl)(?:\.in)?$", args.name) + ) + oldFile = self.context["FILE"] + oldLine = self.context["LINE"] + oldDir = self.context["DIRECTORY"] + oldCurdir = self.curdir + self.noteLineInfo() + + if args.isatty(): + # we're stdin, use '-' and '' for file and dir + self.context["FILE"] = "-" + self.context["DIRECTORY"] = "" + self.curdir = "." + else: + abspath = os.path.abspath(args.name) + self.curdir = os.path.dirname(abspath) + self.includes.add(six.ensure_text(abspath)) + if self.topobjdir and path_starts_with(abspath, self.topobjdir): + abspath = "$OBJDIR" + normsep(abspath[len(self.topobjdir) :]) + elif self.topsrcdir and path_starts_with(abspath, self.topsrcdir): + abspath = "$SRCDIR" + normsep(abspath[len(self.topsrcdir) :]) + self.context["FILE"] = abspath + self.context["DIRECTORY"] = os.path.dirname(abspath) + self.context["LINE"] = 0 + + for l in args: + self.context["LINE"] += 1 + self.handleLine(l) + if isName: + args.close() + + self.context["FILE"] = oldFile + self.checkLineNumbers = oldCheckLineNumbers + self.context["LINE"] = oldLine + self.context["DIRECTORY"] = oldDir + self.curdir = oldCurdir + + def do_includesubst(self, args): + args = self.filter_substitution(args) + self.do_include(args) + + def do_error(self, args): + raise Preprocessor.Error(self, "Error: ", _to_text(args)) + + +def preprocess(includes=[sys.stdin], defines={}, output=sys.stdout, marker="#"): + pp = Preprocessor(defines=defines, marker=marker) + for f in includes: + with io.open(f, "r", encoding="utf-8") as input: + pp.processFile(input=input, output=output) + return pp.includes + + +# Keep this module independently executable. +if __name__ == "__main__": + pp = Preprocessor() + pp.handleCommandLine(None, True) diff --git a/python/mozbuild/mozbuild/pythonutil.py b/python/mozbuild/mozbuild/pythonutil.py new file mode 100644 index 0000000000..a3540647f9 --- /dev/null +++ b/python/mozbuild/mozbuild/pythonutil.py @@ -0,0 +1,23 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import sys + + +def iter_modules_in_path(*paths): + paths = [os.path.abspath(os.path.normcase(p)) + os.sep for p in paths] + for name, module in sys.modules.items(): + if getattr(module, "__file__", None) is None: + continue + if module.__file__ is None: + continue + path = module.__file__ + + if path.endswith(".pyc"): + path = path[:-1] + path = os.path.abspath(os.path.normcase(path)) + + if any(path.startswith(p) for p in paths): + yield path diff --git a/python/mozbuild/mozbuild/repackaging/__init__.py b/python/mozbuild/mozbuild/repackaging/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/repackaging/application_ini.py b/python/mozbuild/mozbuild/repackaging/application_ini.py new file mode 100644 index 0000000000..f11c94f781 --- /dev/null +++ b/python/mozbuild/mozbuild/repackaging/application_ini.py @@ -0,0 +1,66 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from mozpack.files import FileFinder +from six import string_types +from six.moves import configparser + + +def get_application_ini_value( + finder_or_application_directory, section, value, fallback=None +): + """Find string with given `section` and `value` in any `application.ini` + under given directory or finder. + + If string is not found and `fallback` is given, find string with given + `section` and `fallback` instead. + + Raises an `Exception` if no string is found.""" + + return next( + get_application_ini_values( + finder_or_application_directory, + dict(section=section, value=value, fallback=fallback), + ) + ) + + +def get_application_ini_values(finder_or_application_directory, *args): + """Find multiple strings for given `section` and `value` pairs. + Additional `args` should be dictionaries with keys `section`, `value`, + and optional `fallback`. Returns an iterable of strings, one for each + dictionary provided. + + `fallback` is treated as with `get_application_ini_value`. + + Raises an `Exception` if any string is not found.""" + + if isinstance(finder_or_application_directory, string_types): + finder = FileFinder(finder_or_application_directory) + else: + finder = finder_or_application_directory + + # Packages usually have a top-level `firefox/` directory; search below it. + for p, f in finder.find("**/application.ini"): + data = f.open().read().decode("utf-8") + parser = configparser.ConfigParser() + parser.read_string(data) + + for d in args: + rc = None + try: + rc = parser.get(d["section"], d["value"]) + except configparser.NoOptionError: + if "fallback" not in d: + raise + else: + rc = parser.get(d["section"], d["fallback"]) + + if rc is None: + raise Exception("Input does not contain an application.ini file") + + yield rc + + # Process only the first `application.ini`. + break diff --git a/python/mozbuild/mozbuild/repackaging/deb.py b/python/mozbuild/mozbuild/repackaging/deb.py new file mode 100644 index 0000000000..3e01680437 --- /dev/null +++ b/python/mozbuild/mozbuild/repackaging/deb.py @@ -0,0 +1,694 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
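+#
+# Overview of the flow implemented below: repackage_deb() extracts the
+# input tarball, derives package metadata from application.ini, renders
+# the debian/ templates, injects the distribution folder and a localized
+# .desktop entry, then runs dpkg-buildpackage (inside a chroot when one
+# is available) and moves the resulting .deb to the requested output.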
+ +import datetime +import json +import logging +import os +import shutil +import subprocess +import tarfile +import tempfile +import zipfile +from email.utils import format_datetime +from pathlib import Path +from string import Template + +import mozfile +import mozpack.path as mozpath +import requests +from mozilla_version.gecko import GeckoVersion +from redo import retry + +from mozbuild.repackaging.application_ini import get_application_ini_values + + +class NoDebPackageFound(Exception): + """Raised when no .deb is found after calling dpkg-buildpackage""" + + def __init__(self, deb_file_path) -> None: + super().__init__( + f"No {deb_file_path} package found after calling dpkg-buildpackage" + ) + + +class HgServerError(Exception): + """Raised when Hg responds with an error code that is not 404 (i.e. when there is an outage)""" + + def __init__(self, msg) -> None: + super().__init__(msg) + + +_DEB_ARCH = { + "all": "all", + "x86": "i386", + "x86_64": "amd64", +} +# At the moment the Firefox build baseline is jessie. +# The debian-repackage image defined in taskcluster/docker/debian-repackage/Dockerfile +# bootstraps the /srv/jessie-i386 and /srv/jessie-amd64 chroot environments we use to +# create the `.deb` repackages. By running the repackage using chroot we generate shared +# library dependencies that match the Firefox build baseline +# defined in taskcluster/scripts/misc/build-sysroot.sh +_DEB_DIST = "jessie" + + +def repackage_deb( + log, + infile, + output, + template_dir, + arch, + version, + build_number, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, +): + if not tarfile.is_tarfile(infile): + raise Exception("Input file %s is not a valid tarfile." % infile) + + tmpdir = _create_temporary_directory(arch) + source_dir = os.path.join(tmpdir, "source") + try: + mozfile.extract_tarball(infile, source_dir) + application_ini_data = _extract_application_ini_data(infile) + build_variables = _get_build_variables( + application_ini_data, + arch, + version, + build_number, + depends="${shlibs:Depends},", + ) + + _copy_plain_deb_config(template_dir, source_dir) + _render_deb_templates(template_dir, source_dir, build_variables) + + app_name = application_ini_data["name"] + with open( + mozpath.join(source_dir, app_name.lower(), "is-packaged-app"), "w" + ) as f: + f.write("This is a packaged app.\n") + + _inject_deb_distribution_folder(source_dir, app_name) + _inject_deb_desktop_entry_file( + log, + source_dir, + build_variables, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, + ) + _generate_deb_archive( + source_dir, + target_dir=tmpdir, + output_file_path=output, + build_variables=build_variables, + arch=arch, + ) + + finally: + shutil.rmtree(tmpdir) + + +def repackage_deb_l10n( + input_xpi_file, input_tar_file, output, template_dir, version, build_number +): + arch = "all" + + tmpdir = _create_temporary_directory(arch) + source_dir = os.path.join(tmpdir, "source") + try: + langpack_metadata = _extract_langpack_metadata(input_xpi_file) + langpack_dir = mozpath.join(source_dir, "firefox", "distribution", "extensions") + application_ini_data = _extract_application_ini_data(input_tar_file) + langpack_id = langpack_metadata["langpack_id"] + build_variables = _get_build_variables( + application_ini_data, + arch, + version, + build_number, + depends=application_ini_data["remoting_name"], + # Debian package names are only lowercase + package_name_suffix=f"-l10n-{langpack_id.lower()}", + description_suffix=f" - 
{langpack_metadata['description']}", + ) + _copy_plain_deb_config(template_dir, source_dir) + _render_deb_templates(template_dir, source_dir, build_variables) + + os.makedirs(langpack_dir, exist_ok=True) + shutil.copy( + input_xpi_file, + mozpath.join( + langpack_dir, + f"{langpack_metadata['browser_specific_settings']['gecko']['id']}.xpi", + ), + ) + _generate_deb_archive( + source_dir=source_dir, + target_dir=tmpdir, + output_file_path=output, + build_variables=build_variables, + arch=arch, + ) + finally: + shutil.rmtree(tmpdir) + + +def _extract_application_ini_data(input_tar_file): + with tempfile.TemporaryDirectory() as d: + with tarfile.open(input_tar_file) as tar: + application_ini_files = [ + tar_info + for tar_info in tar.getmembers() + if tar_info.name.endswith("/application.ini") + ] + if len(application_ini_files) == 0: + raise ValueError( + f"Cannot find any application.ini file in archive {input_tar_file}" + ) + if len(application_ini_files) > 1: + raise ValueError( + f"Too many application.ini files found in archive {input_tar_file}. " + f"Found: {application_ini_files}" + ) + + tar.extract(application_ini_files[0], path=d) + + return _extract_application_ini_data_from_directory(d) + + +def _extract_application_ini_data_from_directory(application_directory): + values = get_application_ini_values( + application_directory, + dict(section="App", value="Name"), + dict(section="App", value="CodeName", fallback="Name"), + dict(section="App", value="Vendor"), + dict(section="App", value="RemotingName"), + dict(section="App", value="BuildID"), + ) + + data = { + "name": next(values), + "display_name": next(values), + "vendor": next(values), + "remoting_name": next(values), + "build_id": next(values), + } + data["timestamp"] = datetime.datetime.strptime(data["build_id"], "%Y%m%d%H%M%S") + + return data + + +def _get_build_variables( + application_ini_data, + arch, + version_string, + build_number, + depends, + package_name_suffix="", + description_suffix="", +): + version = GeckoVersion.parse(version_string) + # Nightlies don't have build numbers + deb_pkg_version = ( + f"{version}~{application_ini_data['build_id']}" + if version.is_nightly + else f"{version}~build{build_number}" + ) + remoting_name = application_ini_data["remoting_name"].lower() + + return { + "DEB_DESCRIPTION": f"{application_ini_data['vendor']} {application_ini_data['display_name']}" + f"{description_suffix}", + "DEB_PKG_INSTALL_PATH": f"usr/lib/{remoting_name}", + "DEB_PKG_NAME": f"{remoting_name}{package_name_suffix}", + "DEB_PKG_VERSION": deb_pkg_version, + "DEB_CHANGELOG_DATE": format_datetime(application_ini_data["timestamp"]), + "DEB_ARCH_NAME": _DEB_ARCH[arch], + "DEB_DEPENDS": depends, + } + + +def _copy_plain_deb_config(input_template_dir, source_dir): + template_dir_filenames = os.listdir(input_template_dir) + plain_filenames = [ + mozpath.basename(filename) + for filename in template_dir_filenames + if not filename.endswith(".in") + ] + os.makedirs(mozpath.join(source_dir, "debian"), exist_ok=True) + + for filename in plain_filenames: + shutil.copy( + mozpath.join(input_template_dir, filename), + mozpath.join(source_dir, "debian", filename), + ) + + +def _render_deb_templates( + input_template_dir, source_dir, build_variables, exclude_file_names=None +): + exclude_file_names = [] if exclude_file_names is None else exclude_file_names + + template_dir_filenames = os.listdir(input_template_dir) + template_filenames = [ + mozpath.basename(filename) + for filename in template_dir_filenames + if 
filename.endswith(".in") and filename not in exclude_file_names + ] + os.makedirs(mozpath.join(source_dir, "debian"), exist_ok=True) + + for file_name in template_filenames: + with open(mozpath.join(input_template_dir, file_name)) as f: + template = Template(f.read()) + with open(mozpath.join(source_dir, "debian", Path(file_name).stem), "w") as f: + f.write(template.substitute(build_variables)) + + +def _inject_deb_distribution_folder(source_dir, app_name): + with tempfile.TemporaryDirectory() as git_clone_dir: + subprocess.check_call( + [ + "git", + "clone", + "https://github.com/mozilla-partners/deb.git", + git_clone_dir, + ], + ) + shutil.copytree( + mozpath.join(git_clone_dir, "desktop/deb/distribution"), + mozpath.join(source_dir, app_name.lower(), "distribution"), + ) + + +def _inject_deb_desktop_entry_file( + log, + source_dir, + build_variables, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, +): + desktop_entry_file_text = _generate_browser_desktop_entry_file_text( + log, + build_variables, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, + ) + desktop_entry_file_filename = f"{build_variables['DEB_PKG_NAME']}.desktop" + os.makedirs(mozpath.join(source_dir, "debian"), exist_ok=True) + with open( + mozpath.join(source_dir, "debian", desktop_entry_file_filename), "w" + ) as f: + f.write(desktop_entry_file_text) + + +def _generate_browser_desktop_entry_file_text( + log, + build_variables, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, +): + localizations = _create_fluent_localizations( + fluent_resource_loader, fluent_localization, release_type, release_product, log + ) + desktop_entry = _generate_browser_desktop_entry(build_variables, localizations) + desktop_entry_file_text = "\n".join(desktop_entry) + return desktop_entry_file_text + + +def _create_fluent_localizations( + fluent_resource_loader, fluent_localization, release_type, release_product, log +): + brand_fluent_filename = "brand.ftl" + l10n_central_url = "https://hg.mozilla.org/l10n-central" + desktop_entry_fluent_filename = "linuxDesktopEntry.ftl" + + l10n_dir = tempfile.mkdtemp() + + loader = fluent_resource_loader(os.path.join(l10n_dir, "{locale}")) + + localizations = {} + linux_l10n_changesets = _load_linux_l10n_changesets( + "browser/locales/l10n-changesets.json" + ) + locales = ["en-US"] + locales.extend(linux_l10n_changesets.keys()) + en_US_brand_fluent_filename = _get_en_US_brand_fluent_filename( + brand_fluent_filename, release_type, release_product + ) + + for locale in locales: + locale_dir = os.path.join(l10n_dir, locale) + os.mkdir(locale_dir) + localized_desktop_entry_filename = os.path.join( + locale_dir, desktop_entry_fluent_filename + ) + if locale == "en-US": + en_US_desktop_entry_fluent_filename = os.path.join( + "browser/locales/en-US/browser", desktop_entry_fluent_filename + ) + shutil.copyfile( + en_US_desktop_entry_fluent_filename, + localized_desktop_entry_filename, + ) + else: + non_en_US_desktop_entry_fluent_filename = os.path.join( + "browser/browser", desktop_entry_fluent_filename + ) + non_en_US_fluent_resource_file_url = os.path.join( + l10n_central_url, + locale, + "raw-file", + linux_l10n_changesets[locale]["revision"], + non_en_US_desktop_entry_fluent_filename, + ) + response = requests.get(non_en_US_fluent_resource_file_url) + response = retry( + requests.get, + args=[non_en_US_fluent_resource_file_url], + attempts=5, + sleeptime=3, + jitter=2, + ) + mgs = "Missing 
{fluent_resource_file_name} for {locale}: received HTTP {status_code} for GET {resource_file_url}" + params = { + "fluent_resource_file_name": desktop_entry_fluent_filename, + "locale": locale, + "resource_file_url": non_en_US_fluent_resource_file_url, + "status_code": response.status_code, + } + action = "repackage-deb" + if response.status_code == 404: + log( + logging.WARNING, + action, + params, + mgs, + ) + continue + if response.status_code != 200: + log( + logging.ERROR, + action, + params, + mgs, + ) + raise HgServerError(mgs.format(**params)) + + with open(localized_desktop_entry_filename, "w", encoding="utf-8") as f: + f.write(response.text) + + shutil.copyfile( + en_US_brand_fluent_filename, + os.path.join(locale_dir, brand_fluent_filename), + ) + + fallbacks = [locale] + if locale != "en-US": + fallbacks.append("en-US") + localizations[locale] = fluent_localization( + fallbacks, [desktop_entry_fluent_filename, brand_fluent_filename], loader + ) + + return localizations + + +def _get_en_US_brand_fluent_filename( + brand_fluent_filename, release_type, release_product +): + branding_fluent_filename_template = os.path.join( + "browser/branding/{brand}/locales/en-US", brand_fluent_filename + ) + if release_type == "nightly": + return branding_fluent_filename_template.format(brand="nightly") + elif release_type == "beta" and release_product == "firefox": + return branding_fluent_filename_template.format(brand="official") + elif release_type == "beta" and release_product == "devedition": + return branding_fluent_filename_template.format(brand="aurora") + else: + return branding_fluent_filename_template.format(brand="unofficial") + + +def _load_linux_l10n_changesets(l10n_changesets_filename): + with open(l10n_changesets_filename) as l10n_changesets_file: + l10n_changesets = json.load(l10n_changesets_file) + return { + locale: changeset + for locale, changeset in l10n_changesets.items() + if any(platform.startswith("linux") for platform in changeset["platforms"]) + } + + +def _generate_browser_desktop_entry(build_variables, localizations): + mime_types = [ + "application/json", + "application/pdf", + "application/rdf+xml", + "application/rss+xml", + "application/x-xpinstall", + "application/xhtml+xml", + "application/xml", + "audio/flac", + "audio/ogg", + "audio/webm", + "image/avif", + "image/gif", + "image/jpeg", + "image/png", + "image/svg+xml", + "image/webp", + "text/html", + "text/xml", + "video/ogg", + "video/webm", + "x-scheme-handler/chrome", + "x-scheme-handler/http", + "x-scheme-handler/https", + ] + + categories = [ + "GNOME", + "GTK", + "Network", + "WebBrowser", + ] + + actions = [ + { + "name": "new-window", + "message": "desktop-action-new-window-name", + "command": f"{build_variables['DEB_PKG_NAME']} --new-window %u", + }, + { + "name": "new-private-window", + "message": "desktop-action-new-private-window-name", + "command": f"{build_variables['DEB_PKG_NAME']} --private-window %u", + }, + { + "name": "open-profile-manager", + "message": "desktop-action-open-profile-manager", + "command": f"{build_variables['DEB_PKG_NAME']} --ProfileManager", + }, + ] + + desktop_entry = _desktop_entry_section( + "Desktop Entry", + [ + { + "key": "Version", + "value": "1.0", + }, + { + "key": "Type", + "value": "Application", + }, + { + "key": "Exec", + "value": f"{build_variables['DEB_PKG_NAME']} %u", + }, + { + "key": "Terminal", + "value": "false", + }, + { + "key": "X-MultipleArgs", + "value": "false", + }, + { + "key": "Icon", + "value": build_variables["DEB_PKG_NAME"], + }, + { + 
"key": "StartupWMClass", + "value": build_variables["DEB_PKG_NAME"], + }, + { + "key": "Categories", + "value": _desktop_entry_list(categories), + }, + { + "key": "MimeType", + "value": _desktop_entry_list(mime_types), + }, + { + "key": "StartupNotify", + "value": "true", + }, + { + "key": "Actions", + "value": _desktop_entry_list([action["name"] for action in actions]), + }, + {"key": "Name", "value": "desktop-entry-name", "l10n": True}, + {"key": "Comment", "value": "desktop-entry-comment", "l10n": True}, + {"key": "GenericName", "value": "desktop-entry-generic-name", "l10n": True}, + {"key": "Keywords", "value": "desktop-entry-keywords", "l10n": True}, + { + "key": "X-GNOME-FullName", + "value": "desktop-entry-x-gnome-full-name", + "l10n": True, + }, + ], + localizations, + ) + + for action in actions: + desktop_entry.extend( + _desktop_entry_section( + f"Desktop Action {action['name']}", + [ + { + "key": "Name", + "value": action["message"], + "l10n": True, + }, + { + "key": "Exec", + "value": action["command"], + }, + ], + localizations, + ) + ) + + return desktop_entry + + +def _desktop_entry_list(iterable): + delimiter = ";" + return f"{delimiter.join(iterable)}{delimiter}" + + +def _desktop_entry_attribute(key, value, locale=None, localizations=None): + if not locale and not localizations: + return f"{key}={value}" + if locale and locale == "en-US": + return f"{key}={localizations[locale].format_value(value)}" + else: + return f"{key}[{locale.replace('-', '_')}]={localizations[locale].format_value(value)}" + + +def _desktop_entry_section(header, attributes, localizations): + desktop_entry_section = [f"[{header}]"] + l10n_attributes = [attribute for attribute in attributes if attribute.get("l10n")] + non_l10n_attributes = [ + attribute for attribute in attributes if not attribute.get("l10n") + ] + for attribute in non_l10n_attributes: + desktop_entry_section.append( + _desktop_entry_attribute(attribute["key"], attribute["value"]) + ) + for attribute in l10n_attributes: + for locale in localizations: + desktop_entry_section.append( + _desktop_entry_attribute( + attribute["key"], attribute["value"], locale, localizations + ) + ) + desktop_entry_section.append("") + return desktop_entry_section + + +def _generate_deb_archive( + source_dir, target_dir, output_file_path, build_variables, arch +): + command = _get_command(arch) + subprocess.check_call(command, cwd=source_dir) + deb_arch = _DEB_ARCH[arch] + deb_file_name = f"{build_variables['DEB_PKG_NAME']}_{build_variables['DEB_PKG_VERSION']}_{deb_arch}.deb" + deb_file_path = mozpath.join(target_dir, deb_file_name) + + if not os.path.exists(deb_file_path): + raise NoDebPackageFound(deb_file_path) + + subprocess.check_call(["dpkg-deb", "--info", deb_file_path]) + shutil.move(deb_file_path, output_file_path) + + +def _get_command(arch): + deb_arch = _DEB_ARCH[arch] + command = [ + "dpkg-buildpackage", + # TODO: Use long options once we stop supporting Debian Jesse. They're more + # explicit. + # + # Long options were added in dpkg 1.18.8 which is part of Debian Stretch. 
+ # + # https://git.dpkg.org/cgit/dpkg/dpkg.git/commit/?h=1.18.x&id=293bd243a19149165fc4fd8830b16a51d471a5e9 + # https://packages.debian.org/stretch/dpkg-dev + "-us", # --unsigned-source + "-uc", # --unsigned-changes + "-b", # --build=binary + ] + + if deb_arch != "all": + command.append(f"--host-arch={deb_arch}") + + if _is_chroot_available(arch): + flattened_command = " ".join(command) + command = [ + "chroot", + _get_chroot_path(arch), + "bash", + "-c", + f"cd /tmp/*/source; {flattened_command}", + ] + + return command + + +def _create_temporary_directory(arch): + if _is_chroot_available(arch): + return tempfile.mkdtemp(dir=f"{_get_chroot_path(arch)}/tmp") + else: + return tempfile.mkdtemp() + + +def _is_chroot_available(arch): + return os.path.isdir(_get_chroot_path(arch)) + + +def _get_chroot_path(arch): + deb_arch = "amd64" if arch == "all" else _DEB_ARCH[arch] + return f"/srv/{_DEB_DIST}-{deb_arch}" + + +_MANIFEST_FILE_NAME = "manifest.json" + + +def _extract_langpack_metadata(input_xpi_file): + with tempfile.TemporaryDirectory() as d: + with zipfile.ZipFile(input_xpi_file) as zip: + zip.extract(_MANIFEST_FILE_NAME, path=d) + + with open(mozpath.join(d, _MANIFEST_FILE_NAME)) as f: + return json.load(f) diff --git a/python/mozbuild/mozbuild/repackaging/dmg.py b/python/mozbuild/mozbuild/repackaging/dmg.py new file mode 100644 index 0000000000..883927f214 --- /dev/null +++ b/python/mozbuild/mozbuild/repackaging/dmg.py @@ -0,0 +1,56 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import tarfile +from pathlib import Path + +import mozfile +from mozpack.dmg import create_dmg + +from mozbuild.bootstrap import bootstrap_toolchain +from mozbuild.repackaging.application_ini import get_application_ini_value + + +def repackage_dmg(infile, output): + + if not tarfile.is_tarfile(infile): + raise Exception("Input file %s is not a valid tarfile." % infile) + + # Resolve required tools + dmg_tool = bootstrap_toolchain("dmg/dmg") + if not dmg_tool: + raise Exception("DMG tool not found") + hfs_tool = bootstrap_toolchain("dmg/hfsplus") + if not hfs_tool: + raise Exception("HFS tool not found") + mkfshfs_tool = bootstrap_toolchain("hfsplus/newfs_hfs") + if not mkfshfs_tool: + raise Exception("MKFSHFS tool not found") + + with mozfile.TemporaryDirectory() as tmp: + tmpdir = Path(tmp) + mozfile.extract_tarball(infile, tmpdir) + + # Remove the /Applications symlink. If we don't, an rsync command in + # create_dmg() will break, and create_dmg() re-creates the symlink anyway. + symlink = tmpdir / " " + if symlink.is_file(): + symlink.unlink() + + volume_name = get_application_ini_value( + str(tmpdir), "App", "CodeName", fallback="Name" + ) + + # The extra_files argument is empty [] because they are already a part + # of the original dmg produced by the build, and they remain in the + # tarball generated by the signing task. 
+ create_dmg( + source_directory=tmpdir, + output_dmg=Path(output), + volume_name=volume_name, + extra_files=[], + dmg_tool=Path(dmg_tool), + hfs_tool=Path(hfs_tool), + mkfshfs_tool=Path(mkfshfs_tool), + ) diff --git a/python/mozbuild/mozbuild/repackaging/installer.py b/python/mozbuild/mozbuild/repackaging/installer.py new file mode 100644 index 0000000000..9bd17613bf --- /dev/null +++ b/python/mozbuild/mozbuild/repackaging/installer.py @@ -0,0 +1,55 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import tempfile +import zipfile + +import mozpack.path as mozpath + +from mozbuild.action.exe_7z_archive import archive_exe +from mozbuild.util import ensureParentDir + + +def repackage_installer( + topsrcdir, tag, setupexe, package, output, package_name, sfx_stub, use_upx +): + if package and not zipfile.is_zipfile(package): + raise Exception("Package file %s is not a valid .zip file." % package) + if package is not None and package_name is None: + raise Exception("Package name must be provided, if a package is provided.") + if package is None and package_name is not None: + raise Exception( + "Package name must not be provided, if a package is not provided." + ) + + # We need the full path for the tag and output, since we chdir later. + tag = mozpath.realpath(tag) + output = mozpath.realpath(output) + ensureParentDir(output) + + tmpdir = tempfile.mkdtemp() + old_cwd = os.getcwd() + try: + if package: + z = zipfile.ZipFile(package) + z.extractall(tmpdir) + z.close() + + # Copy setup.exe into the root of the install dir, alongside the + # package. + shutil.copyfile(setupexe, mozpath.join(tmpdir, mozpath.basename(setupexe))) + + # archive_exe requires us to be in the directory where the package is + # unpacked (the tmpdir) + os.chdir(tmpdir) + + sfx_package = mozpath.join(topsrcdir, sfx_stub) + + archive_exe(package_name, tag, sfx_package, output, use_upx) + + finally: + os.chdir(old_cwd) + shutil.rmtree(tmpdir) diff --git a/python/mozbuild/mozbuild/repackaging/mar.py b/python/mozbuild/mozbuild/repackaging/mar.py new file mode 100644 index 0000000000..f215c17238 --- /dev/null +++ b/python/mozbuild/mozbuild/repackaging/mar.py @@ -0,0 +1,93 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import subprocess +import sys +import tarfile +import tempfile +import zipfile +from pathlib import Path + +import mozfile +import mozpack.path as mozpath + +from mozbuild.repackaging.application_ini import get_application_ini_value +from mozbuild.util import ensureParentDir + +_BCJ_OPTIONS = { + "x86": ["--x86"], + "x86_64": ["--x86"], + "aarch64": [], + # macOS Universal Builds + "macos-x86_64-aarch64": [], +} + + +def repackage_mar(topsrcdir, package, mar, output, arch=None, mar_channel_id=None): + if not zipfile.is_zipfile(package) and not tarfile.is_tarfile(package): + raise Exception("Package file %s is not a valid .zip or .tar file." 
+    if arch and arch not in _BCJ_OPTIONS:
+        raise Exception(
+            "Unknown architecture {}, available architectures: {}".format(
+                arch, list(_BCJ_OPTIONS.keys())
+            )
+        )
+
+    ensureParentDir(output)
+    tmpdir = tempfile.mkdtemp()
+    try:
+        if tarfile.is_tarfile(package):
+            filelist = mozfile.extract_tarball(package, tmpdir)
+        else:
+            z = zipfile.ZipFile(package)
+            z.extractall(tmpdir)
+            filelist = z.namelist()
+            z.close()
+
+        toplevel_dirs = set([mozpath.split(f)[0] for f in filelist])
+        excluded_stuff = set([" ", ".background", ".DS_Store", ".VolumeIcon.icns"])
+        toplevel_dirs = toplevel_dirs - excluded_stuff
+        # Make sure the .zip file just contains a directory like 'firefox/' at
+        # the top, and find out what it is called.
+        if len(toplevel_dirs) != 1:
+            raise Exception(
+                "Package file is expected to have a single top-level directory "
+                "(e.g. 'firefox'), not: %s" % toplevel_dirs
+            )
+        ffxdir = mozpath.join(tmpdir, toplevel_dirs.pop())
+
+        make_full_update = mozpath.join(
+            topsrcdir, "tools/update-packaging/make_full_update.sh"
+        )
+
+        env = os.environ.copy()
+        env["MOZ_PRODUCT_VERSION"] = get_application_ini_value(tmpdir, "App", "Version")
+        env["MAR"] = mozpath.normpath(mar)
+        if arch:
+            env["BCJ_OPTIONS"] = " ".join(_BCJ_OPTIONS[arch])
+        if mar_channel_id:
+            env["MAR_CHANNEL_ID"] = mar_channel_id
+        # The Windows build systems have xz installed, but it isn't in the path
+        # like it is on Linux and Mac OS X, so just use the XZ env var so the
+        # mar generation scripts can find it.
+        xz_path = mozpath.join(topsrcdir, "xz/xz.exe")
+        if os.path.exists(xz_path):
+            env["XZ"] = mozpath.normpath(xz_path)
+
+        cmd = [make_full_update, output, ffxdir]
+        if sys.platform == "win32":
+            # make_full_update.sh is a bash script, and Windows needs to
+            # explicitly call out the shell to execute the script from Python.
+            mozillabuild = os.environ["MOZILLABUILD"]
+            if (Path(mozillabuild) / "msys2").exists():
+                cmd.insert(0, mozillabuild + "/msys2/usr/bin/bash.exe")
+            else:
+                cmd.insert(0, mozillabuild + "/msys/bin/bash.exe")
+        subprocess.check_call(cmd, env=env)
+
+    finally:
+        shutil.rmtree(tmpdir)
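A usage sketch of `repackage_mar` above; this is a sketch only, the paths and channel ID are hypothetical, and `topsrcdir` is assumed to be a checkout containing `tools/update-packaging/make_full_update.sh`:

    from mozbuild.repackaging.mar import repackage_mar

    # arch must be a key of _BCJ_OPTIONS above; mar_channel_id is illustrative.
    repackage_mar(
        topsrcdir="/src/mozilla-central",
        package="firefox-115.7.0.en-US.linux-x86_64.tar.bz2",
        mar="/opt/tools/mar",  # path to the `mar` binary, exported as MAR
        output="firefox-115.7.0.complete.mar",
        arch="x86_64",
        mar_channel_id="firefox-mozilla-release",
    )

On Windows the function additionally prepends a bash from MOZILLABUILD to the command, since make_full_update.sh is a shell script.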
diff --git a/python/mozbuild/mozbuild/repackaging/msi.py b/python/mozbuild/mozbuild/repackaging/msi.py
new file mode 100644
index 0000000000..b0b1b09983
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/msi.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from xml.dom import minidom
+
+import mozpack.path as mozpath
+
+from mozbuild.util import ensureParentDir
+
+_MSI_ARCH = {
+    "x86": "x86",
+    "x86_64": "x64",
+}
+
+
+def update_wsx(wfile, pvalues):
+    parsed = minidom.parse(wfile)
+
+    # Construct a dictionary of the preprocessing options, then iterate over
+    # it and add each entry to the wsx XML doc as a processing instruction.
+    for k, v in pvalues.items():
+        entry = parsed.createProcessingInstruction("define", k + ' = "' + v + '"')
+        root = parsed.firstChild
+        parsed.insertBefore(entry, root)
+    # Write out the updated XML to a new file, then move it over the original.
+    new_w_file = wfile + ".new"
+    with open(new_w_file, "w") as fh:
+        parsed.writexml(fh)
+    shutil.move(new_w_file, wfile)
+    return wfile
+
+
+def repackage_msi(
+    topsrcdir, wsx, version, locale, arch, setupexe, candle, light, output
+):
+    if sys.platform != "win32":
+        raise Exception("repackage msi only works on Windows")
+    if not os.path.isdir(topsrcdir):
+        raise Exception("%s does not exist." % topsrcdir)
+    if not os.path.isfile(wsx):
+        raise Exception("%s does not exist." % wsx)
+    if version is None:
+        raise Exception("version name must be provided.")
+    if locale is None:
+        raise Exception("locale name must be provided.")
+    if arch is None or arch not in _MSI_ARCH.keys():
+        raise Exception(
+            "arch name must be provided and one of {}.".format(_MSI_ARCH.keys())
+        )
+    if not os.path.isfile(setupexe):
+        raise Exception("%s does not exist." % setupexe)
+    if candle is not None and not os.path.isfile(candle):
+        raise Exception("%s does not exist." % candle)
+    if light is not None and not os.path.isfile(light):
+        raise Exception("%s does not exist." % light)
+    embeddedVersion = "0.0.0.0"
+    # Version string cannot contain 'a' or 'b' when embedding in msi manifest.
+ if "a" not in version and "b" not in version: + if version.endswith("esr"): + parts = version[:-3].split(".") + else: + parts = version.split(".") + while len(parts) < 4: + parts.append("0") + embeddedVersion = ".".join(parts) + + wsx = mozpath.realpath(wsx) + setupexe = mozpath.realpath(setupexe) + output = mozpath.realpath(output) + ensureParentDir(output) + + if sys.platform == "win32": + tmpdir = tempfile.mkdtemp() + old_cwd = os.getcwd() + try: + wsx_file = os.path.split(wsx)[1] + shutil.copy(wsx, tmpdir) + temp_wsx_file = os.path.join(tmpdir, wsx_file) + temp_wsx_file = mozpath.realpath(temp_wsx_file) + pre_values = { + "Vendor": "Mozilla", + "BrandFullName": "Mozilla Firefox", + "Version": version, + "AB_CD": locale, + "Architecture": _MSI_ARCH[arch], + "ExeSourcePath": setupexe, + "EmbeddedVersionCode": embeddedVersion, + } + # update wsx file with inputs from + newfile = update_wsx(temp_wsx_file, pre_values) + wix_object_file = os.path.join(tmpdir, "installer.wixobj") + env = os.environ.copy() + if candle is None: + candle = "candle.exe" + cmd = [candle, "-out", wix_object_file, newfile] + subprocess.check_call(cmd, env=env) + wix_installer = wix_object_file.replace(".wixobj", ".msi") + if light is None: + light = "light.exe" + light_cmd = [ + light, + "-cultures:neutral", + "-sw1076", + "-sw1079", + "-out", + wix_installer, + wix_object_file, + ] + subprocess.check_call(light_cmd, env=env) + os.remove(wix_object_file) + # mv file to output dir + shutil.move(wix_installer, output) + finally: + os.chdir(old_cwd) + shutil.rmtree(tmpdir) diff --git a/python/mozbuild/mozbuild/repackaging/msix.py b/python/mozbuild/mozbuild/repackaging/msix.py new file mode 100644 index 0000000000..707096c499 --- /dev/null +++ b/python/mozbuild/mozbuild/repackaging/msix.py @@ -0,0 +1,1193 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +r"""Repackage ZIP archives (or directories) into MSIX App Packages. + +# Known issues + +- The icons in the Start Menu have a solid colour tile behind them. I think + this is an issue with plating. +""" + +import functools +import itertools +import logging +import os +import re +import shutil +import subprocess +import sys +import time +import urllib +from collections import defaultdict +from pathlib import Path + +import mozpack.path as mozpath +from mach.util import get_state_dir +from mozfile import which +from mozpack.copier import FileCopier +from mozpack.files import FileFinder, JarFinder +from mozpack.manifests import InstallManifest +from mozpack.mozjar import JarReader +from mozpack.packager.unpack import UnpackFinder +from six.moves import shlex_quote + +from mozbuild.repackaging.application_ini import get_application_ini_values +from mozbuild.util import ensureParentDir + + +def log_copy_result(log, elapsed, destdir, result): + COMPLETE = ( + "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; " + "Added/updated {updated}; " + "Removed {rm_files} files and {rm_dirs} directories." + ) + copy_result = COMPLETE.format( + elapsed=elapsed, + dest=destdir, + existing=result.existing_files_count, + updated=result.updated_files_count, + rm_files=result.removed_files_count, + rm_dirs=result.removed_directories_count, + ) + log(logging.INFO, "msix", {"copy_result": copy_result}, "{copy_result}") + + +# See https://docs.microsoft.com/en-us/uwp/schemas/appxpackage/uapmanifestschema/element-identity. 
+_MSIX_ARCH = {"x86": "x86", "x86_64": "x64", "aarch64": "arm64"} + + +@functools.lru_cache(maxsize=None) +def sdk_tool_search_path(): + from mozbuild.configure import ConfigureSandbox + + sandbox = ConfigureSandbox({}, argv=["configure"]) + sandbox.include_file( + str(Path(__file__).parent.parent.parent.parent.parent / "moz.configure") + ) + return sandbox._value_for(sandbox["sdk_bin_path"]) + [ + "c:/Windows/System32/WindowsPowershell/v1.0" + ] + + +def find_sdk_tool(binary, log=None): + if binary.lower().endswith(".exe"): + binary = binary[:-4] + + maybe = os.environ.get(binary.upper()) + if maybe: + log( + logging.DEBUG, + "msix", + {"binary": binary, "path": maybe}, + "Found {binary} in environment: {path}", + ) + return mozpath.normsep(maybe) + + maybe = which(binary, extra_search_dirs=sdk_tool_search_path()) + if maybe: + log( + logging.DEBUG, + "msix", + {"binary": binary, "path": maybe}, + "Found {binary} on path: {path}", + ) + return mozpath.normsep(maybe) + + return None + + +def get_embedded_version(version, buildid): + r"""Turn a display version into "dotted quad" notation. + + N.b.: some parts of the MSIX packaging ecosystem require the final part of + the dotted quad to be identically 0, so we enforce that here. + """ + + # It's irritating to roll our own version parsing, but the tree doesn't seem + # to contain exactly what we need at this time. + version = version.rsplit("esr", 1)[0] + alpha = "a" in version + + tail = None + if "a" in version: + head, tail = version.rsplit("a", 1) + if tail != "1": + # Disallow anything beyond `X.Ya1`. + raise ValueError( + f"Alpha version not of the form X.0a1 is not supported: {version}" + ) + tail = buildid + elif "b" in version: + head, tail = version.rsplit("b", 1) + if len(head.split(".")) > 2: + raise ValueError( + f"Beta version not of the form X.YbZ is not supported: {version}" + ) + elif "rc" in version: + head, tail = version.rsplit("rc", 1) + if len(head.split(".")) > 2: + raise ValueError( + f"Release candidate version not of the form X.YrcZ is not supported: {version}" + ) + else: + head = version + + components = (head.split(".") + ["0", "0", "0"])[:3] + if tail: + components[2] = tail + + if alpha: + # Nightly builds are all `X.0a1`, which isn't helpful. Include build ID + # to disambiguate. But each part of the dotted quad is 16 bits, so we + # have to squash. + if components[1] != "0": + # Disallow anything beyond `X.0a1`. + raise ValueError( + f"Alpha version not of the form X.0a1 is not supported: {version}" + ) + + # Last two digits only to save space. Nightly builds in 2066 and 2099 + # will be impacted, but future us can deal with that. + year = buildid[2:4] + if year[0] == "0": + # Avoid leading zero, like `.0YMm`. + year = year[1:] + month = buildid[4:6] + day = buildid[6:8] + if day[0] == "0": + # Avoid leading zero, like `.0DHh`. + day = day[1:] + hour = buildid[8:10] + + components[1] = "".join((year, month)) + components[2] = "".join((day, hour)) + + version = "{}.{}.{}.0".format(*components) + + return version + + +def get_appconstants_sys_mjs_values(finder, *args): + r"""Extract values, such as the display version like `MOZ_APP_VERSION_DISPLAY: + "...";`, from the omnijar. This allows to determine the beta number, like + `X.YbW`, where the regular beta version is only `X.Y`. Takes a list of + names and returns an iterator of the unique such value found for each name. + Raises an exception if a name is not found or if multiple values are found. 
+ """ + lines = defaultdict(list) + for _, f in finder.find("**/modules/AppConstants.sys.mjs"): + # MOZ_OFFICIAL_BRANDING is split across two lines, so remove line breaks + # immediately following ":"s so those values can be read. + data = f.open().read().decode("utf-8").replace(":\n", ":") + for line in data.splitlines(): + for arg in args: + if arg in line: + lines[arg].append(line) + + for arg in args: + (value,) = lines[arg] # We expect exactly one definition. + _, _, value = value.partition(":") + value = value.strip().strip('",;') + yield value + + +def get_branding(use_official, topsrcdir, build_app, finder, log=None): + """Figure out which branding directory to use.""" + conf_vars = mozpath.join(topsrcdir, build_app, "confvars.sh") + + def conf_vars_value(key): + lines = open(conf_vars).readlines() + for line in lines: + line = line.strip() + if line and line[0] == "#": + continue + if key not in line: + continue + _, _, value = line.partition("=") + if not value: + continue + log( + logging.INFO, + "msix", + {"key": key, "conf_vars": conf_vars, "value": value}, + "Read '{key}' from {conf_vars}: {value}", + ) + return value + log( + logging.ERROR, + "msix", + {"key": key, "conf_vars": conf_vars}, + "Unable to find '{key}' in {conf_vars}!", + ) + + # Branding defaults + branding_reason = "No branding set" + branding = conf_vars_value("MOZ_BRANDING_DIRECTORY") + + if use_official: + # Read MOZ_OFFICIAL_BRANDING_DIRECTORY from confvars.sh + branding_reason = "'MOZ_OFFICIAL_BRANDING' set" + branding = conf_vars_value("MOZ_OFFICIAL_BRANDING_DIRECTORY") + else: + # Check if --with-branding was used when building + log( + logging.INFO, + "msix", + {}, + "Checking buildconfig.html for --with-branding build flag.", + ) + for _, f in finder.find("**/chrome/toolkit/content/global/buildconfig.html"): + data = f.open().read().decode("utf-8") + match = re.search(r"--with-branding=([a-z/]+)", data) + if match: + branding_reason = "'--with-branding' set" + branding = match.group(1) + + log( + logging.INFO, + "msix", + { + "branding_reason": branding_reason, + "branding": branding, + }, + "{branding_reason}; Using branding from '{branding}'.", + ) + return mozpath.join(topsrcdir, branding) + + +def unpack_msix(input_msix, output, log=None, verbose=False): + r"""Unpack the given MSIX to the given output directory. + + MSIX packages are ZIP files, but they are Zip64/version 4.5 ZIP files, so + `mozjar.py` doesn't yet handle. Unpack using `unzip{.exe}` for simplicity. + + In addition, file names inside the MSIX package are URL quoted. URL unquote + here. + """ + + log( + logging.INFO, + "msix", + { + "input_msix": input_msix, + "output": output, + }, + "Unpacking input MSIX '{input_msix}' to directory '{output}'", + ) + + unzip = find_sdk_tool("unzip.exe", log=log) + if not unzip: + raise ValueError("unzip is required; set UNZIP or PATH") + + subprocess.check_call( + [unzip, input_msix, "-d", output] + (["-q"] if not verbose else []), + universal_newlines=True, + ) + + # Sanity check: is this an MSIX? + temp_finder = FileFinder(output) + if not temp_finder.contains("AppxManifest.xml"): + raise ValueError("MSIX file does not contain 'AppxManifest.xml'?") + + # Files in the MSIX are URL encoded/quoted; unquote here. + for dirpath, dirs, files in os.walk(output): + # This is a one way to update (in place, for os.walk) the variable `dirs` while iterating + # over it and `files`. 
+ for i, (p, var) in itertools.chain( + enumerate((f, files) for f in files), enumerate((g, dirs) for g in dirs) + ): + q = urllib.parse.unquote(p) + if p != q: + log( + logging.DEBUG, + "msix", + { + "dirpath": dirpath, + "p": p, + "q": q, + }, + "URL unquoting '{p}' -> '{q}' in {dirpath}", + ) + + var[i] = q + os.rename(os.path.join(dirpath, p), os.path.join(dirpath, q)) + + # The "package root" of our MSIX packages is like "Mozilla Firefox Beta Package Root", i.e., it + # varies by channel. This is an easy way to determine it. + for p, _ in temp_finder.find("**/application.ini"): + relpath = os.path.split(p)[0] + + # The application executable, like `firefox.exe`, is in this directory. + return mozpath.normpath(mozpath.join(output, relpath)) + + +def repackage_msix( + dir_or_package, + topsrcdir, + channel=None, + distribution_dirs=[], + version=None, + vendor=None, + displayname=None, + app_name=None, + identity=None, + publisher=None, + publisher_display_name="Mozilla Corporation", + arch=None, + output=None, + force=False, + log=None, + verbose=False, + makeappx=None, +): + if not channel: + raise Exception("channel is required") + if channel not in ( + "official", + "beta", + "aurora", + "nightly", + "unofficial", + ): + raise Exception("channel is unrecognized: {}".format(channel)) + + # TODO: maybe we can fish this from the package directly? Maybe from a DLL, + # maybe from application.ini? + if arch is None or arch not in _MSIX_ARCH.keys(): + raise Exception( + "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys()) + ) + + if not os.path.exists(dir_or_package): + raise Exception("{} does not exist".format(dir_or_package)) + + if ( + os.path.isfile(dir_or_package) + and os.path.splitext(dir_or_package)[1] == ".msix" + ): + # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. + msix_dir = mozpath.normsep( + mozpath.join( + get_state_dir(), + "cache", + "mach-msix", + "msix-unpack", + ) + ) + + if os.path.exists(msix_dir): + shutil.rmtree(msix_dir) + ensureParentDir(msix_dir) + + dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose) + + log( + logging.INFO, + "msix", + { + "input": dir_or_package, + }, + "Adding files from '{input}'", + ) + + if os.path.isdir(dir_or_package): + finder = FileFinder(dir_or_package) + else: + finder = JarFinder(dir_or_package, JarReader(dir_or_package)) + + values = get_application_ini_values( + finder, + dict(section="App", value="CodeName", fallback="Name"), + dict(section="App", value="Vendor"), + ) + + first = next(values) + if not displayname: + displayname = "Mozilla {}".format(first) + + if channel == "beta": + # Release (official) and Beta share branding. Differentiate Beta a little bit. + displayname += " Beta" + + second = next(values) + vendor = vendor or second + + # For `AppConstants.sys.mjs` and `brand.properties`, which are in the omnijar in packaged + # builds. The nested langpack XPI files can't be read by `mozjar.py`. + unpack_finder = UnpackFinder(finder, unpack_xpi=False) + + values = get_appconstants_sys_mjs_values( + unpack_finder, + "MOZ_OFFICIAL_BRANDING", + "MOZ_BUILD_APP", + "MOZ_APP_NAME", + "MOZ_APP_VERSION_DISPLAY", + "MOZ_BUILDID", + ) + try: + use_official_branding = {"true": True, "false": False}[next(values)] + except KeyError as err: + raise Exception( + f"Unexpected value '{err.args[0]}' found for 'MOZ_OFFICIAL_BRANDING'." 
+ ) from None + + build_app = next(values) + + _temp = next(values) + if not app_name: + app_name = _temp + + if not version: + display_version = next(values) + buildid = next(values) + version = get_embedded_version(display_version, buildid) + log( + logging.INFO, + "msix", + { + "version": version, + "display_version": display_version, + "buildid": buildid, + }, + "AppConstants.sys.mjs display version is '{display_version}' and build ID is" + + " '{buildid}': embedded version will be '{version}'", + ) + + # TODO: Bug 1721922: localize this description via Fluent. + lines = [] + for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"): + lines.extend( + line + for line in f.open().read().decode("utf-8").splitlines() + if "brandFullName" in line + ) + (brandFullName,) = lines # We expect exactly one definition. + _, _, brandFullName = brandFullName.partition("=") + brandFullName = brandFullName.strip() + + if channel == "beta": + # Release (official) and Beta share branding. Differentiate Beta a little bit. + brandFullName += " Beta" + + branding = get_branding( + use_official_branding, topsrcdir, build_app, unpack_finder, log + ) + if not os.path.isdir(branding): + raise Exception("branding dir {} does not exist".format(branding)) + + template = os.path.join(topsrcdir, build_app, "installer", "windows", "msix") + + # Discard everything after a '#' comment character. + locale_allowlist = set( + locale.partition("#")[0].strip().lower() + for locale in open(os.path.join(template, "msix-all-locales")).readlines() + if locale.partition("#")[0].strip() + ) + + # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. + output_dir = mozpath.normsep( + mozpath.join( + get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel) + ) + ) + + # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta + # Package Root'. This is `BrandFullName` in the installer, and we want to + # be close but to not match. By not matching, we hope to prevent confusion + # and/or errors between regularly installed builds and App Package builds. + instdir = "{} Package Root".format(displayname) + + # The standard package name is like "CompanyNoSpaces.ProductNoSpaces". + identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "") + + # We might want to include the publisher ID hash here. I.e., + # "__{publisherID}". My locally produced MSIX was named like + # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a + # missing field, but it's not necessary, since this is just an output file name. + package_output_name = "{identity}_{version}_{arch}".format( + identity=identity, version=version, arch=_MSIX_ARCH[arch] + ) + # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. + default_output = mozpath.normsep( + mozpath.join( + get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name) + ) + ) + output = output or default_output + log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}") + + m = InstallManifest() + m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri") + + m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets") + m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS") + + copier = FileCopier() + + # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead. + for p, f in finder: + if not os.path.isdir(dir_or_package): + # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe". 
+ pp = os.path.relpath(p, app_name) + else: + # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already. + pp = p + + if pp.startswith("distribution"): + # Treat any existing distribution as a distribution directory, + # potentially with language packs. This makes it easy to repack + # unpacked MSIXes. + distribution_dir = mozpath.join(dir_or_package, "distribution") + if distribution_dir not in distribution_dirs: + distribution_dirs.append(distribution_dir) + + continue + + copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f) + + # Locales to declare as supported in `AppxManifest.xml`. + locales = set(["en-US"]) + + for distribution_dir in [ + mozpath.join(template, "distribution") + ] + distribution_dirs: + log( + logging.INFO, + "msix", + {"dir": distribution_dir}, + "Adding distribution files from {dir}", + ) + + # In automation, we have no easy way to remap the names of artifacts fetched from dependent + # tasks. In particular, langpacks will be named like `target.langpack.xpi`. The fetch + # tasks do allow us to put them in a per-locale directory, so that the entire set can be + # fetched. Here we remap the names. + finder = FileFinder(distribution_dir) + + for p, f in finder: + locale = None + if os.path.basename(p) == "target.langpack.xpi": + # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE". This is how langpacks + # are presented in CI. + base, locale = os.path.split(os.path.dirname(p)) + + # Like "locale-LOCALE/langpack-LOCALE@firefox.mozilla.org.xpi". This is what AMO + # serves and how flatpak builds name langpacks, but not how snap builds name + # langpacks. I can't explain the discrepancy. + dest = mozpath.normsep( + mozpath.join( + base, + f"locale-{locale}", + f"langpack-{locale}@{app_name}.mozilla.org.xpi", + ) + ) + + log( + logging.DEBUG, + "msix", + {"path": p, "dest": dest}, + "Renaming langpack {path} to {dest}", + ) + + elif os.path.basename(p).startswith("langpack-"): + # Turn "/path/to/langpack-LOCALE@firefox.mozilla.org.xpi" into "LOCALE". This is + # how langpacks are presented from an unpacked MSIX. + _, _, locale = os.path.basename(p).partition("langpack-") + locale, _, _ = locale.partition("@") + dest = p + + else: + dest = p + + if locale: + locale = locale.strip().lower() + locales.add(locale) + log( + logging.DEBUG, + "msix", + {"locale": locale, "dest": dest}, + "Distributing locale '{locale}' from {dest}", + ) + + dest = mozpath.normsep( + mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest) + ) + if copier.contains(dest): + log( + logging.INFO, + "msix", + {"dest": dest, "path": mozpath.join(finder.base, p)}, + "Skipping duplicate: {dest} from {path}", + ) + continue + + log( + logging.DEBUG, + "msix", + {"dest": dest, "path": mozpath.join(finder.base, p)}, + "Adding distribution path: {dest} from {path}", + ) + + copier.add( + dest, + f, + ) + + locales.remove("en-US") + + # Windows MSIX packages support a finite set of locales: see + # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in + # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales. + # We distribute all of the langpacks supported by the release channel in our MSIX, which is + # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales. But we + # only advertise support in the App manifest for the intersection of that set and the set of + # supported locales. 
+ # + # We distribute all langpacks to avoid the following issue. Suppose a user manually installs a + # langpack that is not supported by Windows, and then updates the installed MSIX package. MSIX + # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to + # update the langpack before the update. But, since all langpacks are bundled with the MSIX, + # that langpack will be up-to-date, preventing one class of YSOD. + unadvertised = set() + if locale_allowlist: + unadvertised = locales - locale_allowlist + locales = locales & locale_allowlist + for locale in sorted(unadvertised): + log( + logging.INFO, + "msix", + {"locale": locale}, + "Not advertising distributed locale '{locale}' that is not recognized by Windows", + ) + + locales = ["en-US"] + list(sorted(locales)) + resource_language_list = "\n".join( + f' ' for locale in locales + ) + + defines = { + "APPX_ARCH": _MSIX_ARCH[arch], + "APPX_DISPLAYNAME": brandFullName, + "APPX_DESCRIPTION": brandFullName, + # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or + # 'Mozilla.MozillaFirefoxNightly'. + "APPX_IDENTITY": identity, + # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox + # Beta Package Root'. See above. + "APPX_INSTDIR": instdir, + # Like 'Firefox%20Package%20Root'. + "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir), + "APPX_PUBLISHER": publisher, + "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name, + "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list, + "APPX_VERSION": version, + "MOZ_APP_DISPLAYNAME": displayname, + "MOZ_APP_NAME": app_name, + # Keep synchronized with `toolkit\mozapps\notificationserver\NotificationComServer.cpp`. + "MOZ_INOTIFICATIONACTIVATION_CLSID": "916f9b5d-b5b2-4d36-b047-03c7a52f81c8", + } + + m.add_preprocess( + mozpath.join(template, "AppxManifest.xml.in"), + "AppxManifest.xml", + [], + defines=defines, + marker=" + + + + Build System Resource Usage + + + + + + + + +
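A hedged usage sketch of `repackage_msix` above; in the tree this is normally driven by `mach repackage` rather than called directly, and every argument value here (package name, publisher string, output path) is an assumption for illustration:

    import logging

    from mozbuild.repackaging.msix import repackage_msix

    def log(level, action, params, format_str):
        # Minimal mach-style logger; repackage_msix invokes log(...) for progress.
        logging.log(level, format_str.format(**params))

    repackage_msix(
        "firefox-115.7.0.en-US.win64.zip",  # a ZIP, an unpacked directory, or an .msix
        "/src/mozilla-central",             # topsrcdir
        channel="official",
        arch="x86_64",
        publisher="CN=Mozilla Corporation",  # hypothetical Identity Publisher
        output="Firefox.msix",
        log=log,
    )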

+Build Resource Usage Report
+
+Sections: CPU, Memory, Tiers, Summary
+
+Summary fields: Wall Time (s), Start Date, End Date, CPU (%),
+Write Bytes (B), Read Bytes (B), Write Time, Read Time
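Returning to `get_embedded_version` in msix.py above, a few worked examples of the dotted-quad rules; the build IDs are invented, and the expected results in the comments are hand-derived from the code shown:

    from mozbuild.repackaging.msix import get_embedded_version

    # Release/ESR: the display version is padded to a dotted quad ending in 0.
    get_embedded_version("115.7.0esr", "20240121000000")  # -> "115.7.0.0"

    # Beta: the beta number becomes the third component.
    get_embedded_version("115.0b3", "20240121000000")  # -> "115.0.3.0"

    # Nightly (X.0a1): the build ID is squashed into YYMM and DDHH components,
    # so a 2023-06-14 09:xx build of 117.0a1 becomes:
    get_embedded_version("117.0a1", "20230614091500")  # -> "117.2306.1409.0"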
+ + diff --git a/python/mozbuild/mozbuild/schedules.py b/python/mozbuild/mozbuild/schedules.py new file mode 100644 index 0000000000..5f484ed377 --- /dev/null +++ b/python/mozbuild/mozbuild/schedules.py @@ -0,0 +1,77 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Constants for SCHEDULES configuration in moz.build files and for +skip-unless-schedules optimizations in task-graph generation. +""" + +# TODO: ideally these lists could be specified in moz.build itself + +# Inclusive components are those which are scheduled when certain files are +# changed, but do not run by default. These are generally added to +# `SCHEDULES.inclusive` using `+=`, but can also be used as exclusive +# components for files which *only* affect the named component. +INCLUSIVE_COMPONENTS = [ + "docs", + "py-lint", + "js-lint", + "yaml-lint", + # inclusive test suites -- these *only* run when certain files have changed + "jittest", + "test-verify", + "test-verify-gpu", + "test-verify-wpt", + "test-coverage", + "test-coverage-wpt", + "jsreftest", + "android-hw-gfx", + "rusttests", +] +INCLUSIVE_COMPONENTS = sorted(INCLUSIVE_COMPONENTS) + +# Exclusive components are those which are scheduled by default, but for which +# some files *only* affect that component. For example, most files affect all +# platforms, but platform-specific files exclusively affect a single platform. +# These components are assigned to `SCHEDULES.exclusive` with `=`. Each comment +# denotes a new mutually exclusive set of groups that tasks can belong to. +EXCLUSIVE_COMPONENTS = [ + # os families + "android", + "linux", + "macosx", + "windows", + # broad test harness categories + "awsy", + "condprofile", + "cppunittest", + "firefox-ui", + "fuzztest", + "geckoview-junit", + "gtest", + "marionette", + "mochitest", + "raptor", + "reftest", + "talos", + "telemetry-tests-client", + "xpcshell", + "xpcshell-coverage", + "web-platform-tests", + # specific test suites + "crashtest", + "mochitest-a11y", + "mochitest-browser-a11y", + "mochitest-browser-media", + "mochitest-browser-chrome", + "mochitest-chrome", + "mochitest-plain", + "web-platform-tests-crashtest", + "web-platform-tests-print-reftest", + "web-platform-tests-reftest", + "web-platform-tests-wdspec", + "nss", +] +EXCLUSIVE_COMPONENTS = sorted(EXCLUSIVE_COMPONENTS) +ALL_COMPONENTS = INCLUSIVE_COMPONENTS + EXCLUSIVE_COMPONENTS diff --git a/python/mozbuild/mozbuild/settings.py b/python/mozbuild/mozbuild/settings.py new file mode 100644 index 0000000000..e17a5cb400 --- /dev/null +++ b/python/mozbuild/mozbuild/settings.py @@ -0,0 +1,30 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
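The component lists above back the `SCHEDULES` annotations in moz.build files; a sketch of the two idioms described in the comments, with hypothetical `Files()` patterns:

    # moz.build (illustrative)
    with Files("docs/**"):
        # Inclusive components are added with +=: also schedule docs tasks
        # when these files change.
        SCHEDULES.inclusive += ["docs"]

    with Files("windows/**"):
        # Exclusive components are assigned with =: these files only affect
        # the named component(s).
        SCHEDULES.exclusive = ["windows"]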
+
+from mach.decorators import SettingsProvider
+
+
+@SettingsProvider
+class TelemetrySettings:
+    config_settings = [
+        (
+            "build.telemetry",
+            "boolean",
+            "Enable submission of build system telemetry "
+            '(Deprecated, replaced by "telemetry.is_enabled")',
+        ),
+        (
+            "mach_telemetry.is_enabled",
+            "boolean",
+            "Build system telemetry is allowed",
+            False,
+        ),
+        (
+            "mach_telemetry.is_set_up",
+            "boolean",
+            "The telemetry setup workflow has been completed "
+            "(e.g.: user has been prompted to opt-in)",
+            False,
+        ),
+    ]
diff --git a/python/mozbuild/mozbuild/shellutil.py b/python/mozbuild/mozbuild/shellutil.py
new file mode 100644
index 0000000000..36665cf4b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/shellutil.py
@@ -0,0 +1,210 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+
+
+def _tokens2re(**tokens):
+    # Create a pattern for non-escaped tokens, in the form:
+    #   (?<name>pattern)
+    # which matches the pattern and captures it in a named match group.
+    # The group names and patterns are given as arguments.
+    all_tokens = "|".join(
+        "(?P<%s>%s)" % (name, value) for name, value in tokens.items()
+    )
+    nonescaped = r"(?<!\\)(?:%s)" % all_tokens
+
+    # The final pattern matches either the above pattern, or an escaped
+    # backslash, captured in the "escape" match group.
+    return re.compile("(?:%s|%s)" % (nonescaped, r"(?P<escape>\\\\)"))
+
+
+UNQUOTED_TOKENS_RE = _tokens2re(
+    whitespace=r"[\t\r\n ]+",
+    quote=r'[\'"]',
+    comment="#",
+    special=r"[<>&|`(){}$;\*\?]",
+    backslashed=r"\\[^\\]",
+)
+
+DOUBLY_QUOTED_TOKENS_RE = _tokens2re(
+    quote='"',
+    backslashedquote=r'\\"',
+    special="\$",
+    backslashed=r'\\[^\\"]',
+)
+
+ESCAPED_NEWLINES_RE = re.compile(r"\\\n")
+
+# This regexp contains the same characters as all those listed in
+# UNQUOTED_TOKENS_RE. Please keep in sync.
+SHELL_QUOTE_RE = re.compile(r"[\\\t\r\n \'\"#<>&|`(){}$;\*\?]")
+
+
+class MetaCharacterException(Exception):
+    def __init__(self, char):
+        self.char = char
+
+
+class _ClineSplitter(object):
+    """
+    Parses a given command line string and creates a list of command
+    and arguments, with wildcard expansion.
+    """
+
+    def __init__(self, cline):
+        self.arg = None
+        self.cline = cline
+        self.result = []
+        self._parse_unquoted()
+
+    def _push(self, str):
+        """
+        Push the given string as part of the current argument.
+        """
+        if self.arg is None:
+            self.arg = ""
+        self.arg += str
+
+    def _next(self):
+        """
+        Finalize the current argument, effectively adding it to the list.
+        """
+        if self.arg is None:
+            return
+        self.result.append(self.arg)
+        self.arg = None
+
+    def _parse_unquoted(self):
+        """
+        Parse the command line remainder in the context of an unquoted string.
+        """
+        while self.cline:
+            # Find the next token.
+            m = UNQUOTED_TOKENS_RE.search(self.cline)
+            # If we find none, the remainder of the string can be pushed to
+            # the current argument and the argument finalized.
+            if not m:
+                self._push(self.cline)
+                break
+            # The beginning of the string, up to the found token, is part of
+            # the current argument.
+            if m.start():
+                self._push(self.cline[: m.start()])
+            self.cline = self.cline[m.end() :]
+
+            match = {name: value for name, value in m.groupdict().items() if value}
+            if "quote" in match:
+                # " or ' start a quoted string.
+                if match["quote"] == '"':
+                    self._parse_doubly_quoted()
+                else:
+                    self._parse_quoted()
+            elif "comment" in match:
+                # Comments are ignored. The current argument can be finalized,
+                # and parsing stopped.
+                break
+            elif "special" in match:
+                # Unquoted, non-escaped special characters need to be sent to
+                # a shell.
+ raise MetaCharacterException(match["special"]) + elif "whitespace" in match: + # Whitespaces terminate current argument. + self._next() + elif "escape" in match: + # Escaped backslashes turn into a single backslash + self._push("\\") + elif "backslashed" in match: + # Backslashed characters are unbackslashed + # e.g. echo \a -> a + self._push(match["backslashed"][1]) + else: + raise Exception("Shouldn't reach here") + if self.arg: + self._next() + + def _parse_quoted(self): + # Single quoted strings are preserved, except for the final quote + index = self.cline.find("'") + if index == -1: + raise Exception("Unterminated quoted string in command") + self._push(self.cline[:index]) + self.cline = self.cline[index + 1 :] + + def _parse_doubly_quoted(self): + if not self.cline: + raise Exception("Unterminated quoted string in command") + while self.cline: + m = DOUBLY_QUOTED_TOKENS_RE.search(self.cline) + if not m: + raise Exception("Unterminated quoted string in command") + self._push(self.cline[: m.start()]) + self.cline = self.cline[m.end() :] + match = {name: value for name, value in m.groupdict().items() if value} + if "quote" in match: + # a double quote ends the quoted string, so go back to + # unquoted parsing + return + elif "special" in match: + # Unquoted, non-escaped special characters in a doubly quoted + # string still have a special meaning and need to be sent to a + # shell. + raise MetaCharacterException(match["special"]) + elif "escape" in match: + # Escaped backslashes turn into a single backslash + self._push("\\") + elif "backslashedquote" in match: + # Backslashed double quotes are un-backslashed + self._push('"') + elif "backslashed" in match: + # Backslashed characters are kept backslashed + self._push(match["backslashed"]) + + +def split(cline): + """ + Split the given command line string. + """ + s = ESCAPED_NEWLINES_RE.sub("", cline) + return _ClineSplitter(s).result + + +def _quote(s): + """Given a string, returns a version that can be used literally on a shell + command line, enclosing it with single quotes if necessary. + + As a special case, if given an int, returns a string containing the int, + not enclosed in quotes. + """ + if type(s) == int: + return "%d" % s + + # Empty strings need to be quoted to have any significance + if s and not SHELL_QUOTE_RE.search(s) and not s.startswith("~"): + return s + + # Single quoted strings can contain any characters unescaped except the + # single quote itself, which can't even be escaped, so the string needs to + # be closed, an escaped single quote added, and reopened. + t = type(s) + return t("'%s'") % s.replace(t("'"), t("'\\''")) + + +def quote(*strings): + """Given one or more strings, returns a quoted string that can be used + literally on a shell command line. + + >>> quote('a', 'b') + "a b" + >>> quote('a b', 'c') + "'a b' c" + """ + return " ".join(_quote(s) for s in strings) + + +__all__ = ["MetaCharacterException", "split", "quote"] diff --git a/python/mozbuild/mozbuild/sphinx.py b/python/mozbuild/mozbuild/sphinx.py new file mode 100644 index 0000000000..4d7afb621c --- /dev/null +++ b/python/mozbuild/mozbuild/sphinx.py @@ -0,0 +1,293 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
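A few examples of how shellutil's `split` and `quote` above behave; the expected values in the comments are derived from the tokenizer rules shown:

    from mozbuild.shellutil import MetaCharacterException, quote, split

    split(r"echo 'a b' c\ d")  # -> ["echo", "a b", "c d"]
    split("echo foo # bar")  # -> ["echo", "foo"]  (comment is dropped)
    quote("a b", "c")  # -> "'a b' c"

    try:
        split("echo foo | cat")
    except MetaCharacterException as e:
        # Unquoted shell metacharacters are refused rather than interpreted.
        print(e.char)  # -> |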
+ +import importlib +from pathlib import Path + +from docutils import nodes +from docutils.parsers.rst import Directive +from mots.config import FileConfig +from mots.directory import Directory +from mots.export import export_to_format +from sphinx.util.docstrings import prepare_docstring +from sphinx.util.docutils import ReferenceRole + + +def function_reference(f, attr, args, doc): + lines = [] + + lines.extend( + [ + f, + "-" * len(f), + "", + ] + ) + + docstring = prepare_docstring(doc) + + lines.extend( + [ + docstring[0], + "", + ] + ) + + arg_types = [] + + for t in args: + if isinstance(t, list): + inner_types = [t2.__name__ for t2 in t] + arg_types.append(" | ".join(inner_types)) + continue + + arg_types.append(t.__name__) + + arg_s = "(%s)" % ", ".join(arg_types) + + lines.extend( + [ + ":Arguments: %s" % arg_s, + "", + ] + ) + + lines.extend(docstring[1:]) + lines.append("") + + return lines + + +def variable_reference(v, st_type, in_type, doc): + lines = [ + v, + "-" * len(v), + "", + ] + + docstring = prepare_docstring(doc) + + lines.extend( + [ + docstring[0], + "", + ] + ) + + lines.extend( + [ + ":Storage Type: ``%s``" % st_type.__name__, + ":Input Type: ``%s``" % in_type.__name__, + "", + ] + ) + + lines.extend(docstring[1:]) + lines.append("") + + return lines + + +def special_reference(v, func, typ, doc): + lines = [ + v, + "-" * len(v), + "", + ] + + docstring = prepare_docstring(doc) + + lines.extend( + [ + docstring[0], + "", + ":Type: ``%s``" % typ.__name__, + "", + ] + ) + + lines.extend(docstring[1:]) + lines.append("") + + return lines + + +def format_module(m): + lines = [] + + lines.extend( + [ + ".. note::", + " moz.build files' implementation includes a ``Path`` class.", + ] + ) + path_docstring_minus_summary = prepare_docstring(m.Path.__doc__)[2:] + lines.extend([" " + line for line in path_docstring_minus_summary]) + + for subcontext, cls in sorted(m.SUBCONTEXTS.items()): + lines.extend( + [ + ".. _mozbuild_subcontext_%s:" % subcontext, + "", + "Sub-Context: %s" % subcontext, + "=============" + "=" * len(subcontext), + "", + ] + ) + lines.extend(prepare_docstring(cls.__doc__)) + if lines[-1]: + lines.append("") + + for k, v in sorted(cls.VARIABLES.items()): + lines.extend(variable_reference(k, *v)) + + lines.extend( + [ + "Variables", + "=========", + "", + ] + ) + + for v in sorted(m.VARIABLES): + lines.extend(variable_reference(v, *m.VARIABLES[v])) + + lines.extend( + [ + "Functions", + "=========", + "", + ] + ) + + for func in sorted(m.FUNCTIONS): + lines.extend(function_reference(func, *m.FUNCTIONS[func])) + + lines.extend( + [ + "Special Variables", + "=================", + "", + ] + ) + + for v in sorted(m.SPECIAL_VARIABLES): + lines.extend(special_reference(v, *m.SPECIAL_VARIABLES[v])) + + return lines + + +def find_mots_config_path(app): + """Find and return mots config path if it exists.""" + base_path = Path(app.srcdir).parent + config_path = base_path / "mots.yaml" + if config_path.exists(): + return config_path + + +def export_mots(config_path): + """Load mots configuration and export it to file.""" + # Load from disk and initialize configuration and directory. + config = FileConfig(config_path) + config.load() + directory = Directory(config) + directory.load() + + # Fetch file format (i.e., "rst") and export path. + frmt = config.config["export"]["format"] + path = config_path.parent / config.config["export"]["path"] + + # Generate output. + output = export_to_format(directory, frmt) + + # Create export directory if it does not exist. 
+    path.parent.mkdir(parents=True, exist_ok=True)
+
+    # Write changes to disk.
+    with path.open("w", encoding="utf-8") as f:
+        f.write(output)
+
+
+class MozbuildSymbols(Directive):
+    """Directive to insert mozbuild sandbox symbol information."""
+
+    required_arguments = 1
+
+    def run(self):
+        module = importlib.import_module(self.arguments[0])
+        fname = module.__file__
+        if fname.endswith(".pyc"):
+            fname = fname[0:-1]
+
+        self.state.document.settings.record_dependencies.add(fname)
+
+        # We simply format out the documentation as rst then feed it back
+        # into the parser for conversion. We don't even emit ourselves, so
+        # there's no record of us.
+        self.state_machine.insert_input(format_module(module), fname)
+
+        return []
+
+
+class Searchfox(ReferenceRole):
+    """Role which links a relative path from the source to its Searchfox URL.
+
+    Can be used like:
+
+        See :searchfox:`browser/base/content/browser-places.js` for more details.
+
+    Will generate a link to
+    ``https://searchfox.org/mozilla-central/source/browser/base/content/browser-places.js``
+
+    The example above will use the path as the text; to use custom text:
+
+        See :searchfox:`this file <browser/base/content/browser-places.js>` for
+        more details.
+
+    To specify a different source tree:
+
+        See :searchfox:`mozilla-beta:browser/base/content/browser-places.js`
+        for more details.
+    """
+
+    def run(self):
+        base = "https://searchfox.org/{source}/source/{path}"
+
+        if ":" in self.target:
+            source, path = self.target.split(":", 1)
+        else:
+            source = "mozilla-central"
+            path = self.target
+
+        url = base.format(source=source, path=path)
+
+        if self.has_explicit_title:
+            title = self.title
+        else:
+            title = path
+
+        node = nodes.reference(self.rawtext, title, refuri=url, **self.options)
+        return [node], []
+
+
+def setup(app):
+    from moztreedocs import manager
+
+    app.add_directive("mozbuildsymbols", MozbuildSymbols)
+    app.add_role("searchfox", Searchfox())
+
+    # Unlike typical Sphinx installs, our documentation is assembled from
+    # many sources and staged in a common location. This arguably isn't a best
+    # practice, but it was the easiest to implement at the time.
+    #
+    # Here, we invoke our custom code for staging/generating all our
+    # documentation.
+
+    # Export and write "governance" documentation to disk.
+    config_path = find_mots_config_path(app)
+    if config_path:
+        export_mots(config_path)
+
+    manager.generate_docs(app)
+    app.srcdir = manager.staging_dir
diff --git a/python/mozbuild/mozbuild/telemetry.py b/python/mozbuild/mozbuild/telemetry.py
new file mode 100644
index 0000000000..d656a9a2aa
--- /dev/null
+++ b/python/mozbuild/mozbuild/telemetry.py
@@ -0,0 +1,264 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+This file contains functions used for telemetry.
+"""
+
+import math
+import os
+import platform
+import sys
+
+import distro
+import mozpack.path as mozpath
+
+from .base import BuildEnvironmentNotFoundException
+
+
+def cpu_brand_linux():
+    """
+    Read the CPU brand string out of /proc/cpuinfo on Linux.
+    """
+    with open("/proc/cpuinfo", "r") as f:
+        for line in f:
+            if line.startswith("model name"):
+                _, brand = line.split(": ", 1)
+                return brand.rstrip()
+    # not found?
+    return None
+
+
+def cpu_brand_windows():
+    """
+    Read the CPU brand string from the registry on Windows.
+ """ + try: + import _winreg + except ImportError: + import winreg as _winreg + + try: + h = _winreg.OpenKey( + _winreg.HKEY_LOCAL_MACHINE, + r"HARDWARE\DESCRIPTION\System\CentralProcessor\0", + ) + (brand, ty) = _winreg.QueryValueEx(h, "ProcessorNameString") + if ty == _winreg.REG_SZ: + return brand + except WindowsError: + pass + return None + + +def cpu_brand_mac(): + """ + Get the CPU brand string via sysctl on macos. + """ + import ctypes + import ctypes.util + + libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c")) + # First, find the required buffer size. + bufsize = ctypes.c_size_t(0) + result = libc.sysctlbyname( + b"machdep.cpu.brand_string", None, ctypes.byref(bufsize), None, 0 + ) + if result != 0: + return None + bufsize.value += 1 + buf = ctypes.create_string_buffer(bufsize.value) + # Now actually get the value. + result = libc.sysctlbyname( + b"machdep.cpu.brand_string", buf, ctypes.byref(bufsize), None, 0 + ) + if result != 0: + return None + + return buf.value.decode() + + +def get_cpu_brand(): + """ + Get the CPU brand string as returned by CPUID. + """ + return { + "Linux": cpu_brand_linux, + "Windows": cpu_brand_windows, + "Darwin": cpu_brand_mac, + }.get(platform.system(), lambda: None)() + + +def get_os_name(): + return {"Linux": "linux", "Windows": "windows", "Darwin": "macos"}.get( + platform.system(), "other" + ) + + +def get_psutil_stats(): + """Return whether psutil exists and its associated stats. + + @returns (bool, int, int, int) whether psutil exists, the logical CPU count, + physical CPU count, and total number of bytes of memory. + """ + try: + import psutil + + return ( + True, + psutil.cpu_count(), + psutil.cpu_count(logical=False), + psutil.virtual_memory().total, + ) + except ImportError: + return False, None, None, None + + +def get_system_info(): + """ + Gather info to fill the `system` keys in the schema. + """ + # Normalize OS names a bit, and bucket non-tier-1 platforms into "other". + has_psutil, logical_cores, physical_cores, memory_total = get_psutil_stats() + info = {"os": get_os_name()} + if has_psutil: + # `total` on Linux is gathered from /proc/meminfo's `MemTotal`, which is the + # total amount of physical memory minus some kernel usage, so round up to the + # nearest GB to get a sensible answer. + info["memory_gb"] = int(math.ceil(float(memory_total) / (1024 * 1024 * 1024))) + info["logical_cores"] = logical_cores + if physical_cores is not None: + info["physical_cores"] = physical_cores + cpu_brand = get_cpu_brand() + if cpu_brand is not None: + info["cpu_brand"] = cpu_brand + # TODO: drive_is_ssd, virtual_machine: https://bugzilla.mozilla.org/show_bug.cgi?id=1481613 + return info + + +def get_build_opts(substs): + """ + Translate selected items from `substs` into `build_opts` keys in the schema. + """ + try: + opts = { + k: ty(substs.get(s, None)) + for (k, s, ty) in ( + # Selected substitutions. + ("artifact", "MOZ_ARTIFACT_BUILDS", bool), + ("debug", "MOZ_DEBUG", bool), + ("opt", "MOZ_OPTIMIZE", bool), + ("ccache", "CCACHE", bool), + ("sccache", "MOZ_USING_SCCACHE", bool), + ) + } + compiler = substs.get("CC_TYPE", None) + if compiler: + opts["compiler"] = str(compiler) + if substs.get("CXX_IS_ICECREAM", None): + opts["icecream"] = True + return opts + except BuildEnvironmentNotFoundException: + return {} + + +def get_build_attrs(attrs): + """ + Extracts clobber and cpu usage info from command attributes. 
+ """ + res = {} + clobber = attrs.get("clobber") + if clobber: + res["clobber"] = clobber + usage = attrs.get("usage") + if usage: + cpu_percent = usage.get("cpu_percent") + if cpu_percent: + res["cpu_percent"] = int(round(cpu_percent)) + return res + + +def filter_args(command, argv, topsrcdir, topobjdir, cwd=None): + """ + Given the full list of command-line arguments, remove anything up to and including `command`, + and attempt to filter absolute pathnames out of any arguments after that. + """ + if cwd is None: + cwd = os.getcwd() + + # Each key is a pathname and the values are replacement sigils + paths = { + topsrcdir: "$topsrcdir/", + topobjdir: "$topobjdir/", + mozpath.normpath(os.path.expanduser("~")): "$HOME/", + # This might override one of the existing entries, that's OK. + # We don't use a sigil here because we treat all arguments as potentially relative + # paths, so we'd like to get them back as they were specified. + mozpath.normpath(cwd): "", + } + + args = list(argv) + while args: + a = args.pop(0) + if a == command: + break + + def filter_path(p): + p = mozpath.abspath(p) + base = mozpath.basedir(p, paths.keys()) + if base: + return paths[base] + mozpath.relpath(p, base) + # Best-effort. + return "" + + return [filter_path(arg) for arg in args] + + +def get_distro_and_version(): + if sys.platform.startswith("linux"): + dist, version, _ = distro.linux_distribution(full_distribution_name=False) + return dist, version + elif sys.platform.startswith("darwin"): + return "macos", platform.mac_ver()[0] + elif sys.platform.startswith("win32") or sys.platform.startswith("msys"): + ver = sys.getwindowsversion() + return "windows", "%s.%s.%s" % (ver.major, ver.minor, ver.build) + else: + return sys.platform, "" + + +def get_shell_info(): + """Returns if the current shell was opened by vscode and if it's a SSH connection""" + + return ( + True if "vscode" in os.getenv("TERM_PROGRAM", "") else False, + bool(os.getenv("SSH_CLIENT", False)), + ) + + +def get_vscode_running(): + """Return if the vscode is currently running.""" + try: + import psutil + + for proc in psutil.process_iter(): + try: + # On Windows we have "Code.exe" + # On MacOS we have "Code Helper (Renderer)" + # On Linux we have "" + if ( + proc.name == "Code.exe" + or proc.name == "Code Helper (Renderer)" + or proc.name == "code" + ): + return True + except Exception: + # may not be able to access process info for all processes + continue + except Exception: + # On some platforms, sometimes, the generator throws an + # exception preventing us to enumerate. 
+ return False + + return False diff --git a/python/mozbuild/mozbuild/test/__init__.py b/python/mozbuild/mozbuild/test/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml new file mode 100644 index 0000000000..251b4a3069 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml @@ -0,0 +1,10 @@ + + + + diff --git a/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml new file mode 100644 index 0000000000..2e249aec63 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml @@ -0,0 +1,30 @@ + + + + diff --git a/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml new file mode 100644 index 0000000000..5e0ea0b34a --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml @@ -0,0 +1,14 @@ + + + + diff --git a/python/mozbuild/mozbuild/test/action/data/invalid/region.properties b/python/mozbuild/mozbuild/test/action/data/invalid/region.properties new file mode 100644 index 0000000000..d4d8109b69 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/data/invalid/region.properties @@ -0,0 +1,12 @@ +# A region.properties file with invalid unicode byte sequences. The +# sequences were cribbed from Markus Kuhn's "UTF-8 decoder capability +# and stress test", available at +# http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt + +# 3.5 Impossible bytes | +# | +# The following two bytes cannot appear in a correct UTF-8 string | +# | +# 3.5.1 fe = "þ" | +# 3.5.2 ff = "ÿ" | +# 3.5.3 fe fe ff ff = "þþÿÿ" | diff --git a/python/mozbuild/mozbuild/test/action/data/node/node-test-script.js b/python/mozbuild/mozbuild/test/action/data/node/node-test-script.js new file mode 100644 index 0000000000..f6dbfcc594 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/data/node/node-test-script.js @@ -0,0 +1,11 @@ +#! /usr/bin/env node +"use strict"; + +/* eslint-disable no-console */ + +let args = process.argv.slice(2); + +for (let arg of args) { + console.log(`dep:${arg}`); +} + diff --git a/python/mozbuild/mozbuild/test/action/test_buildlist.py b/python/mozbuild/mozbuild/test/action/test_buildlist.py new file mode 100644 index 0000000000..9a1d2738ed --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/test_buildlist.py @@ -0,0 +1,96 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
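To illustrate `filter_args` from telemetry.py above, a worked example. It assumes the process working directory is /tmp and uses hypothetical source/object paths: arguments under a known root are rewritten with a sigil, arguments that resolve under `cwd` come back as given, and anything else becomes "<path omitted>":

    from mozbuild.telemetry import filter_args

    filter_args(
        "build",
        ["mach", "build", "/src/mozilla-central/browser", "faster"],
        topsrcdir="/src/mozilla-central",
        topobjdir="/src/obj-ff",
        cwd="/tmp",
    )
    # -> ["$topsrcdir/browser", "faster"]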
+ +import os +import os.path +import unittest +from shutil import rmtree +from tempfile import mkdtemp + +import mozunit + +from mozbuild.action.buildlist import addEntriesToListFile + + +class TestBuildList(unittest.TestCase): + """ + Unit tests for buildlist.py + """ + + def setUp(self): + self.tmpdir = mkdtemp() + + def tearDown(self): + rmtree(self.tmpdir) + + # utility methods for tests + def touch(self, file, dir=None): + if dir is None: + dir = self.tmpdir + f = os.path.join(dir, file) + open(f, "w").close() + return f + + def assertFileContains(self, filename, l): + """Assert that the lines in the file |filename| are equal + to the contents of the list |l|, in order.""" + l = l[:] + f = open(filename, "r") + lines = [line.rstrip() for line in f.readlines()] + f.close() + for line in lines: + self.assertTrue( + len(l) > 0, + "ran out of expected lines! (expected '{0}', got '{1}')".format( + l, lines + ), + ) + self.assertEqual(line, l.pop(0)) + self.assertTrue( + len(l) == 0, + "not enough lines in file! (expected '{0}'," " got '{1}'".format(l, lines), + ) + + def test_basic(self): + "Test that addEntriesToListFile works when file doesn't exist." + testfile = os.path.join(self.tmpdir, "test.list") + l = ["a", "b", "c"] + addEntriesToListFile(testfile, l) + self.assertFileContains(testfile, l) + # ensure that attempting to add the same entries again doesn't change it + addEntriesToListFile(testfile, l) + self.assertFileContains(testfile, l) + + def test_append(self): + "Test adding new entries." + testfile = os.path.join(self.tmpdir, "test.list") + l = ["a", "b", "c"] + addEntriesToListFile(testfile, l) + self.assertFileContains(testfile, l) + l2 = ["x", "y", "z"] + addEntriesToListFile(testfile, l2) + l.extend(l2) + self.assertFileContains(testfile, l) + + def test_append_some(self): + "Test adding new entries mixed with existing entries." 
+ testfile = os.path.join(self.tmpdir, "test.list") + l = ["a", "b", "c"] + addEntriesToListFile(testfile, l) + self.assertFileContains(testfile, l) + addEntriesToListFile(testfile, ["a", "x", "c", "z"]) + self.assertFileContains(testfile, ["a", "b", "c", "x", "z"]) + + def test_add_multiple(self): + """Test that attempting to add the same entry multiple times results in + only one entry being added.""" + testfile = os.path.join(self.tmpdir, "test.list") + addEntriesToListFile(testfile, ["a", "b", "a", "a", "b"]) + self.assertFileContains(testfile, ["a", "b"]) + addEntriesToListFile(testfile, ["c", "a", "c", "b", "c"]) + self.assertFileContains(testfile, ["a", "b", "c"]) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py b/python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py new file mode 100644 index 0000000000..3cce1c5f94 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py @@ -0,0 +1,196 @@ +import os +import unittest +import xml.etree.ElementTree as ET + +import mozpack.path as mozpath +import mozunit + +from mozbuild.action.html_fragment_preprocesor import ( + fill_html_fragments_map, + generate, + get_fragment_key, + get_html_fragments_from_file, +) + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data", "html_fragment_preprocesor") + + +def data(name): + return os.path.join(test_data_path, name) + + +TEST_PATH = "/some/path/somewhere/example.xml".replace("/", os.sep) +EXAMPLE_BASIC = data("example_basic.xml") +EXAMPLE_TEMPLATES = data("example_multiple_templates.xml") +EXAMPLE_XUL = data("example_xul.xml") +DUMMY_FILE = data("dummy.js") + + +class TestNode(unittest.TestCase): + """ + Tests for html_fragment_preprocesor.py. + """ + + maxDiff = None + + def assertXMLEqual(self, a, b, message): + aRoot = ET.fromstring(a) + bRoot = ET.fromstring(b) + self.assertXMLNodesEqual(aRoot, bRoot, message) + + def assertXMLNodesEqual(self, a, b, message, xpath=""): + xpath += "/" + a.tag + messageWithPath = message + " at " + xpath + self.assertEqual(a.tag, b.tag, messageWithPath + " tag name") + self.assertEqual(a.text, b.text, messageWithPath + " text") + self.assertEqual( + a.attrib.keys(), b.attrib.keys(), messageWithPath + " attribute names" + ) + for aKey, aValue in a.attrib.items(): + self.assertEqual( + aValue, + b.attrib[aKey], + messageWithPath + "[@" + aKey + "] attribute value", + ) + for aChild, bChild in zip(a, b): + self.assertXMLNodesEqual(aChild, bChild, message, xpath) + + def test_get_fragment_key_path(self): + key = get_fragment_key("/some/path/somewhere/example.xml") + self.assertEqual(key, "example") + + def test_get_fragment_key_with_named_template(self): + key = get_fragment_key(TEST_PATH, "some-template") + self.assertEqual(key, "example/some-template") + + def test_get_html_fragments_from_template_no_doctype_no_name(self): + key = "example" + fragment_map = {} + template = ET.Element("template") + p1 = ET.SubElement(template, "p") + p1.text = "Hello World" + p2 = ET.SubElement(template, "p") + p2.text = "Goodbye" + fill_html_fragments_map(fragment_map, TEST_PATH, template) + self.assertEqual(fragment_map[key], "<p>Hello World</p><p>Goodbye</p>") + + def test_get_html_fragments_from_named_template_with_html_element(self): + key = "example/some-template" + fragment_map = {} + template = ET.Element("template") + template.attrib["name"] = "some-template" + p = ET.SubElement(template, "p") + p.text = "Hello World" + fill_html_fragments_map(fragment_map, TEST_PATH, template) + self.assertEqual(fragment_map[key], "<p>Hello World</p>") + + def test_get_html_fragments_from_template_with_doctype(self): + key = "example" + doctype = "doctype definition goes here" + fragment_map = {} + template = ET.Element("template") + p = ET.SubElement(template, "p") + p.text = "Hello World" + fill_html_fragments_map(fragment_map, TEST_PATH, template, doctype) + self.assertEqual( + fragment_map[key], "doctype definition goes here\n<p>Hello World</p>" + ) + + def test_get_html_fragments_from_file_basic(self): + key = "example_basic" + fragment_map = {} + get_html_fragments_from_file(fragment_map, EXAMPLE_BASIC) + self.assertEqual( + fragment_map[key], + '' + + "<p>Hello World</p>", + ) + + def test_get_html_fragments_from_file_multiple_templates(self): + key1 = "example_multiple_templates/alpha" + key2 = "example_multiple_templates/beta" + key3 = "example_multiple_templates/charlie" + fragment_map = {} + get_html_fragments_from_file(fragment_map, EXAMPLE_TEMPLATES) + self.assertIn("<p>Hello World</p>", fragment_map[key1], "Has HTML content") + self.assertIn( + '', + fragment_map[key1], + "Has doctype", + ) + self.assertIn("<p>Lorem ipsum", fragment_map[key2], "Has HTML content") + self.assertIn( + '', + fragment_map[key2], + "Has doctype", + ) + self.assertIn("<p>Goodbye</p>", fragment_map[key3], "Has HTML content") + self.assertIn( + '', + fragment_map[key3], + "Has doctype", + ) + + def test_get_html_fragments_from_file_with_xul(self): + key = "example_xul" + fragment_map = {} + get_html_fragments_from_file(fragment_map, EXAMPLE_XUL) + xml = "" + fragment_map[key] + "" + self.assertXMLEqual( + xml, + "" + + '' + + " " + + ' ' + + ' ' + + ' ' + + ' ' + + " " + + '' + + "", + "XML values must match", + ) + + def test_generate(self): + with open(DUMMY_FILE, "w") as file: + deps = generate( + file, + EXAMPLE_BASIC, + EXAMPLE_TEMPLATES, + EXAMPLE_XUL, + ) + with open(DUMMY_FILE, "r") as file: + contents = file.read() + self.assertIn( + "Lorem ipsum", contents, "Has HTML content") + self.assertIn('"example_basic"', contents, "Has basic fragment key") + self.assertIn( + '"example_multiple_templates/alpha"', + contents, + "Has multiple templates fragment key", + ) + self.assertIn('"example_xul"', contents, "Has XUL fragment key") + self.assertIn( + "const getHTMLFragment =", + contents, + "Has fragment loader method declaration", + ) + os.remove(DUMMY_FILE) + self.assertEqual(len(deps), 3, "deps are correct") + self.assertIn(EXAMPLE_BASIC, deps, "deps are correct") + self.assertIn(EXAMPLE_TEMPLATES, deps, "deps are correct") + self.assertIn(EXAMPLE_XUL, deps, "deps are correct") + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/action/test_langpack_manifest.py b/python/mozbuild/mozbuild/test/action/test_langpack_manifest.py new file mode 100644 index 0000000000..29e8642fc7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/test_langpack_manifest.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- + +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +import json +import os +import shutil +import tempfile +import unittest + +import mozunit + +from mozbuild.action import langpack_manifest + + +class TestGenerateManifest(unittest.TestCase): + """ + Unit tests for langpack_manifest.py.
+ """ + + def test_parse_flat_ftl(self): + src = """ +langpack-creator = bar {"bar"} +langpack-contributors = { "" } +""" + tmp = tempfile.NamedTemporaryFile(mode="wt", suffix=".ftl", delete=False) + try: + tmp.write(src) + tmp.close() + ftl = langpack_manifest.parse_flat_ftl(tmp.name) + self.assertEqual(ftl["langpack-creator"], "bar bar") + self.assertEqual(ftl["langpack-contributors"], "") + finally: + os.remove(tmp.name) + + def test_parse_flat_ftl_missing(self): + ftl = langpack_manifest.parse_flat_ftl("./does-not-exist.ftl") + self.assertEqual(len(ftl), 0) + + def test_manifest(self): + ctx = { + "langpack-creator": "Suomennosprojekti", + "langpack-contributors": "Joe Smith, Mary White", + } + os.environ["MOZ_BUILD_DATE"] = "20210928100000" + manifest = langpack_manifest.create_webmanifest( + "fi", + "57.0.1", + "57.0", + "57.0.*", + "Firefox", + "/var/vcs/l10n-central", + "langpack-fi@firefox.mozilla.og", + ctx, + {}, + ) + + data = json.loads(manifest) + self.assertEqual(data["name"], "Language: Suomi (Finnish)") + self.assertEqual( + data["description"], "Firefox Language Pack for Suomi (fi) – Finnish" + ) + self.assertEqual( + data["author"], "Suomennosprojekti (contributors: Joe Smith, Mary White)" + ) + self.assertEqual(data["version"], "57.0.20210928.100000") + + def test_manifest_truncated_name(self): + ctx = { + "langpack-creator": "Mozilla.org / Softcatalà", + "langpack-contributors": "Joe Smith, Mary White", + } + os.environ["MOZ_BUILD_DATE"] = "20210928100000" + manifest = langpack_manifest.create_webmanifest( + "ca-valencia", + "57.0.1", + "57.0", + "57.0.*", + "Firefox", + "/var/vcs/l10n-central", + "langpack-ca-valencia@firefox.mozilla.og", + ctx, + {}, + ) + + data = json.loads(manifest) + self.assertEqual(data["name"], "Language: Català (Valencià)") + self.assertEqual( + data["description"], + "Firefox Language Pack for Català (Valencià) (ca-valencia) – Catalan, Valencian", + ) + + def test_manifest_name_untranslated(self): + ctx = { + "langpack-creator": "Mozilla.org", + "langpack-contributors": "Joe Smith, Mary White", + } + os.environ["MOZ_BUILD_DATE"] = "20210928100000" + manifest = langpack_manifest.create_webmanifest( + "en-US", + "57.0.1", + "57.0", + "57.0.*", + "Firefox", + "/var/vcs/l10n-central", + "langpack-ca-valencia@firefox.mozilla.og", + ctx, + {}, + ) + + data = json.loads(manifest) + self.assertEqual(data["name"], "Language: English (US)") + self.assertEqual( + data["description"], + "Firefox Language Pack for English (US) (en-US)", + ) + + def test_manifest_without_contributors(self): + ctx = { + "langpack-creator": "Suomennosprojekti", + "langpack-contributors": "", + } + manifest = langpack_manifest.create_webmanifest( + "fi", + "57.0.1", + "57.0", + "57.0.*", + "Firefox", + "/var/vcs/l10n-central", + "langpack-fi@firefox.mozilla.og", + ctx, + {}, + ) + + data = json.loads(manifest) + self.assertEqual(data["name"], "Language: Suomi (Finnish)") + self.assertEqual( + data["description"], "Firefox Language Pack for Suomi (fi) – Finnish" + ) + self.assertEqual(data["author"], "Suomennosprojekti") + + def test_manifest_truncation(self): + locale = ( + "Long locale code that will be truncated and will cause both " + "the name and the description to exceed the maximum number of " + "characters allowed in manifest.json" + ) + title, description = langpack_manifest.get_title_and_description( + "Firefox", locale + ) + + self.assertEqual(len(title), 45) + self.assertEqual(len(description), 132) + + def test_get_version_maybe_buildid(self): + for 
(app_version, buildid, expected_version) in [ + ("109", "", "109"), + ("109.0", "", "109.0"), + ("109.0.0", "", "109.0.0"), + ("109", "20210928", "109"), # buildid should be 14 chars + ("109", "20210928123456", "109.20210928.123456"), + ("109.0", "20210928123456", "109.0.20210928.123456"), + ("109.0.0", "20210928123456", "109.0.20210928.123456"), + ("109", "20230215023456", "109.20230215.23456"), + ("109.0", "20230215023456", "109.0.20230215.23456"), + ("109.0.0", "20230215023456", "109.0.20230215.23456"), + ("109", "20230215003456", "109.20230215.3456"), + ("109", "20230215000456", "109.20230215.456"), + ("109", "20230215000056", "109.20230215.56"), + ("109", "20230215000006", "109.20230215.6"), + ("109", "20230215000000", "109.20230215.0"), + ("109.1.2.3", "20230201000000", "109.1.20230201.0"), + ("109.0a1", "", "109.0"), + ("109a0.0b0", "", "109.0"), + ("109.0.0b1", "", "109.0.0"), + ("109.0.b1", "", "109.0.0"), + ("109..1", "", "109.0.1"), + ]: + os.environ["MOZ_BUILD_DATE"] = buildid + version = langpack_manifest.get_version_maybe_buildid(app_version) + self.assertEqual(version, expected_version) + + def test_main(self): + # We set this env variable so that the manifest.json version string + # uses a "buildid", see: `get_version_maybe_buildid()` for more + # information. + os.environ["MOZ_BUILD_DATE"] = "20210928100000" + + TEST_CASES = [ + { + "app_version": "112.0.1", + "max_app_version": "112.*", + "expected_version": "112.0.20210928.100000", + "expected_min_version": "112.0", + "expected_max_version": "112.*", + }, + { + "app_version": "112.1.0", + "max_app_version": "112.*", + "expected_version": "112.1.20210928.100000", + # We expect the second part to be "0" even if the app version + # has a minor part equal to "1". + "expected_min_version": "112.0", + "expected_max_version": "112.*", + }, + { + "app_version": "114.0a1", + "max_app_version": "114.*", + "expected_version": "114.0.20210928.100000", + # We expect the min version to be equal to the app version + # because we don't change alpha versions. + "expected_min_version": "114.0a1", + "expected_max_version": "114.*", + }, + ] + + tmpdir = tempfile.mkdtemp() + try: + # These files are required by the `main()` function. + for file in ["chrome.manifest", "empty-metadata.ftl"]: + langpack_manifest.write_file(os.path.join(tmpdir, file), "") + + for tc in TEST_CASES: + extension_id = "some@extension-id" + locale = "fr" + + args = [ + "--input", + tmpdir, + # This file has been created right above. 
+ "--metadata", + "empty-metadata.ftl", + "--app-name", + "Firefox", + "--l10n-basedir", + "/var/vcs/l10n-central", + "--locales", + locale, + "--langpack-eid", + extension_id, + "--app-version", + tc["app_version"], + "--max-app-ver", + tc["max_app_version"], + ] + langpack_manifest.main(args) + + with open(os.path.join(tmpdir, "manifest.json")) as manifest_file: + manifest = json.load(manifest_file) + self.assertEqual(manifest["version"], tc["expected_version"]) + self.assertEqual(manifest["langpack_id"], locale) + self.assertEqual( + manifest["browser_specific_settings"], + { + "gecko": { + "id": extension_id, + "strict_min_version": tc["expected_min_version"], + "strict_max_version": tc["expected_max_version"], + } + }, + ) + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + del os.environ["MOZ_BUILD_DATE"] + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/action/test_node.py b/python/mozbuild/mozbuild/test/action/test_node.py new file mode 100644 index 0000000000..f1ab5afd17 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/test_node.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +import os +import unittest + +import buildconfig +import mozpack.path as mozpath +import mozunit + +from mozbuild.action.node import SCRIPT_ALLOWLIST, generate +from mozbuild.nodeutil import find_node_executable + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data", "node") + + +def data(name): + return os.path.join(test_data_path, name) + + +TEST_SCRIPT = data("node-test-script.js") +NONEXISTENT_TEST_SCRIPT = data("non-existent-test-script.js") + + +class TestNode(unittest.TestCase): + """ + Tests for node.py. + """ + + def setUp(self): + if not buildconfig.substs.get("NODEJS"): + buildconfig.substs["NODEJS"] = find_node_executable()[0] + SCRIPT_ALLOWLIST.append(TEST_SCRIPT) + + def tearDown(self): + try: + SCRIPT_ALLOWLIST.remove(TEST_SCRIPT) + except Exception: + pass + + def test_generate_no_returned_deps(self): + deps = generate("dummy_argument", TEST_SCRIPT) + + self.assertSetEqual(deps, set([])) + + def test_generate_returns_passed_deps(self): + deps = generate("dummy_argument", TEST_SCRIPT, "a", "b") + + self.assertSetEqual(deps, set(["a", "b"])) + + def test_called_process_error_handled(self): + SCRIPT_ALLOWLIST.append(NONEXISTENT_TEST_SCRIPT) + + with self.assertRaises(SystemExit) as cm: + generate("dummy_arg", NONEXISTENT_TEST_SCRIPT) + + self.assertEqual(cm.exception.code, 1) + SCRIPT_ALLOWLIST.remove(NONEXISTENT_TEST_SCRIPT) + + def test_nodejs_not_set(self): + buildconfig.substs["NODEJS"] = None + + with self.assertRaises(SystemExit) as cm: + generate("dummy_arg", TEST_SCRIPT) + + self.assertEqual(cm.exception.code, 1) + + def test_generate_missing_allowlist_entry_exit_code(self): + SCRIPT_ALLOWLIST.remove(TEST_SCRIPT) + with self.assertRaises(SystemExit) as cm: + generate("dummy_arg", TEST_SCRIPT) + + self.assertEqual(cm.exception.code, 1) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/action/test_process_install_manifest.py b/python/mozbuild/mozbuild/test/action/test_process_install_manifest.py new file mode 100644 index 0000000000..3aea4bca73 --- /dev/null +++ b/python/mozbuild/mozbuild/test/action/test_process_install_manifest.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- + +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +import os + +import mozunit +from mozpack.manifests import InstallManifest +from mozpack.test.test_files import TestWithTmpDir + +import mozbuild.action.process_install_manifest as process_install_manifest + + +class TestGenerateManifest(TestWithTmpDir): + """ + Unit tests for process_install_manifest.py. + """ + + def test_process_manifest(self): + source = self.tmppath("source") + os.mkdir(source) + os.mkdir("%s/base" % source) + os.mkdir("%s/base/foo" % source) + os.mkdir("%s/base2" % source) + + with open("%s/base/foo/file1" % source, "a"): + pass + + with open("%s/base/foo/file2" % source, "a"): + pass + + with open("%s/base2/file3" % source, "a"): + pass + + m = InstallManifest() + m.add_pattern_link("%s/base" % source, "**", "") + m.add_link("%s/base2/file3" % source, "foo/file3") + + p = self.tmppath("m") + m.write(path=p) + + dest = self.tmppath("dest") + track = self.tmppath("track") + + for i in range(2): + process_install_manifest.process_manifest(dest, [p], track) + + self.assertTrue(os.path.exists(self.tmppath("dest/foo/file1"))) + self.assertTrue(os.path.exists(self.tmppath("dest/foo/file2"))) + self.assertTrue(os.path.exists(self.tmppath("dest/foo/file3"))) + + m = InstallManifest() + m.write(path=p) + + for i in range(2): + process_install_manifest.process_manifest(dest, [p], track) + + self.assertFalse(os.path.exists(self.tmppath("dest/foo/file1"))) + self.assertFalse(os.path.exists(self.tmppath("dest/foo/file2"))) + self.assertFalse(os.path.exists(self.tmppath("dest/foo/file3"))) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/backend/__init__.py b/python/mozbuild/mozbuild/test/backend/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/common.py b/python/mozbuild/mozbuild/test/backend/common.py new file mode 100644 index 0000000000..07cfa7540f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/common.py @@ -0,0 +1,253 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import unittest +from collections import defaultdict +from shutil import rmtree +from tempfile import mkdtemp + +import mozpack.path as mozpath +from mach.logging import LoggingManager + +from mozbuild.backend.configenvironment import ConfigEnvironment +from mozbuild.frontend.emitter import TreeMetadataEmitter +from mozbuild.frontend.reader import BuildReader + +log_manager = LoggingManager() +log_manager.add_terminal_logging() + + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data") + + +CONFIGS = defaultdict( + lambda: { + "defines": {}, + "substs": {"OS_TARGET": "WINNT"}, + }, + { + "binary-components": { + "defines": {}, + "substs": { + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "COMPILE_ENVIRONMENT": "1", + }, + }, + "database": { + "defines": {}, + "substs": { + "CC": "clang", + "CXX": "clang++", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + }, + }, + "rust-library": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "RUST_TARGET": "x86_64-unknown-linux-gnu", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + }, + }, + "host-rust-library": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "RUST_HOST_TARGET": "x86_64-unknown-linux-gnu", + "RUST_TARGET": "armv7-linux-androideabi", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + }, + }, + "host-rust-library-features": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "RUST_HOST_TARGET": "x86_64-unknown-linux-gnu", + "RUST_TARGET": "armv7-linux-androideabi", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + }, + }, + "rust-library-features": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "RUST_TARGET": "x86_64-unknown-linux-gnu", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + }, + }, + "rust-programs": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "RUST_TARGET": "i686-pc-windows-msvc", + "RUST_HOST_TARGET": "i686-pc-windows-msvc", + "BIN_SUFFIX": ".exe", + "HOST_BIN_SUFFIX": ".exe", + }, + }, + "test-support-binaries-tracked": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "LIB_SUFFIX": "dll", + "BIN_SUFFIX": ".exe", + }, + }, + "sources": { + "defines": {}, + "substs": { + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + }, + }, + "stub0": { + "defines": { + "MOZ_TRUE_1": "1", + "MOZ_TRUE_2": "1", + }, + "substs": { + "MOZ_FOO": "foo", + "MOZ_BAR": "bar", + }, + }, + "substitute_config_files": { + "defines": {}, + "substs": { + "MOZ_FOO": "foo", + "MOZ_BAR": "bar", + }, + }, + "test_config": { + "defines": { + "foo": "baz qux", + "baz": 1, + }, + "substs": { + "foo": "bar baz", + }, + }, + "visual-studio": { + "defines": {}, + "substs": { + "MOZ_APP_NAME": "my_app", + }, + }, + "prog-lib-c-only": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "LIB_SUFFIX": ".a", + "BIN_SUFFIX": "", + }, + }, + "gn-processor": { + "defines": {}, + "substs": { + "BUILD_BACKENDS": [ + "GnMozbuildWriter", + "RecursiveMake", + ], + "COMPILE_ENVIRONMENT": "1", + "STL_FLAGS": [], + "RUST_TARGET": "x86_64-unknown-linux-gnu", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "OS_TARGET": "Darwin", + }, + }, + "ipdl_sources": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "LIB_SUFFIX": ".a", + "BIN_SUFFIX": "", + }, + }, + "program-paths": { + "defines": {}, + "substs": { + "COMPILE_ENVIRONMENT": "1", + "BIN_SUFFIX": ".prog", + }, + }, + "linkage": { + "defines": {}, + "substs": { + "CC_TYPE": "clang", + "COMPILE_ENVIRONMENT": "1", + "LIB_SUFFIX": "a", + "BIN_SUFFIX": 
".exe", + "DLL_SUFFIX": ".so", + "OBJ_SUFFIX": "o", + "EXPAND_LIBS_LIST_STYLE": "list", + }, + }, + }, +) + + +class BackendTester(unittest.TestCase): + def setUp(self): + self._old_env = dict(os.environ) + os.environ.pop("MOZ_OBJDIR", None) + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + def _get_environment(self, name): + """Obtain a new instance of a ConfigEnvironment for a known profile. + + A new temporary object directory is created for the environment. The + environment is cleaned up automatically when the test finishes. + """ + config = CONFIGS[name] + config["substs"]["MOZ_UI_LOCALE"] = "en-US" + + srcdir = mozpath.join(test_data_path, name) + config["substs"]["top_srcdir"] = srcdir + + # Create the objdir in the srcdir to ensure that they share the + # same drive on Windows. + objdir = mkdtemp(dir=srcdir) + self.addCleanup(rmtree, objdir) + + return ConfigEnvironment(srcdir, objdir, **config) + + def _emit(self, name, env=None): + env = env or self._get_environment(name) + reader = BuildReader(env) + emitter = TreeMetadataEmitter(env) + + return env, emitter.emit(reader.read_topsrcdir()) + + def _consume(self, name, cls, env=None): + env, objs = self._emit(name, env=env) + backend = cls(env) + backend.consume(objs) + + return env + + def _tree_paths(self, topdir, filename): + for dirpath, dirnames, filenames in os.walk(topdir): + for f in filenames: + if f == filename: + yield mozpath.relpath(mozpath.join(dirpath, f), topdir) + + def _mozbuild_paths(self, env): + return self._tree_paths(env.topsrcdir, "moz.build") + + def _makefile_in_paths(self, env): + return self._tree_paths(env.topsrcdir, "Makefile.in") + + +__all__ = ["BackendTester"] diff --git a/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build b/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build new file mode 100644 index 0000000000..27641b2080 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build @@ -0,0 +1,54 @@ +DIST_SUBDIR = "app" + +EXTRA_JS_MODULES += [ + "../foo.jsm", +] + +EXTRA_JS_MODULES.child += [ + "../bar.jsm", +] + +EXTRA_PP_JS_MODULES += [ + "../baz.jsm", +] + +EXTRA_PP_JS_MODULES.child2 += [ + "../qux.jsm", +] + +FINAL_TARGET_FILES += [ + "../foo.ini", +] + +FINAL_TARGET_FILES.child += [ + "../bar.ini", +] + +FINAL_TARGET_PP_FILES += [ + "../baz.ini", + "../foo.css", +] + +FINAL_TARGET_PP_FILES.child2 += [ + "../qux.ini", +] + +EXTRA_COMPONENTS += [ + "../components.manifest", + "../foo.js", +] + +EXTRA_PP_COMPONENTS += [ + "../bar.js", +] + +JS_PREFERENCE_FILES += [ + "../prefs.js", +] + +JAR_MANIFESTS += [ + "../jar.mn", +] + +DEFINES["FOO"] = "bar" +DEFINES["BAR"] = True diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.ini b/python/mozbuild/mozbuild/test/backend/data/build/bar.ini new file mode 100644 index 0000000000..91dcbe1536 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.ini @@ -0,0 +1 @@ +bar.ini diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.js b/python/mozbuild/mozbuild/test/backend/data/build/bar.js new file mode 100644 index 0000000000..1a608e8a56 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.js @@ -0,0 +1,2 @@ +#filter substitution +bar.js: FOO is @FOO@ diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm b/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm new file mode 100644 index 0000000000..05db2e2f6a --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm @@ -0,0 +1 
@@ +bar.jsm diff --git a/python/mozbuild/mozbuild/test/backend/data/build/baz.ini b/python/mozbuild/mozbuild/test/backend/data/build/baz.ini new file mode 100644 index 0000000000..975a1e437d --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/baz.ini @@ -0,0 +1,2 @@ +#filter substitution +baz.ini: FOO is @FOO@ diff --git a/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm b/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm new file mode 100644 index 0000000000..f39ed02082 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm @@ -0,0 +1,2 @@ +#filter substitution +baz.jsm: FOO is @FOO@ diff --git a/python/mozbuild/mozbuild/test/backend/data/build/components.manifest b/python/mozbuild/mozbuild/test/backend/data/build/components.manifest new file mode 100644 index 0000000000..b5bb87254c --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/components.manifest @@ -0,0 +1,2 @@ +component {foo} foo.js +component {bar} bar.js diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.css b/python/mozbuild/mozbuild/test/backend/data/build/foo.css new file mode 100644 index 0000000000..1803d6c572 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.css @@ -0,0 +1,2 @@ +%filter substitution +foo.css: FOO is @FOO@ diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.ini b/python/mozbuild/mozbuild/test/backend/data/build/foo.ini new file mode 100644 index 0000000000..c93c9d7658 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.ini @@ -0,0 +1 @@ +foo.ini diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.js b/python/mozbuild/mozbuild/test/backend/data/build/foo.js new file mode 100644 index 0000000000..4fa71e2d27 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.js @@ -0,0 +1 @@ +foo.js diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm b/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm new file mode 100644 index 0000000000..d58fd61c16 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm @@ -0,0 +1 @@ +foo.jsm diff --git a/python/mozbuild/mozbuild/test/backend/data/build/jar.mn b/python/mozbuild/mozbuild/test/backend/data/build/jar.mn new file mode 100644 index 0000000000..393055c4ea --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/jar.mn @@ -0,0 +1,11 @@ +foo.jar: +% content bar %child/ +% content foo % + foo.js +* foo.css + bar.js (subdir/bar.js) + qux.js (subdir/bar.js) +* child/hoge.js (bar.js) +* child/baz.jsm + +% override chrome://foo/bar.svg#hello chrome://bar/bar.svg#hello diff --git a/python/mozbuild/mozbuild/test/backend/data/build/moz.build b/python/mozbuild/mozbuild/test/backend/data/build/moz.build new file mode 100644 index 0000000000..700516754d --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/moz.build @@ -0,0 +1,68 @@ +CONFIGURE_SUBST_FILES += [ + "/config/autoconf.mk", + "/config/emptyvars.mk", +] + +EXTRA_JS_MODULES += [ + "foo.jsm", +] + +EXTRA_JS_MODULES.child += [ + "bar.jsm", +] + +EXTRA_PP_JS_MODULES += [ + "baz.jsm", +] + +EXTRA_PP_JS_MODULES.child2 += [ + "qux.jsm", +] + +FINAL_TARGET_FILES += [ + "foo.ini", +] + +FINAL_TARGET_FILES.child += [ + "bar.ini", +] + +FINAL_TARGET_PP_FILES += [ + "baz.ini", +] + +FINAL_TARGET_PP_FILES.child2 += [ + "foo.css", + "qux.ini", +] + +EXTRA_COMPONENTS += [ + "components.manifest", + "foo.js", +] + +EXTRA_PP_COMPONENTS += [ + "bar.js", +] + +JS_PREFERENCE_FILES += [ + "prefs.js", +] 
+ +RESOURCE_FILES += [ + "resource", +] + +RESOURCE_FILES.child += [ + "resource2", +] + +DEFINES["FOO"] = "foo" + +JAR_MANIFESTS += [ + "jar.mn", +] + +DIRS += [ + "app", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/build/prefs.js b/python/mozbuild/mozbuild/test/backend/data/build/prefs.js new file mode 100644 index 0000000000..a030da9fd7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/prefs.js @@ -0,0 +1 @@ +prefs.js diff --git a/python/mozbuild/mozbuild/test/backend/data/build/qux.ini b/python/mozbuild/mozbuild/test/backend/data/build/qux.ini new file mode 100644 index 0000000000..3ce157eb6d --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/qux.ini @@ -0,0 +1,5 @@ +#ifdef BAR +qux.ini: BAR is defined +#else +qux.ini: BAR is not defined +#endif diff --git a/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm b/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm new file mode 100644 index 0000000000..9c5fe28d58 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm @@ -0,0 +1,5 @@ +#ifdef BAR +qux.jsm: BAR is defined +#else +qux.jsm: BAR is not defined +#endif diff --git a/python/mozbuild/mozbuild/test/backend/data/build/resource b/python/mozbuild/mozbuild/test/backend/data/build/resource new file mode 100644 index 0000000000..91e75c679e --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/resource @@ -0,0 +1 @@ +resource diff --git a/python/mozbuild/mozbuild/test/backend/data/build/resource2 b/python/mozbuild/mozbuild/test/backend/data/build/resource2 new file mode 100644 index 0000000000..b7c2700964 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/resource2 @@ -0,0 +1 @@ +resource2 diff --git a/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js b/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js new file mode 100644 index 0000000000..80c887a84a --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js @@ -0,0 +1 @@ +bar.js diff --git a/python/mozbuild/mozbuild/test/backend/data/database/bar.c b/python/mozbuild/mozbuild/test/backend/data/database/bar.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/database/baz.cpp b/python/mozbuild/mozbuild/test/backend/data/database/baz.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat b/python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/database/foo.c b/python/mozbuild/mozbuild/test/backend/data/database/foo.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/database/moz.build b/python/mozbuild/mozbuild/test/backend/data/database/moz.build new file mode 100644 index 0000000000..ebc5d05b5c --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/database/moz.build @@ -0,0 +1,14 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +SOURCES = ["bar.c", "baz.cpp", "foo.c", "qux.cpp"] diff --git a/python/mozbuild/mozbuild/test/backend/data/database/qux.cpp b/python/mozbuild/mozbuild/test/backend/data/database/qux.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/defines/moz.build b/python/mozbuild/mozbuild/test/backend/data/defines/moz.build new file mode 100644 index 0000000000..b603cac3ff --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/defines/moz.build @@ -0,0 +1,9 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +value = "xyz" +DEFINES["FOO"] = True +DEFINES["BAZ"] = '"ab\'cd"' +DEFINES["QUX"] = False +DEFINES["BAR"] = 7 +DEFINES["VALUE"] = value diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf b/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js b/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build new file mode 100644 index 0000000000..25961f149f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET_PP_FILES += [ + "install.rdf", + "main.js", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build b/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build new file mode 100644 index 0000000000..44c31a3d9c --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build @@ -0,0 +1,12 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +EXPORTS += ["!bar.h", "foo.h"] +EXPORTS.mozilla += ["!mozilla2.h", "mozilla1.h"] +EXPORTS.mozilla.dom += ["!dom2.h", "!dom3.h", "dom1.h"] +EXPORTS.gfx += ["gfx.h"] + +GENERATED_FILES += ["bar.h"] +GENERATED_FILES += ["mozilla2.h"] +GENERATED_FILES += ["dom2.h"] +GENERATED_FILES += ["dom3.h"] diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h b/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h b/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/foo.h b/python/mozbuild/mozbuild/test/backend/data/exports/foo.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h b/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/moz.build b/python/mozbuild/mozbuild/test/backend/data/exports/moz.build new file mode 100644 index 0000000000..371f26f572 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/exports/moz.build @@ -0,0 +1,8 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +EXPORTS += ["foo.h"] +EXPORTS.mozilla += ["mozilla1.h", "mozilla2.h"] +EXPORTS.mozilla.dom += ["dom1.h", "dom2.h"] +EXPORTS.mozilla.gfx += ["gfx.h"] +EXPORTS.nspr.private += ["pprio.h"] diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h b/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build new file mode 100644 index 0000000000..d665855234 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build @@ -0,0 +1,5 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +FINAL_TARGET_FILES.foo += ["*.xyz"] diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build new file mode 100644 index 0000000000..dfbda9183b --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build @@ -0,0 +1,6 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +XPI_NAME = "mycrazyxpi" +DIST_SUBDIR = "asubdir" diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build new file mode 100644 index 0000000000..e44dd197ad --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIST_SUBDIR = "asubdir" diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build new file mode 100644 index 0000000000..e008f94478 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET = "random-final-target" diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build new file mode 100644 index 0000000000..319062b78f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["xpi-name", "dist-subdir", "both", "final-target"] diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build new file mode 100644 index 0000000000..980810caa3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +XPI_NAME = "mycrazyxpi" diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build new file mode 100644 index 0000000000..d86b7b09ea --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build @@ -0,0 +1,14 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c", "foo.c", "quux.c"] + +bar = GENERATED_FILES["bar.c"] +bar.script = "generate-bar.py:baz" +bar.force = True + +foo = GENERATED_FILES["foo.c"] +foo.script = "generate-foo.py" +foo.inputs = ["foo-data"] +foo.force = False diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data b/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build new file mode 100644 index 0000000000..01b444238e --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build @@ -0,0 +1,12 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c", "foo.h", "quux.c"] + +bar = GENERATED_FILES["bar.c"] +bar.script = "generate-bar.py:baz" + +foo = GENERATED_FILES["foo.h"] +foo.script = "generate-foo.py" +foo.inputs = ["foo-data"] diff --git a/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build new file mode 100644 index 0000000000..31f9042c0a --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCAL_INCLUDES += ["!/bar/baz", "!foo"] diff --git a/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build new file mode 100644 index 0000000000..f1a632c841 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build @@ -0,0 +1,9 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +value = "xyz" +HOST_DEFINES["FOO"] = True +HOST_DEFINES["BAZ"] = '"ab\'cd"' +HOST_DEFINES["BAR"] = 7 +HOST_DEFINES["VALUE"] = value +HOST_DEFINES["QUX"] = False diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml new file mode 100644 index 0000000000..147cb3acb3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "hostrusttool" +version = "0.1.0" +authors = ["The Mozilla Project Developers"] + +[lib] +crate-type = ["staticlib"] + +[profile.dev] +panic = "abort" + +[profile.release] +panic = "abort" diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build new file mode 100644 index 0000000000..96fccf2063 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build @@ -0,0 +1,22 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def HostLibrary(name): + """Template for libraries.""" + HOST_LIBRARY_NAME = name + + +@template +def HostRustLibrary(name, features=None): + """Template for Rust libraries.""" + HostLibrary(name) + + IS_RUST_LIBRARY = True + + if features: + HOST_RUST_LIBRARY_FEATURES = features + + +HostRustLibrary("hostrusttool", ["musthave", "cantlivewithout"]) diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml new file mode 100644 index 0000000000..349664c621 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "hostrusttool" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] + +[lib] +crate-type = ["staticlib"] + +[profile.dev] +panic = "abort" + +[profile.release] +panic = "abort" diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build new file mode 100644 index 0000000000..515f5d1a9f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build @@ -0,0 +1,22 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def HostLibrary(name): + """Template for libraries.""" + HOST_LIBRARY_NAME = name + + +@template +def HostRustLibrary(name, features=None): + """Template for Rust libraries.""" + HostLibrary(name) + + IS_RUST_LIBRARY = True + + if features: + HOST_RUST_LIBRARY_FEATURES = features + + +HostRustLibrary("hostrusttool") diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build new file mode 100644 index 0000000000..c38b472911 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build @@ -0,0 +1,6 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +# We want to test recursion into the subdir, so do the real work in 'sub' +DIRS += ["sub"] diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in new file mode 100644 index 0000000000..da287dfcaa --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in @@ -0,0 +1 @@ +#define MOZ_FOO @MOZ_FOO@ diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build new file mode 100644 index 0000000000..1420a99a8f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +CONFIGURE_SUBST_FILES = ["foo.h"] + +EXPORTS.out += ["!foo.h"] diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build new file mode 100644 index 0000000000..f7d1560af3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build @@ -0,0 +1,16 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +PREPROCESSED_IPDL_SOURCES += [ + "bar1.ipdl", +] + +IPDL_SOURCES += [ + "bar.ipdl", + "bar2.ipdlh", +] + +FINAL_LIBRARY = "dummy" diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build new file mode 100644 index 0000000000..02e9f78154 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build @@ -0,0 +1,16 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +PREPROCESSED_IPDL_SOURCES += [ + "foo1.ipdl", +] + +IPDL_SOURCES += [ + "foo.ipdl", + "foo2.ipdlh", +] + +FINAL_LIBRARY = "dummy" diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build new file mode 100644 index 0000000000..066397cb84 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This file just exists to establish a directory as the IPDL root directory. + +FINAL_LIBRARY = "dummy" diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build new file mode 100644 index 0000000000..4f0ddaa10e --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build @@ -0,0 +1,19 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@template +def Library(name): + LIBRARY_NAME = name + + +Library("dummy") + +DIRS += [ + "bar", + "foo", + "ipdl", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build b/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build new file mode 100644 index 0000000000..d988c0ff9b --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +JAR_MANIFESTS += ["jar.mn"] diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/moz.build new file mode 100644 index 0000000000..f01a012760 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/moz.build @@ -0,0 +1,11 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +include("templates.mozbuild") + +DIRS += [ + "real", + "shared", + "prog", + "static", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build new file mode 100644 index 0000000000..3741f4be09 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build @@ -0,0 +1,11 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["qux"] + +Program("MyProgram") + +USE_LIBS += [ + "bar", + "baz", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build new file mode 100644 index 0000000000..3152de6211 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build @@ -0,0 +1,6 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +SOURCES += ["qux1.c"] + +SharedLibrary("qux") diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build new file mode 100644 index 0000000000..a0bd7526e6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build @@ -0,0 +1,6 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SOURCES += ["foo1.c", "foo2.c"] + +FINAL_LIBRARY = "foo" diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build new file mode 100644 index 0000000000..32f9c1d656 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build @@ -0,0 +1,14 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += [ + "foo", +] + +NO_EXPAND_LIBS = True + +OS_LIBS += ["-lbaz"] + +USE_LIBS += ["static:baz"] + +Library("foo") diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build new file mode 100644 index 0000000000..3299fa28f4 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build @@ -0,0 +1,6 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SOURCES += ["baz1.c"] + +FINAL_LIBRARY = "baz" diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build new file mode 100644 index 0000000000..42d79fe1fd --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build @@ -0,0 +1,14 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += [ + "baz", +] + +STATIC_LIBRARY_NAME = "baz_s" +FORCE_STATIC_LIB = True + +OS_LIBS += ["-lfoo"] +USE_LIBS += ["qux"] + +SharedLibrary("baz") diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build new file mode 100644 index 0000000000..12d0fc83fb --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build @@ -0,0 +1,8 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SOURCES += [ + "bar_helper1.cpp", +] + +FINAL_LIBRARY = "bar" diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build new file mode 100644 index 0000000000..d9d75803ed --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build @@ -0,0 +1,13 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SOURCES += [ + "bar1.cc", + "bar2.cc", +] + +DIRS += [ + "bar_helper", +] + +FINAL_LIBRARY = "bar" diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build new file mode 100644 index 0000000000..37b3d96cc7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build @@ -0,0 +1,12 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += [ + "bar", +] + +USE_LIBS += ["foo"] + +OS_LIBS += ["-lbar"] + +Library("bar") diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild b/python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild new file mode 100644 index 0000000000..1f874060df --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild @@ -0,0 +1,23 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +@template +def Library(name): + LIBRARY_NAME = name + +@template +def SharedLibrary(name): + FORCE_SHARED_LIB = True + LIBRARY_NAME = name + +@template +def Binary(): + # Add -lfoo for testing purposes. 
+ OS_LIBS += ['foo'] + + +@template +def Program(name): + PROGRAM = name + + Binary() \ No newline at end of file diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build b/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build new file mode 100644 index 0000000000..1c29ac2ea2 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCAL_INCLUDES += ["/bar/baz", "foo"] diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js b/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build new file mode 100644 index 0000000000..93a97c7b84 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
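templates.mozbuild is the heart of these fixtures: a function decorated with @template executes in the moz.build sandbox, and assignments it makes to sandbox variables (LIBRARY_NAME, FORCE_SHARED_LIB, PROGRAM, OS_LIBS) take effect in the moz.build that calls it; templates can also invoke other templates, as Program() does with Binary() above. A hedged sketch of the pattern:

@template
def SharedLibrary(name):
    # Assignments inside a template apply to the calling moz.build.
    FORCE_SHARED_LIB = True
    LIBRARY_NAME = name

SharedLibrary("example")  # hypothetical library name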
+# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCALIZED_FILES += [ + "en-US/abc/*.abc", + "en-US/bar.ini", + "en-US/foo.js", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build new file mode 100644 index 0000000000..2b0cf472c9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build @@ -0,0 +1,32 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCALIZED_GENERATED_FILES += ["foo{AB_CD}.xyz"] + +foo = LOCALIZED_GENERATED_FILES["foo{AB_CD}.xyz"] +foo.script = "generate-foo.py" +foo.inputs = [ + "en-US/localized-input", + "non-localized-input", +] + +LOCALIZED_GENERATED_FILES += ["bar{AB_rCD}.xyz"] + +bar = LOCALIZED_GENERATED_FILES["bar{AB_rCD}.xyz"] +bar.script = "generate-foo.py" +bar.inputs = [ + # Absolute source path. + "/inner/locales/en-US/localized-input", + "non-localized-input", +] + +LOCALIZED_GENERATED_FILES += ["zot{AB_rCD}.xyz"] + +bar = LOCALIZED_GENERATED_FILES["zot{AB_rCD}.xyz"] +bar.script = "generate-foo.py" +bar.inputs = [ + # Relative source path. 
+ "locales/en-US/localized-input", + "non-localized-input", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build new file mode 100644 index 0000000000..26fb165e06 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build @@ -0,0 +1,22 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCALIZED_GENERATED_FILES += ["foo.xyz"] + +foo = LOCALIZED_GENERATED_FILES["foo.xyz"] +foo.script = "generate-foo.py" +foo.inputs = [ + "en-US/localized-input", + "non-localized-input", +] + +LOCALIZED_GENERATED_FILES += ["abc.xyz"] + +abc = LOCALIZED_GENERATED_FILES["abc.xyz"] +abc.script = "generate-foo.py" +abc.inputs = [ + "en-US/localized-input", + "non-localized-input", +] +abc.force = True diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build new file mode 100644 index 0000000000..f44325dfb1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build @@ -0,0 +1,15 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCALIZED_GENERATED_FILES += ["foo.xyz"] + +foo = LOCALIZED_GENERATED_FILES["foo.xyz"] +foo.script = "generate-foo.py" +foo.inputs = [ + "en-US/localized-input", + "non-localized-input", +] + +# Also check that using it in LOCALIZED_FILES does the right thing. +LOCALIZED_FILES += ["!foo.xyz"] diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build new file mode 100644 index 0000000000..8cec207128 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCALIZED_PP_FILES += [ + "en-US/bar.ini", + "en-US/foo.js", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build new file mode 100644 index 0000000000..8e15d10c43 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SharedLibrary("c_library") + +SOURCES = ["c-library.c"] diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build new file mode 100644 index 0000000000..27f2cd3d5d --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
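As the comment in this moz.build notes, the plain localized-generated-files fixture also verifies that a generated output can be consumed by LOCALIZED_FILES via the "!" prefix, which marks the entry as an objdir (generated) path rather than a source file:

LOCALIZED_GENERATED_FILES += ["foo.xyz"]
LOCALIZED_FILES += ["!foo.xyz"]  # "!" = take the generated file from the objdir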
+# http://creativecommons.org/publicdomain/zero/1.0/ + +Program("c_test_program") + +SOURCES = ["c_test_program.c"] diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build new file mode 100644 index 0000000000..db958d1d1f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SimplePrograms(["c_simple_program"], ext=".c") diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build new file mode 100644 index 0000000000..ee75ad0cb9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build @@ -0,0 +1,10 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SharedLibrary("cxx-library") + +SOURCES = [ + "c-source.c", + "cxx-library.cpp", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. 
+// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build new file mode 100644 index 0000000000..175f18c88a --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +Program("cxx_test_program") + +SOURCES = ["cxx_test_program.cpp"] diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp new file mode 100644 index 0000000000..3b09e769db --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp @@ -0,0 +1,2 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build new file mode 100644 index 0000000000..e055370900 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SimplePrograms(["cxx_simple_program"]) diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build new file mode 100644 index 0000000000..7f0a6b430b --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build @@ -0,0 +1,35 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += [ + "c-program", + "cxx-program", + "c-simple-programs", + "cxx-simple-programs", + "c-library", + "cxx-library", +] + + +@template +def Program(name): + PROGRAM = name + + +@template +def SimplePrograms(names, ext=".cpp"): + SIMPLE_PROGRAMS += names + SOURCES += ["%s%s" % (name, ext) for name in names] + + +@template +def Library(name): + LIBRARY_NAME = name + + +@template +def SharedLibrary(name): + Library(name) + + FORCE_SHARED_LIB = True diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build new file mode 100644 index 0000000000..62966a58e1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build @@ -0,0 +1,3 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
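The prog-lib-c-only tree builds each kind of target twice, once from pure C and once involving C++, so the backend's language handling can be asserted: an all-C program or library should link with the C driver, while the presence of a single C++ source (as in cxx-library, which mixes c-source.c and cxx-library.cpp) must switch the link to C++. The SimplePrograms template's ext parameter is what lets one fixture produce both flavors:

@template
def SimplePrograms(names, ext=".cpp"):
    # One source per program, named after the executable.
    SIMPLE_PROGRAMS += names
    SOURCES += ["%s%s" % (name, ext) for name in names]

SimplePrograms(["c_simple_program"], ext=".c")  # all-C: expect the C linker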
+# http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build new file mode 100644 index 0000000000..d8b952c014 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +Program("dist-bin") diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build new file mode 100644 index 0000000000..fc2f664c01 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIST_SUBDIR = "foo" +Program("dist-subdir") diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build new file mode 100644 index 0000000000..a0d5805262 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET = "final/target" +Program("final-target") diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build new file mode 100644 index 0000000000..d1d087fd45 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build @@ -0,0 +1,15 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Program(name): + PROGRAM = name + + +DIRS += [ + "dist-bin", + "dist-subdir", + "final-target", + "not-installed", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build new file mode 100644 index 0000000000..c725ab7326 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. 
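program-paths covers the install destinations a PROGRAM can end up with; roughly, going by the fixture directories:

# Sketch of the destinations exercised, one per fixture directory:
#   (default)                      -> dist/bin/dist-bin
#   DIST_SUBDIR = "foo"            -> dist/bin/foo/dist-subdir
#   FINAL_TARGET = "final/target"  -> final/target/final-target in the objdir
#   DIST_INSTALL = False           -> not-installed is built but never installed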
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIST_INSTALL = False +Program("not-installed") diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in b/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur b/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest b/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/foo.res b/python/mozbuild/mozbuild/test/backend/data/resources/foo.res new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/moz.build b/python/mozbuild/mozbuild/test/backend/data/resources/moz.build new file mode 100644 index 0000000000..619af26e64 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/resources/moz.build @@ -0,0 +1,9 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +RESOURCE_FILES += ["bar.res.in", "foo.res"] +RESOURCE_FILES.cursors += ["cursor.cur"] +RESOURCE_FILES.fonts += ["font1.ttf", "font2.ttf"] +RESOURCE_FILES.fonts.desktop += ["desktop1.ttf", "desktop2.ttf"] +RESOURCE_FILES.fonts.mobile += ["mobile.ttf"] +RESOURCE_FILES.tests += ["extra.manifest", "test.manifest"] diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest b/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml new file mode 100644 index 0000000000..0d778b2b0e --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "feature-library" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] + +[lib] +crate-type = ["staticlib"] + +[profile.dev] +panic = "abort" + +[profile.release] +panic = "abort" diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build new file mode 100644 index 0000000000..f17f29b0e7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build @@ -0,0 +1,20 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +@template +def RustLibrary(name, features): + """Template for Rust libraries.""" + Library(name) + + IS_RUST_LIBRARY = True + RUST_LIBRARY_FEATURES = features + + +RustLibrary("feature-library", ["musthave", "cantlivewithout"]) diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml new file mode 100644 index 0000000000..5e9e44632f --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "test-library" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] + +[lib] +crate-type = ["staticlib"] + +[profile.dev] +panic = "abort" + +[profile.release] +panic = "abort" diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build new file mode 100644 index 0000000000..b0f29a1ef5 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build @@ -0,0 +1,19 @@ +# Any copyright is dedicated to the Public Domain. 
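resources/moz.build shows the hierarchical behavior of RESOURCE_FILES: attribute access creates nested destination directories, so RESOURCE_FILES.fonts.desktop lands its files under res/fonts/desktop/. A sketch with a hypothetical category:

RESOURCE_FILES += ["top.res"]                # -> res/top.res
RESOURCE_FILES.icons += ["app.ico"]          # -> res/icons/app.ico (hypothetical)
RESOURCE_FILES.icons.small += ["app16.ico"]  # -> res/icons/small/app16.ico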
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +@template +def RustLibrary(name): + """Template for Rust libraries.""" + Library(name) + + IS_RUST_LIBRARY = True + + +RustLibrary("test-library") diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml new file mode 100644 index 0000000000..e0d400e070 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml @@ -0,0 +1,10 @@ +[package] +authors = ["The Mozilla Project Developers"] +name = "testing" +version = "0.0.1" + +[[bin]] +name = "target" + +[[bin]] +name = "host" diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build new file mode 100644 index 0000000000..f0efdb3799 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build @@ -0,0 +1,6 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +RUST_PROGRAMS += ["target"] +HOST_RUST_PROGRAMS += ["host"] diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build new file mode 100644 index 0000000000..cb635f6adb --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["code"] diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp b/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.s b/python/mozbuild/mozbuild/test/backend/data/sources/bar.s new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/baz.c b/python/mozbuild/mozbuild/test/backend/data/sources/baz.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm b/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp b/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm b/python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm b/python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/sources/moz.build new file mode 100644 index 0000000000..40d5a8d38d --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/sources/moz.build @@ -0,0 +1,26 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
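Between rust-library, rust-library-features, and rust-programs, the Rust fixtures cover both entry points: a RustLibrary template that tags an ordinary Library with IS_RUST_LIBRARY (plus RUST_LIBRARY_FEATURES for cargo feature flags), backed by a Cargo.toml building a staticlib, and RUST_PROGRAMS / HOST_RUST_PROGRAMS naming [[bin]] targets from Cargo.toml. Condensed into one sketch:

@template
def RustLibrary(name, features=None):
    Library(name)
    IS_RUST_LIBRARY = True
    if features:
        RUST_LIBRARY_FEATURES = features

RustLibrary("feature-library", ["musthave", "cantlivewithout"])

RUST_PROGRAMS += ["target"]     # [[bin]] "target", built for the target
HOST_RUST_PROGRAMS += ["host"]  # [[bin]] "host", built for the build host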
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +SOURCES += ["bar.s", "foo.asm"] + +HOST_SOURCES += ["bar.cpp", "foo.cpp"] +HOST_SOURCES += ["baz.c", "qux.c"] + +SOURCES += ["baz.c", "qux.c"] + +SOURCES += ["fuga.mm", "hoge.mm"] + +SOURCES += ["titi.S", "toto.S"] + +WASM_SOURCES += ["bar.cpp"] +WASM_SOURCES += ["baz.c"] diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/qux.c b/python/mozbuild/mozbuild/test/backend/data/sources/qux.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/titi.S b/python/mozbuild/mozbuild/test/backend/data/sources/titi.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/toto.S b/python/mozbuild/mozbuild/test/backend/data/sources/toto.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in new file mode 100644 index 0000000000..02ff0a3f90 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FOO := foo diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in new file mode 100644 index 0000000000..17c147d97a --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in @@ -0,0 +1,7 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +include $(DEPTH)/config/autoconf.mk + +include $(topsrcdir)/config/rules.mk + diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build new file mode 100644 index 0000000000..62966a58e1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build @@ -0,0 +1,3 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build new file mode 100644 index 0000000000..62966a58e1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build @@ -0,0 +1,3 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in new file mode 100644 index 0000000000..17c147d97a --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in @@ -0,0 +1,7 @@ +# Any copyright is dedicated to the Public Domain. 
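sources/moz.build lists a pair of files per compilation category so suffix handling can be checked end to end: .cpp and .c for C++ and C, .mm for Objective-C++, .S versus .s for preprocessed versus raw assembly (the conventional GNU distinction), .asm for MASM-style assembly, with some sources reappearing under HOST_SOURCES and WASM_SOURCES to exercise host and wasm object rules. Mixed suffixes are simply accumulated:

SOURCES += ["baz.c", "foo.cpp", "fuga.mm"]  # C, C++, Objective-C++
SOURCES += ["bar.s", "titi.S"]              # raw vs. preprocessed assembly
HOST_SOURCES += ["baz.c"]                   # compiled again for the build host
WASM_SOURCES += ["baz.c"]                   # and once more as a wasm object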
+# http://creativecommons.org/publicdomain/zero/1.0/ + +include $(DEPTH)/config/autoconf.mk + +include $(topsrcdir)/config/rules.mk + diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build new file mode 100644 index 0000000000..62966a58e1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build @@ -0,0 +1,3 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build new file mode 100644 index 0000000000..4f6e7cb318 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["dir1"] +DIRS += ["dir2"] +TEST_DIRS += ["dir3"] diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in new file mode 100644 index 0000000000..5331f1f051 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in @@ -0,0 +1 @@ +TEST = @MOZ_FOO@ diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build new file mode 100644 index 0000000000..bded13e07d --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
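stub0 checks Makefile stub generation in the recursive-make backend: dir1 and dir3 ship their own Makefile.in (which must be preserved and wired into autoconf.mk/rules.mk), while dir2 has only a moz.build, so the backend is expected to emit a stub Makefile for it. The traversal setup is just:

DIRS += ["dir1"]       # has a real Makefile.in
DIRS += ["dir2"]       # moz.build only: backend emits a stub
TEST_DIRS += ["dir3"]  # traversed as a test directory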
+# http://creativecommons.org/publicdomain/zero/1.0/ + +CONFIGURE_SUBST_FILES = ["foo"] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini new file mode 100644 index 0000000000..4f1335d6b1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini @@ -0,0 +1,6 @@ +[DEFAULT] +support-files = + another-file.sjs + data/** + +[test_sub.js] \ No newline at end of file diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini new file mode 100644 index 0000000000..a9860f3de8 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini @@ -0,0 +1,8 @@ +[DEFAULT] +support-files = + support-file.txt + !/child/test_sub.js + !/child/another-file.sjs + !/child/data/** + +[test_foo.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build new file mode 100644 index 0000000000..9df54dbc99 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. 
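substitute_config_files covers @VAR@ substitution: foo.in contains TEST = @MOZ_FOO@, and listing the output in CONFIGURE_SUBST_FILES makes the build write foo into the objdir with each @VAR@ replaced by the corresponding configure subst (so, assuming MOZ_FOO is set to foo in substs, the output reads TEST = foo):

CONFIGURE_SUBST_FILES = ["foo"]  # reads foo.in, writes objdir foo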
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["child/browser.ini"] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini new file mode 100644 index 0000000000..31d07b5af3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini @@ -0,0 +1 @@ +[test_bar.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini new file mode 100644 index 0000000000..cf7a3c44bd --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini @@ -0,0 +1,2 @@ +[test_foo.js] +[include:mochitest-common.ini] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build new file mode 100644 index 0000000000..8058c0b836 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build @@ -0,0 +1,6 @@ +# Any copyright is dedicated to the Public Domain. 
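test-manifest-shared-support exercises shared support files across manifests: support-files entries beginning with "!/" reference files owned by another manifest (here mochitest.ini borrows test_sub.js, another-file.sjs, and the data/** glob from browser.ini's directory), so they are installed once by their owner yet available to both suites. The manifest syntax, as in the fixture:

[DEFAULT]
support-files =
  support-file.txt
  !/child/test_sub.js
  !/child/data/**

[test_foo.js]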
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += [ + "mochitest.ini", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini new file mode 100644 index 0000000000..1f9816a899 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = support-file.txt + +[test_foo.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini new file mode 100644 index 0000000000..e2a2fc96a7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = support-file.txt + +[test_bar.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build new file mode 100644 index 0000000000..a86b934fa1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build @@ -0,0 +1,7 @@ +# Any copyright is dedicated to the Public Domain. 
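test-manifests-backend-sources checks that manifest includes are chased when attributing tests to manifests: the [include:mochitest-common.ini] section pulls test_bar.js into mochitest.ini's test set even though only mochitest.ini is listed in MOCHITEST_MANIFESTS:

[test_foo.js]
[include:mochitest-common.ini]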
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += [ + "mochitest1.ini", + "mochitest2.ini", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini new file mode 100644 index 0000000000..03d4f794e2 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini @@ -0,0 +1 @@ +[not_packaged.java] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini new file mode 100644 index 0000000000..009b2b2239 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini @@ -0,0 +1 @@ +[mochitest.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build new file mode 100644 index 0000000000..f0496e09d9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build @@ -0,0 +1,10 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += [ + "mochitest.ini", +] + +ANDROID_INSTRUMENTATION_MANIFESTS += [ + "instrumentation.ini", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini new file mode 100644 index 0000000000..0cddad8ba9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini @@ -0,0 +1,3 @@ +[DEFAULT] + +[test_bar.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini new file mode 100644 index 0000000000..81869e1fa0 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini @@ -0,0 +1,3 @@ +[DEFAULT] + +[mochitest.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build new file mode 100644 index 0000000000..42462a3059 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build @@ -0,0 +1,9 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +XPCSHELL_TESTS_MANIFESTS += [ + "dir1/xpcshell.ini", + "xpcshell.ini", +] + +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini new file mode 100644 index 0000000000..f6a5351e94 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = support/** + +[xpcshell.js] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build new file mode 100644 index 0000000000..eb83fd1826 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
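The *_MANIFESTS variables each map to a test suite, and the package-tests fixture checks which of them land in the test archive: mochitest.js from MOCHITEST_MANIFESTS should be packaged, while, judging by the fixture file name not_packaged.java, entries from ANDROID_INSTRUMENTATION_MANIFESTS are tracked but expected to stay out of the package:

MOCHITEST_MANIFESTS += ["mochitest.ini"]                      # packaged
ANDROID_INSTRUMENTATION_MANIFESTS += ["instrumentation.ini"]  # not packaged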
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["test", "src"] diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build new file mode 100644 index 0000000000..69cde19c29 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build @@ -0,0 +1,12 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries""" + LIBRARY_NAME = name + + +Library("foo") diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build new file mode 100644 index 0000000000..a43f4083b3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build @@ -0,0 +1,32 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET = "_tests/xpcshell/tests/mozbuildtest" + + +@template +def Library(name): + """Template for libraries""" + LIBRARY_NAME = name + + +@template +def SimplePrograms(names, ext=".cpp"): + """Template for simple program executables. + + Those have a single source with the same base name as the executable. + """ + SIMPLE_PROGRAMS += names + SOURCES += ["%s%s" % (name, ext) for name in names] + + +@template +def HostLibrary(name): + """Template for build tools libraries.""" + HOST_LIBRARY_NAME = name + + +Library("test-library") +HostLibrary("host-test-library") +SimplePrograms(["test-one", "test-two"]) diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/test_config/file.in b/python/mozbuild/mozbuild/test/backend/data/test_config/file.in new file mode 100644 index 0000000000..07aa30deb6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test_config/file.in @@ -0,0 +1,3 @@ +#ifdef foo +@foo@ +@bar@ diff --git a/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build b/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build new file mode 100644 index 0000000000..5cf4c78f90 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build @@ -0,0 +1,3 @@ +CONFIGURE_SUBST_FILES = [ + "file", +] diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build 
b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build new file mode 100644 index 0000000000..81595d2db3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build @@ -0,0 +1,11 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DELAYLOAD_DLLS = ["foo.dll", "bar.dll"] + +RCFILE = "foo.rc" +RCINCLUDE = "bar.rc" +DEFFILE = "baz.def" + +WIN32_EXE_LDFLAGS += ["-subsystem:console"] diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build new file mode 100644 index 0000000000..ae1fc0c370 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_LIBRARY = "test" +SOURCES += ["bar.cpp", "foo.cpp"] +LOCAL_INCLUDES += ["/includeA/foo"] +DEFINES["DEFINEFOO"] = True +DEFINES["DEFINEBAR"] = "bar" diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build b/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build new file mode 100644 index 0000000000..a0a888fa01 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
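variable_passthru collects variables the recursive-make backend is expected to forward into its generated makefiles essentially unmodified, Windows linking knobs in this case:

DELAYLOAD_DLLS = ["foo.dll", "bar.dll"]      # delay-loaded DLLs
RCFILE = "foo.rc"                            # resource script
RCINCLUDE = "bar.rc"
DEFFILE = "baz.def"                          # module-definition file
WIN32_EXE_LDFLAGS += ["-subsystem:console"]  # extra linker flags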
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["dir1"] + +Library("test") diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl b/python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl b/python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build b/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build new file mode 100644 index 0000000000..df521ac7c5 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build @@ -0,0 +1,6 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +XPIDL_MODULE = "my_module" +XPIDL_SOURCES = ["bar.idl", "foo.idl"] diff --git a/python/mozbuild/mozbuild/test/backend/test_build.py b/python/mozbuild/mozbuild/test/backend/test_build.py new file mode 100644 index 0000000000..3287ba5e57 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/test_build.py @@ -0,0 +1,265 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import sys +import unittest +from contextlib import contextmanager +from tempfile import mkdtemp + +import buildconfig +import mozpack.path as mozpath +import six +from mozfile import which +from mozpack.files import FileFinder +from mozunit import main + +from mozbuild.backend import get_backend_class +from mozbuild.backend.configenvironment import ConfigEnvironment +from mozbuild.backend.fastermake import FasterMakeBackend +from mozbuild.backend.recursivemake import RecursiveMakeBackend +from mozbuild.base import MozbuildObject +from mozbuild.frontend.emitter import TreeMetadataEmitter +from mozbuild.frontend.reader import BuildReader +from mozbuild.util import ensureParentDir + + +def make_path(): + try: + return buildconfig.substs["GMAKE"] + except KeyError: + fetches_dir = os.environ.get("MOZ_FETCHES_DIR") + extra_search_dirs = () + if fetches_dir: + extra_search_dirs = (os.path.join(fetches_dir, "mozmake"),) + # Fallback for when running the test without an objdir. + for name in ("gmake", "make", "mozmake", "gnumake", "mingw32-make"): + path = which(name, extra_search_dirs=extra_search_dirs) + if path: + return path + + +BASE_SUBSTS = [ + ("PYTHON", mozpath.normsep(sys.executable)), + ("PYTHON3", mozpath.normsep(sys.executable)), + ("MOZ_UI_LOCALE", "en-US"), + ("GMAKE", make_path()), +] + + +class TestBuild(unittest.TestCase): + def setUp(self): + self._old_env = dict(os.environ) + os.environ.pop("MOZCONFIG", None) + os.environ.pop("MOZ_OBJDIR", None) + os.environ.pop("MOZ_PGO", None) + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + @contextmanager + def do_test_backend(self, *backends, **kwargs): + # Create the objdir in the srcdir to ensure that they share + # the same drive on Windows. 
+ topobjdir = mkdtemp(dir=buildconfig.topsrcdir) + try: + config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir, **kwargs) + reader = BuildReader(config) + emitter = TreeMetadataEmitter(config) + moz_build = mozpath.join(config.topsrcdir, "test.mozbuild") + definitions = list(emitter.emit(reader.read_mozbuild(moz_build, config))) + for backend in backends: + backend(config).consume(definitions) + + yield config + except Exception: + raise + finally: + if not os.environ.get("MOZ_NO_CLEANUP"): + shutil.rmtree(topobjdir) + + @contextmanager + def line_handler(self): + lines = [] + + def handle_make_line(line): + lines.append(line) + + try: + yield handle_make_line + except Exception: + print("\n".join(lines)) + raise + + if os.environ.get("MOZ_VERBOSE_MAKE"): + print("\n".join(lines)) + + def test_recursive_make(self): + substs = list(BASE_SUBSTS) + with self.do_test_backend(RecursiveMakeBackend, substs=substs) as config: + build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir) + build._config_environment = config + overrides = [ + "install_manifest_depends=", + "MOZ_JAR_MAKER_FILE_FORMAT=flat", + "TEST_MOZBUILD=1", + ] + with self.line_handler() as handle_make_line: + build._run_make( + directory=config.topobjdir, + target=overrides, + silent=False, + line_handler=handle_make_line, + ) + + self.validate(config) + + def test_faster_recursive_make(self): + substs = list(BASE_SUBSTS) + [ + ("BUILD_BACKENDS", "FasterMake+RecursiveMake"), + ] + with self.do_test_backend( + get_backend_class("FasterMake+RecursiveMake"), substs=substs + ) as config: + buildid = mozpath.join(config.topobjdir, "config", "buildid") + ensureParentDir(buildid) + with open(buildid, "w") as fh: + fh.write("20100101012345\n") + + build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir) + build._config_environment = config + overrides = [ + "install_manifest_depends=", + "MOZ_JAR_MAKER_FILE_FORMAT=flat", + "TEST_MOZBUILD=1", + ] + with self.line_handler() as handle_make_line: + build._run_make( + directory=config.topobjdir, + target=overrides, + silent=False, + line_handler=handle_make_line, + ) + + self.validate(config) + + def test_faster_make(self): + substs = list(BASE_SUBSTS) + [ + ("MOZ_BUILD_APP", "dummy_app"), + ("MOZ_WIDGET_TOOLKIT", "dummy_widget"), + ] + with self.do_test_backend( + RecursiveMakeBackend, FasterMakeBackend, substs=substs + ) as config: + buildid = mozpath.join(config.topobjdir, "config", "buildid") + ensureParentDir(buildid) + with open(buildid, "w") as fh: + fh.write("20100101012345\n") + + build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir) + build._config_environment = config + overrides = [ + "TEST_MOZBUILD=1", + ] + with self.line_handler() as handle_make_line: + build._run_make( + directory=mozpath.join(config.topobjdir, "faster"), + target=overrides, + silent=False, + line_handler=handle_make_line, + ) + + self.validate(config) + + def validate(self, config): + self.maxDiff = None + test_path = mozpath.join( + "$SRCDIR", + "python", + "mozbuild", + "mozbuild", + "test", + "backend", + "data", + "build", + ) + + result = { + p: six.ensure_text(f.open().read()) + for p, f in FileFinder(mozpath.join(config.topobjdir, "dist")) + } + self.assertTrue(len(result)) + self.assertEqual( + result, + { + "bin/baz.ini": "baz.ini: FOO is foo\n", + "bin/child/bar.ini": "bar.ini\n", + "bin/child2/foo.css": "foo.css: FOO is foo\n", + "bin/child2/qux.ini": "qux.ini: BAR is not defined\n", + "bin/chrome.manifest": "manifest chrome/foo.manifest\n" + 
"manifest components/components.manifest\n", + "bin/chrome/foo.manifest": "content bar foo/child/\n" + "content foo foo/\n" + "override chrome://foo/bar.svg#hello " + "chrome://bar/bar.svg#hello\n", + "bin/chrome/foo/bar.js": "bar.js\n", + "bin/chrome/foo/child/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n' + % (test_path), + "bin/chrome/foo/child/hoge.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n' + % (test_path), + "bin/chrome/foo/foo.css": "foo.css: FOO is foo\n", + "bin/chrome/foo/foo.js": "foo.js\n", + "bin/chrome/foo/qux.js": "bar.js\n", + "bin/components/bar.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n' + % (test_path), + "bin/components/components.manifest": "component {foo} foo.js\ncomponent {bar} bar.js\n", # NOQA: E501 + "bin/components/foo.js": "foo.js\n", + "bin/defaults/pref/prefs.js": "prefs.js\n", + "bin/foo.ini": "foo.ini\n", + "bin/modules/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n' + % (test_path), + "bin/modules/child/bar.jsm": "bar.jsm\n", + "bin/modules/child2/qux.jsm": '//@line 4 "%s/qux.jsm"\nqux.jsm: BAR is not defined\n' # NOQA: E501 + % (test_path), + "bin/modules/foo.jsm": "foo.jsm\n", + "bin/res/resource": "resource\n", + "bin/res/child/resource2": "resource2\n", + "bin/app/baz.ini": "baz.ini: FOO is bar\n", + "bin/app/child/bar.ini": "bar.ini\n", + "bin/app/child2/qux.ini": "qux.ini: BAR is defined\n", + "bin/app/chrome.manifest": "manifest chrome/foo.manifest\n" + "manifest components/components.manifest\n", + "bin/app/chrome/foo.manifest": "content bar foo/child/\n" + "content foo foo/\n" + "override chrome://foo/bar.svg#hello " + "chrome://bar/bar.svg#hello\n", + "bin/app/chrome/foo/bar.js": "bar.js\n", + "bin/app/chrome/foo/child/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n' + % (test_path), + "bin/app/chrome/foo/child/hoge.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n' + % (test_path), + "bin/app/chrome/foo/foo.css": "foo.css: FOO is bar\n", + "bin/app/chrome/foo/foo.js": "foo.js\n", + "bin/app/chrome/foo/qux.js": "bar.js\n", + "bin/app/components/bar.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n' + % (test_path), + "bin/app/components/components.manifest": "component {foo} foo.js\ncomponent {bar} bar.js\n", # NOQA: E501 + "bin/app/components/foo.js": "foo.js\n", + "bin/app/defaults/preferences/prefs.js": "prefs.js\n", + "bin/app/foo.css": "foo.css: FOO is bar\n", + "bin/app/foo.ini": "foo.ini\n", + "bin/app/modules/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n' + % (test_path), + "bin/app/modules/child/bar.jsm": "bar.jsm\n", + "bin/app/modules/child2/qux.jsm": '//@line 2 "%s/qux.jsm"\nqux.jsm: BAR is defined\n' # NOQA: E501 + % (test_path), + "bin/app/modules/foo.jsm": "foo.jsm\n", + }, + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/backend/test_configenvironment.py b/python/mozbuild/mozbuild/test/backend/test_configenvironment.py new file mode 100644 index 0000000000..7900cdd737 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/test_configenvironment.py @@ -0,0 +1,73 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import os
+import unittest
+
+import mozpack.path as mozpath
+from mozunit import main
+
+import mozbuild.backend.configenvironment as ConfigStatus
+from mozbuild.util import ReadOnlyDict
+
+
+class ConfigEnvironment(ConfigStatus.ConfigEnvironment):
+    def __init__(self, *args, **kwargs):
+        ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)
+        # Be helpful to unit tests
+        if "top_srcdir" not in self.substs:
+            if os.path.isabs(self.topsrcdir):
+                top_srcdir = self.topsrcdir.replace(os.sep, "/")
+            else:
+                top_srcdir = mozpath.relpath(self.topsrcdir, self.topobjdir).replace(
+                    os.sep, "/"
+                )
+
+            d = dict(self.substs)
+            d["top_srcdir"] = top_srcdir
+            self.substs = ReadOnlyDict(d)
+
+
+class TestEnvironment(unittest.TestCase):
+    def test_auto_substs(self):
+        """Test the automatically set values of ACDEFINES, ALLSUBSTS
+        and ALLEMPTYSUBSTS.
+        """
+        env = ConfigEnvironment(
+            ".",
+            ".",
+            defines={"foo": "bar", "baz": "qux 42", "abc": "d'e'f"},
+            substs={
+                "FOO": "bar",
+                "FOOBAR": "",
+                "ABC": "def",
+                "bar": "baz qux",
+                "zzz": '"abc def"',
+                "qux": "",
+            },
+        )
+        # The original order of the defines needs to be respected in ACDEFINES
+        self.assertEqual(
+            env.substs["ACDEFINES"],
+            """-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""",
+        )
+        # Likewise for ALLSUBSTS, which also must contain ACDEFINES
+        self.assertEqual(
+            env.substs["ALLSUBSTS"],
+            '''ABC = def
+ACDEFINES = -Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar
+FOO = bar
+bar = baz qux
+zzz = "abc def"''',
+        )
+        # ALLEMPTYSUBSTS contains all substs with no value.
+        self.assertEqual(
+            env.substs["ALLEMPTYSUBSTS"],
+            """FOOBAR =
+qux =""",
+        )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_database.py b/python/mozbuild/mozbuild/test/backend/test_database.py
new file mode 100644
index 0000000000..3bc0dfefb1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_database.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
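+
+# A compilation database is a JSON array of command objects with "directory",
+# "command" and "file" keys, e.g. (illustrative paths only):
+#
+#   [
+#       {
+#           "directory": "/objdir",
+#           "command": "clang -o /dev/null -c -ferror-limit=0 /srcdir/foo.c",
+#           "file": "/srcdir/foo.c",
+#       }
+#   ]
+#
+# The tests below check the real entries emitted by each backend.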
+
+import json
+import os
+
+import six
+from mozunit import main
+
+from mozbuild.backend.clangd import ClangdBackend
+from mozbuild.backend.static_analysis import StaticAnalysisBackend
+from mozbuild.compilation.database import CompileDBBackend
+from mozbuild.test.backend.common import BackendTester
+
+
+class TestCompileDBBackends(BackendTester):
+    def perform_check(self, compile_commands_path, topsrcdir, topobjdir):
+        self.assertTrue(os.path.exists(compile_commands_path))
+        compile_db = json.loads(open(compile_commands_path, "r").read())
+
+        # Verify that we have the same number of items
+        self.assertEqual(len(compile_db), 4)
+
+        expected_db = [
+            {
+                "directory": topobjdir,
+                "command": "clang -o /dev/null -c -ferror-limit=0 {}/bar.c".format(
+                    topsrcdir
+                ),
+                "file": "{}/bar.c".format(topsrcdir),
+            },
+            {
+                "directory": topobjdir,
+                "command": "clang -o /dev/null -c -ferror-limit=0 {}/foo.c".format(
+                    topsrcdir
+                ),
+                "file": "{}/foo.c".format(topsrcdir),
+            },
+            {
+                "directory": topobjdir,
+                "command": "clang++ -o /dev/null -c -ferror-limit=0 {}/baz.cpp".format(
+                    topsrcdir
+                ),
+                "file": "{}/baz.cpp".format(topsrcdir),
+            },
+            {
+                "directory": topobjdir,
+                "command": "clang++ -o /dev/null -c -ferror-limit=0 {}/qux.cpp".format(
+                    topsrcdir
+                ),
+                "file": "{}/qux.cpp".format(topsrcdir),
+            },
+        ]
+
+        # Verify item consistency against `expected_db`
+        six.assertCountEqual(self, compile_db, expected_db)
+
+    def test_database(self):
+        """Ensure we can generate a `compile_commands.json` and that it is correct."""
+
+        env = self._consume("database", CompileDBBackend)
+        compile_commands_path = os.path.join(env.topobjdir, "compile_commands.json")
+
+        self.perform_check(compile_commands_path, env.topsrcdir, env.topobjdir)
+
+    def test_clangd(self):
+        """Ensure we can generate a correct `compile_commands.json` for use
+        by the ClangdBackend."""
+
+        env = self._consume("database", ClangdBackend)
+        compile_commands_path = os.path.join(
+            env.topobjdir, "clangd", "compile_commands.json"
+        )
+
+        self.perform_check(compile_commands_path, env.topsrcdir, env.topobjdir)
+
+    def test_static_analysis(self):
+        """Ensure we can generate a correct `compile_commands.json` for use
+        by the StaticAnalysisBackend."""
+
+        env = self._consume("database", StaticAnalysisBackend)
+        compile_commands_path = os.path.join(
+            env.topobjdir, "static-analysis", "compile_commands.json"
+        )
+
+        self.perform_check(compile_commands_path, env.topsrcdir, env.topobjdir)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_fastermake.py b/python/mozbuild/mozbuild/test/backend/test_fastermake.py
new file mode 100644
index 0000000000..1c9670b091
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_fastermake.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
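+
+# FasterMake writes plain-text install manifests under $OBJDIR/faster.  One
+# can expand such a manifest into a FileRegistry to see what it would install
+# (a minimal sketch; the path is illustrative):
+#
+#   m = InstallManifest(path="objdir/faster/install_dist_bin")
+#   reg = FileRegistry()
+#   m.populate_registry(reg)
+#   for dest, f in reg:
+#       print(dest, f.path)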
+ +import os + +import mozpack.path as mozpath +from mozpack.copier import FileRegistry +from mozpack.manifests import InstallManifest +from mozunit import main + +from mozbuild.backend.fastermake import FasterMakeBackend +from mozbuild.test.backend.common import BackendTester + + +class TestFasterMakeBackend(BackendTester): + def test_basic(self): + """Ensure the FasterMakeBackend works without error.""" + env = self._consume("stub0", FasterMakeBackend) + self.assertTrue( + os.path.exists(mozpath.join(env.topobjdir, "backend.FasterMakeBackend")) + ) + self.assertTrue( + os.path.exists(mozpath.join(env.topobjdir, "backend.FasterMakeBackend.in")) + ) + + def test_final_target_files_wildcard(self): + """Ensure that wildcards in FINAL_TARGET_FILES work properly.""" + env = self._consume("final-target-files-wildcard", FasterMakeBackend) + m = InstallManifest( + path=mozpath.join(env.topobjdir, "faster", "install_dist_bin") + ) + self.assertEqual(len(m), 1) + reg = FileRegistry() + m.populate_registry(reg) + expected = [("foo/bar.xyz", "bar.xyz"), ("foo/foo.xyz", "foo.xyz")] + actual = [(path, mozpath.relpath(f.path, env.topsrcdir)) for (path, f) in reg] + self.assertEqual(expected, actual) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py b/python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py new file mode 100644 index 0000000000..13b1656981 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py @@ -0,0 +1,173 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import unittest +from shutil import rmtree +from tempfile import mkdtemp + +import buildconfig +import mozpack.path as mozpath +from mozunit import main + +from mozbuild.backend.configenvironment import PartialConfigEnvironment + +config = { + "defines": { + "MOZ_FOO": "1", + "MOZ_BAR": "2", + }, + "substs": { + "MOZ_SUBST_1": "1", + "MOZ_SUBST_2": "2", + "CPP": "cpp", + }, +} + + +class TestPartial(unittest.TestCase): + def setUp(self): + self._old_env = dict(os.environ) + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + def _objdir(self): + objdir = mkdtemp(dir=buildconfig.topsrcdir) + self.addCleanup(rmtree, objdir) + return objdir + + def test_auto_substs(self): + """Test the automatically set values of ACDEFINES, and ALLDEFINES""" + env = PartialConfigEnvironment(self._objdir()) + env.write_vars(config) + self.assertEqual(env.substs["ACDEFINES"], "-DMOZ_BAR=2 -DMOZ_FOO=1") + self.assertEqual( + env.defines["ALLDEFINES"], + { + "MOZ_BAR": "2", + "MOZ_FOO": "1", + }, + ) + + def test_remove_subst(self): + """Test removing a subst from the config. 
The file should be overwritten with 'None'""" + env = PartialConfigEnvironment(self._objdir()) + path = mozpath.join(env.topobjdir, "config.statusd", "substs", "MYSUBST") + myconfig = config.copy() + env.write_vars(myconfig) + with self.assertRaises(KeyError): + _ = env.substs["MYSUBST"] + self.assertFalse(os.path.exists(path)) + + myconfig["substs"]["MYSUBST"] = "new" + env.write_vars(myconfig) + + self.assertEqual(env.substs["MYSUBST"], "new") + self.assertTrue(os.path.exists(path)) + + del myconfig["substs"]["MYSUBST"] + env.write_vars(myconfig) + with self.assertRaises(KeyError): + _ = env.substs["MYSUBST"] + # Now that the subst is gone, the file still needs to be present so that + # make can update dependencies correctly. Overwriting the file with + # 'None' is the same as deleting it as far as the + # PartialConfigEnvironment is concerned, but make can't track a + # dependency on a file that doesn't exist. + self.assertTrue(os.path.exists(path)) + + def _assert_deps(self, env, deps): + deps = sorted( + [ + "$(wildcard %s)" % (mozpath.join(env.topobjdir, "config.statusd", d)) + for d in deps + ] + ) + self.assertEqual(sorted(env.get_dependencies()), deps) + + def test_dependencies(self): + """Test getting dependencies on defines and substs.""" + env = PartialConfigEnvironment(self._objdir()) + env.write_vars(config) + self._assert_deps(env, []) + + self.assertEqual(env.defines["MOZ_FOO"], "1") + self._assert_deps(env, ["defines/MOZ_FOO"]) + + self.assertEqual(env.defines["MOZ_BAR"], "2") + self._assert_deps(env, ["defines/MOZ_FOO", "defines/MOZ_BAR"]) + + # Getting a define again shouldn't add a redundant dependency + self.assertEqual(env.defines["MOZ_FOO"], "1") + self._assert_deps(env, ["defines/MOZ_FOO", "defines/MOZ_BAR"]) + + self.assertEqual(env.substs["MOZ_SUBST_1"], "1") + self._assert_deps( + env, ["defines/MOZ_FOO", "defines/MOZ_BAR", "substs/MOZ_SUBST_1"] + ) + + with self.assertRaises(KeyError): + _ = env.substs["NON_EXISTENT"] + self._assert_deps( + env, + [ + "defines/MOZ_FOO", + "defines/MOZ_BAR", + "substs/MOZ_SUBST_1", + "substs/NON_EXISTENT", + ], + ) + self.assertEqual(env.substs.get("NON_EXISTENT"), None) + + def test_set_subst(self): + """Test setting a subst""" + env = PartialConfigEnvironment(self._objdir()) + env.write_vars(config) + + self.assertEqual(env.substs["MOZ_SUBST_1"], "1") + env.substs["MOZ_SUBST_1"] = "updated" + self.assertEqual(env.substs["MOZ_SUBST_1"], "updated") + + # A new environment should pull the result from the file again. + newenv = PartialConfigEnvironment(env.topobjdir) + self.assertEqual(newenv.substs["MOZ_SUBST_1"], "1") + + def test_env_override(self): + """Test overriding a subst with an environment variable""" + env = PartialConfigEnvironment(self._objdir()) + env.write_vars(config) + + self.assertEqual(env.substs["MOZ_SUBST_1"], "1") + self.assertEqual(env.substs["CPP"], "cpp") + + # Reset the environment and set some environment variables. + env = PartialConfigEnvironment(env.topobjdir) + os.environ["MOZ_SUBST_1"] = "subst 1 environ" + os.environ["CPP"] = "cpp environ" + + # The MOZ_SUBST_1 should be overridden by the environment, while CPP is + # a special variable and should not. 
+ self.assertEqual(env.substs["MOZ_SUBST_1"], "subst 1 environ") + self.assertEqual(env.substs["CPP"], "cpp") + + def test_update(self): + """Test calling update on the substs or defines pseudo dicts""" + env = PartialConfigEnvironment(self._objdir()) + env.write_vars(config) + + mysubsts = {"NEW": "new"} + mysubsts.update(env.substs.iteritems()) + self.assertEqual(mysubsts["NEW"], "new") + self.assertEqual(mysubsts["CPP"], "cpp") + + mydefines = {"DEBUG": "1"} + mydefines.update(env.defines.iteritems()) + self.assertEqual(mydefines["DEBUG"], "1") + self.assertEqual(mydefines["MOZ_FOO"], "1") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/backend/test_recursivemake.py b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py new file mode 100644 index 0000000000..acbada060b --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py @@ -0,0 +1,1307 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import io +import os +import unittest + +import mozpack.path as mozpath +import six +import six.moves.cPickle as pickle +from mozpack.manifests import InstallManifest +from mozunit import main + +from mozbuild.backend.recursivemake import RecursiveMakeBackend, RecursiveMakeTraversal +from mozbuild.backend.test_manifest import TestManifestBackend +from mozbuild.frontend.emitter import TreeMetadataEmitter +from mozbuild.frontend.reader import BuildReader +from mozbuild.test.backend.common import BackendTester + + +class TestRecursiveMakeTraversal(unittest.TestCase): + def test_traversal(self): + traversal = RecursiveMakeTraversal() + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("", dirs=["D"]) + traversal.add("A") + traversal.add("B", dirs=["E", "F"]) + traversal.add("C", dirs=["G", "H"]) + traversal.add("D", dirs=["I", "K"]) + traversal.add("D", dirs=["J", "L"]) + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I", dirs=["M", "N"]) + traversal.add("J", dirs=["O", "P"]) + traversal.add("K", dirs=["Q", "R"]) + traversal.add("L", dirs=["S"]) + traversal.add("M") + traversal.add("N", dirs=["T"]) + traversal.add("O") + traversal.add("P", dirs=["U"]) + traversal.add("Q") + traversal.add("R", dirs=["V"]) + traversal.add("S", dirs=["W"]) + traversal.add("T") + traversal.add("U") + traversal.add("V") + traversal.add("W", dirs=["X"]) + traversal.add("X") + + parallels = set(("G", "H", "I", "J", "O", "P", "Q", "R", "U")) + + def filter(current, subdirs): + return ( + current, + [d for d in subdirs.dirs if d in parallels], + [d for d in subdirs.dirs if d not in parallels], + ) + + start, deps = traversal.compute_dependencies(filter) + self.assertEqual(start, ("X",)) + self.maxDiff = None + self.assertEqual( + deps, + { + "A": ("",), + "B": ("A",), + "C": ("F",), + "D": ("G", "H"), + "E": ("B",), + "F": ("E",), + "G": ("C",), + "H": ("C",), + "I": ("D",), + "J": ("D",), + "K": ("T", "O", "U"), + "L": ("Q", "V"), + "M": ("I",), + "N": ("M",), + "O": ("J",), + "P": ("J",), + "Q": ("K",), + "R": ("K",), + "S": ("L",), + "T": ("N",), + "U": ("P",), + "V": ("R",), + "W": ("S",), + "X": ("W",), + }, + ) + + self.assertEqual( + list(traversal.traverse("", filter)), + [ + "", + "A", + "B", + "E", + "F", + "C", + "G", + "H", + "D", + "I", + "M", + "N", + "T", + "J", + "O", + "P", + "U", + "K", + "Q", + "R", + "V", + "L", + "S", + "W", + "X", + 
], + ) + + self.assertEqual(list(traversal.traverse("C", filter)), ["C", "G", "H"]) + + def test_traversal_2(self): + traversal = RecursiveMakeTraversal() + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("A") + traversal.add("B", dirs=["D", "E", "F"]) + traversal.add("C", dirs=["G", "H", "I"]) + traversal.add("D") + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I") + + start, deps = traversal.compute_dependencies() + self.assertEqual(start, ("I",)) + self.assertEqual( + deps, + { + "A": ("",), + "B": ("A",), + "C": ("F",), + "D": ("B",), + "E": ("D",), + "F": ("E",), + "G": ("C",), + "H": ("G",), + "I": ("H",), + }, + ) + + def test_traversal_filter(self): + traversal = RecursiveMakeTraversal() + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("A") + traversal.add("B", dirs=["D", "E", "F"]) + traversal.add("C", dirs=["G", "H", "I"]) + traversal.add("D") + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I") + + def filter(current, subdirs): + if current == "B": + current = None + return current, [], subdirs.dirs + + start, deps = traversal.compute_dependencies(filter) + self.assertEqual(start, ("I",)) + self.assertEqual( + deps, + { + "A": ("",), + "C": ("F",), + "D": ("A",), + "E": ("D",), + "F": ("E",), + "G": ("C",), + "H": ("G",), + "I": ("H",), + }, + ) + + def test_traversal_parallel(self): + traversal = RecursiveMakeTraversal() + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("A") + traversal.add("B", dirs=["D", "E", "F"]) + traversal.add("C", dirs=["G", "H", "I"]) + traversal.add("D") + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I") + traversal.add("J") + + def filter(current, subdirs): + return current, subdirs.dirs, [] + + start, deps = traversal.compute_dependencies(filter) + self.assertEqual(start, ("A", "D", "E", "F", "G", "H", "I", "J")) + self.assertEqual( + deps, + { + "A": ("",), + "B": ("",), + "C": ("",), + "D": ("B",), + "E": ("B",), + "F": ("B",), + "G": ("C",), + "H": ("C",), + "I": ("C",), + "J": ("",), + }, + ) + + +class TestRecursiveMakeBackend(BackendTester): + def test_basic(self): + """Ensure the RecursiveMakeBackend works without error.""" + env = self._consume("stub0", RecursiveMakeBackend) + self.assertTrue( + os.path.exists(mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend")) + ) + self.assertTrue( + os.path.exists( + mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend.in") + ) + ) + + def test_output_files(self): + """Ensure proper files are generated.""" + env = self._consume("stub0", RecursiveMakeBackend) + + expected = ["", "dir1", "dir2"] + + for d in expected: + out_makefile = mozpath.join(env.topobjdir, d, "Makefile") + out_backend = mozpath.join(env.topobjdir, d, "backend.mk") + + self.assertTrue(os.path.exists(out_makefile)) + self.assertTrue(os.path.exists(out_backend)) + + def test_makefile_conversion(self): + """Ensure Makefile.in is converted properly.""" + env = self._consume("stub0", RecursiveMakeBackend) + + p = mozpath.join(env.topobjdir, "Makefile") + + lines = [ + l.strip() for l in open(p, "rt").readlines()[1:] if not l.startswith("#") + ] + self.assertEqual( + lines, + [ + "DEPTH := .", + "topobjdir := %s" % env.topobjdir, + "topsrcdir := %s" % env.topsrcdir, + "srcdir := %s" % env.topsrcdir, + "srcdir_rel := %s" % mozpath.relpath(env.topsrcdir, env.topobjdir), + "relativesrcdir := .", + "include $(DEPTH)/config/autoconf.mk", + "", + "FOO := 
foo", + "", + "include $(topsrcdir)/config/recurse.mk", + ], + ) + + def test_missing_makefile_in(self): + """Ensure missing Makefile.in results in Makefile creation.""" + env = self._consume("stub0", RecursiveMakeBackend) + + p = mozpath.join(env.topobjdir, "dir2", "Makefile") + self.assertTrue(os.path.exists(p)) + + lines = [l.strip() for l in open(p, "rt").readlines()] + self.assertEqual(len(lines), 10) + + self.assertTrue(lines[0].startswith("# THIS FILE WAS AUTOMATICALLY")) + + def test_backend_mk(self): + """Ensure backend.mk file is written out properly.""" + env = self._consume("stub0", RecursiveMakeBackend) + + p = mozpath.join(env.topobjdir, "backend.mk") + + lines = [l.strip() for l in open(p, "rt").readlines()[2:]] + self.assertEqual(lines, ["DIRS := dir1 dir2"]) + + # Make env.substs writable to add ENABLE_TESTS + env.substs = dict(env.substs) + env.substs["ENABLE_TESTS"] = "1" + self._consume("stub0", RecursiveMakeBackend, env=env) + p = mozpath.join(env.topobjdir, "backend.mk") + + lines = [l.strip() for l in open(p, "rt").readlines()[2:]] + self.assertEqual(lines, ["DIRS := dir1 dir2 dir3"]) + + def test_mtime_no_change(self): + """Ensure mtime is not updated if file content does not change.""" + + env = self._consume("stub0", RecursiveMakeBackend) + + makefile_path = mozpath.join(env.topobjdir, "Makefile") + backend_path = mozpath.join(env.topobjdir, "backend.mk") + makefile_mtime = os.path.getmtime(makefile_path) + backend_mtime = os.path.getmtime(backend_path) + + reader = BuildReader(env) + emitter = TreeMetadataEmitter(env) + backend = RecursiveMakeBackend(env) + backend.consume(emitter.emit(reader.read_topsrcdir())) + + self.assertEqual(os.path.getmtime(makefile_path), makefile_mtime) + self.assertEqual(os.path.getmtime(backend_path), backend_mtime) + + def test_substitute_config_files(self): + """Ensure substituted config files are produced.""" + env = self._consume("substitute_config_files", RecursiveMakeBackend) + + p = mozpath.join(env.topobjdir, "foo") + self.assertTrue(os.path.exists(p)) + lines = [l.strip() for l in open(p, "rt").readlines()] + self.assertEqual(lines, ["TEST = foo"]) + + def test_install_substitute_config_files(self): + """Ensure we recurse into the dirs that install substituted config files.""" + env = self._consume("install_substitute_config_files", RecursiveMakeBackend) + + root_deps_path = mozpath.join(env.topobjdir, "root-deps.mk") + lines = [l.strip() for l in open(root_deps_path, "rt").readlines()] + + # Make sure we actually recurse into the sub directory during export to + # install the subst file. 
+ self.assertTrue(any(l == "recurse_export: sub/export" for l in lines)) + + def test_variable_passthru(self): + """Ensure variable passthru is written out correctly.""" + env = self._consume("variable_passthru", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = { + "RCFILE": ["RCFILE := $(srcdir)/foo.rc"], + "RCINCLUDE": ["RCINCLUDE := $(srcdir)/bar.rc"], + "WIN32_EXE_LDFLAGS": ["WIN32_EXE_LDFLAGS += -subsystem:console"], + } + + for var, val in expected.items(): + # print("test_variable_passthru[%s]" % (var)) + found = [str for str in lines if str.startswith(var)] + self.assertEqual(found, val) + + def test_sources(self): + """Ensure SOURCES, HOST_SOURCES and WASM_SOURCES are handled properly.""" + env = self._consume("sources", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = { + "ASFILES": ["ASFILES += $(srcdir)/bar.s", "ASFILES += $(srcdir)/foo.asm"], + "CMMSRCS": ["CMMSRCS += $(srcdir)/fuga.mm", "CMMSRCS += $(srcdir)/hoge.mm"], + "CSRCS": ["CSRCS += $(srcdir)/baz.c", "CSRCS += $(srcdir)/qux.c"], + "HOST_CPPSRCS": [ + "HOST_CPPSRCS += $(srcdir)/bar.cpp", + "HOST_CPPSRCS += $(srcdir)/foo.cpp", + ], + "HOST_CSRCS": [ + "HOST_CSRCS += $(srcdir)/baz.c", + "HOST_CSRCS += $(srcdir)/qux.c", + ], + "SSRCS": ["SSRCS += $(srcdir)/titi.S", "SSRCS += $(srcdir)/toto.S"], + "WASM_CSRCS": ["WASM_CSRCS += $(srcdir)/baz.c"], + "WASM_CPPSRCS": ["WASM_CPPSRCS += $(srcdir)/bar.cpp"], + } + + for var, val in expected.items(): + found = [str for str in lines if str.startswith(var)] + self.assertEqual(found, val) + + def test_exports(self): + """Ensure EXPORTS is handled properly.""" + env = self._consume("exports", RecursiveMakeBackend) + + # EXPORTS files should appear in the dist_include install manifest. 
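+        # The hierarchical EXPORTS variable in moz.build drives this, along
+        # the lines of (illustrative):
+        #
+        #   EXPORTS += ["foo.h"]
+        #   EXPORTS.mozilla += ["mozilla1.h"]
+        #   EXPORTS.mozilla.dom += ["dom2.h"]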
+ m = InstallManifest( + path=mozpath.join( + env.topobjdir, "_build_manifests", "install", "dist_include" + ) + ) + self.assertEqual(len(m), 7) + self.assertIn("foo.h", m) + self.assertIn("mozilla/mozilla1.h", m) + self.assertIn("mozilla/dom/dom2.h", m) + + def test_generated_files(self): + """Ensure GENERATED_FILES is handled properly.""" + env = self._consume("generated-files", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "include $(topsrcdir)/config/AB_rCD.mk", + "PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub", + "bar.c: $(MDDEPDIR)/bar.c.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp", + "$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py" % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "EXPORT_TARGETS += $(MDDEPDIR)/foo.h.stub", + "foo.h: $(MDDEPDIR)/foo.h.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.h.pp", + "$(MDDEPDIR)/foo.h.stub: %s/generate-foo.py $(srcdir)/foo-data" + % (env.topsrcdir), + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)" # noqa + % (env.topsrcdir), + "@$(TOUCH) $@", + "", + ] + + self.maxDiff = None + self.assertEqual(lines, expected) + + def test_generated_files_force(self): + """Ensure GENERATED_FILES with .force is handled properly.""" + env = self._consume("generated-files-force", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "include $(topsrcdir)/config/AB_rCD.mk", + "PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub", + "bar.c: $(MDDEPDIR)/bar.c.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp", + "$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE" % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "PRE_COMPILE_TARGETS += $(MDDEPDIR)/foo.c.stub", + "foo.c: $(MDDEPDIR)/foo.c.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.c.pp", + "$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data" + % (env.topsrcdir), + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)" # noqa + % (env.topsrcdir), + "@$(TOUCH) $@", + "", + ] + + self.maxDiff = None + self.assertEqual(lines, expected) + + def test_localized_generated_files(self): + """Ensure LOCALIZED_GENERATED_FILES is handled properly.""" + env = self._consume("localized-generated-files", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "include $(topsrcdir)/config/AB_rCD.mk", + "MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub", + "foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp", + "$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub 
$(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "LOCALIZED_FILES_0_FILES += foo.xyz", + "LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/", + "LOCALIZED_FILES_0_TARGET := misc", + "INSTALL_TARGETS += LOCALIZED_FILES_0", + ] + + self.maxDiff = None + self.assertEqual(lines, expected) + + def test_localized_generated_files_force(self): + """Ensure LOCALIZED_GENERATED_FILES with .force is handled properly.""" + env = self._consume("localized-generated-files-force", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "include $(topsrcdir)/config/AB_rCD.mk", + "MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub", + "foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp", + "$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "MISC_TARGETS += $(MDDEPDIR)/abc.xyz.stub", + "abc.xyz: $(MDDEPDIR)/abc.xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/abc.xyz.pp", + "$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + ] + + self.maxDiff = None + self.assertEqual(lines, expected) + + def test_localized_generated_files_AB_CD(self): + """Ensure LOCALIZED_GENERATED_FILES is handled properly + when {AB_CD} and {AB_rCD} are used.""" + env = self._consume("localized-generated-files-AB_CD", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "include $(topsrcdir)/config/AB_rCD.mk", + "MISC_TARGETS += $(MDDEPDIR)/foo$(AB_CD).xyz.stub", + "foo$(AB_CD).xyz: $(MDDEPDIR)/foo$(AB_CD).xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo$(AB_CD).xyz.pp", + "$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "bar$(AB_rCD).xyz: $(MDDEPDIR)/bar$(AB_rCD).xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar$(AB_rCD).xyz.pp", + "$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call 
MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/zot$(AB_rCD).xyz.pp", + "$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + ] + + self.maxDiff = None + self.assertEqual(lines, expected) + + def test_exports_generated(self): + """Ensure EXPORTS that are listed in GENERATED_FILES + are handled properly.""" + env = self._consume("exports-generated", RecursiveMakeBackend) + + # EXPORTS files should appear in the dist_include install manifest. + m = InstallManifest( + path=mozpath.join( + env.topobjdir, "_build_manifests", "install", "dist_include" + ) + ) + self.assertEqual(len(m), 8) + self.assertIn("foo.h", m) + self.assertIn("mozilla/mozilla1.h", m) + self.assertIn("mozilla/dom/dom1.h", m) + self.assertIn("gfx/gfx.h", m) + self.assertIn("bar.h", m) + self.assertIn("mozilla/mozilla2.h", m) + self.assertIn("mozilla/dom/dom2.h", m) + self.assertIn("mozilla/dom/dom3.h", m) + # EXPORTS files that are also GENERATED_FILES should be handled as + # INSTALL_TARGETS. + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + expected = [ + "include $(topsrcdir)/config/AB_rCD.mk", + "dist_include_FILES += bar.h", + "dist_include_DEST := $(DEPTH)/dist/include/", + "dist_include_TARGET := export", + "INSTALL_TARGETS += dist_include", + "dist_include_mozilla_FILES += mozilla2.h", + "dist_include_mozilla_DEST := $(DEPTH)/dist/include/mozilla", + "dist_include_mozilla_TARGET := export", + "INSTALL_TARGETS += dist_include_mozilla", + "dist_include_mozilla_dom_FILES += dom2.h", + "dist_include_mozilla_dom_FILES += dom3.h", + "dist_include_mozilla_dom_DEST := $(DEPTH)/dist/include/mozilla/dom", + "dist_include_mozilla_dom_TARGET := export", + "INSTALL_TARGETS += dist_include_mozilla_dom", + ] + self.maxDiff = None + self.assertEqual(lines, expected) + + def test_resources(self): + """Ensure RESOURCE_FILES is handled properly.""" + env = self._consume("resources", RecursiveMakeBackend) + + # RESOURCE_FILES should appear in the dist_bin install manifest. 
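+        # RESOURCE_FILES entries install under res/; the moz.build side looks
+        # roughly like (illustrative):
+        #
+        #   RESOURCE_FILES += ["foo.res"]
+        #   RESOURCE_FILES.fonts += ["font1.ttf"]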
+        m = InstallManifest(
+            path=os.path.join(env.topobjdir, "_build_manifests", "install", "dist_bin")
+        )
+        self.assertEqual(len(m), 10)
+        self.assertIn("res/foo.res", m)
+        self.assertIn("res/fonts/font1.ttf", m)
+        self.assertIn("res/fonts/desktop/desktop2.ttf", m)
+
+        self.assertIn("res/bar.res.in", m)
+        self.assertIn("res/tests/test.manifest", m)
+        self.assertIn("res/tests/extra.manifest", m)
+
+    def test_test_manifests_files_written(self):
+        """Ensure test manifests get turned into files."""
+        env = self._consume("test-manifests-written", RecursiveMakeBackend)
+
+        tests_dir = mozpath.join(env.topobjdir, "_tests")
+        m_master = mozpath.join(
+            tests_dir, "testing", "mochitest", "tests", "mochitest.ini"
+        )
+        x_master = mozpath.join(tests_dir, "xpcshell", "xpcshell.ini")
+        self.assertTrue(os.path.exists(m_master))
+        self.assertTrue(os.path.exists(x_master))
+
+        lines = [l.strip() for l in open(x_master, "rt").readlines()]
+        self.assertEqual(
+            lines,
+            [
+                "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.",
+                "",
+                "[include:dir1/xpcshell.ini]",
+                "[include:xpcshell.ini]",
+            ],
+        )
+
+    def test_test_manifest_pattern_matches_recorded(self):
+        """Pattern matches in test manifests' support-files should be recorded."""
+        env = self._consume("test-manifests-written", RecursiveMakeBackend)
+        m = InstallManifest(
+            path=mozpath.join(
+                env.topobjdir, "_build_manifests", "install", "_test_files"
+            )
+        )
+
+        # This is not the most robust test in the world, but it gets the job
+        # done.
+        entries = [e for e in m._dests.keys() if "**" in e]
+        self.assertEqual(len(entries), 1)
+        self.assertIn("support/**", entries[0])
+
+    def test_test_manifest_deferred_installs_written(self):
+        """Shared support files are written to their own data file by the backend."""
+        env = self._consume("test-manifest-shared-support", RecursiveMakeBackend)
+
+        # First, read the generated install manifest contents.
+        test_files_manifest = mozpath.join(
+            env.topobjdir, "_build_manifests", "install", "_test_files"
+        )
+        m = InstallManifest(path=test_files_manifest)
+
+        # Then, synthesize one from the test-installs.pkl file. This should
+        # allow us to re-create a subset of the above.
+        env = self._consume("test-manifest-shared-support", TestManifestBackend)
+        test_installs_path = mozpath.join(env.topobjdir, "test-installs.pkl")
+
+        with open(test_installs_path, "rb") as fh:
+            test_installs = pickle.load(fh)
+
+        self.assertEqual(
+            set(test_installs.keys()),
+            set(["child/test_sub.js", "child/data/**", "child/another-file.sjs"]),
+        )
+        for key in test_installs.keys():
+            self.assertIn(key, test_installs)
+
+        synthesized_manifest = InstallManifest()
+        for item, installs in test_installs.items():
+            for install_info in installs:
+                if len(install_info) == 3:
+                    synthesized_manifest.add_pattern_link(*install_info)
+                if len(install_info) == 2:
+                    synthesized_manifest.add_link(*install_info)
+
+        self.assertEqual(len(synthesized_manifest), 3)
+        for item, info in synthesized_manifest._dests.items():
+            self.assertIn(item, m)
+            self.assertEqual(info, m._dests[item])
+
+    def test_xpidl_generation(self):
+        """Ensure xpidl files and directories are written out."""
+        env = self._consume("xpidl", RecursiveMakeBackend)
+
+        # Install manifests should contain entries.
+ install_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") + self.assertTrue(os.path.isfile(mozpath.join(install_dir, "xpidl"))) + + m = InstallManifest(path=mozpath.join(install_dir, "xpidl")) + self.assertIn(".deps/my_module.pp", m) + + m = InstallManifest(path=mozpath.join(install_dir, "xpidl")) + self.assertIn("my_module.xpt", m) + + m = InstallManifest(path=mozpath.join(install_dir, "dist_include")) + self.assertIn("foo.h", m) + + p = mozpath.join(env.topobjdir, "config/makefiles/xpidl") + self.assertTrue(os.path.isdir(p)) + + self.assertTrue(os.path.isfile(mozpath.join(p, "Makefile"))) + + def test_test_support_files_tracked(self): + env = self._consume("test-support-binaries-tracked", RecursiveMakeBackend) + m = InstallManifest( + path=mozpath.join(env.topobjdir, "_build_manifests", "install", "_tests") + ) + self.assertEqual(len(m), 4) + self.assertIn("xpcshell/tests/mozbuildtest/test-library.dll", m) + self.assertIn("xpcshell/tests/mozbuildtest/test-one.exe", m) + self.assertIn("xpcshell/tests/mozbuildtest/test-two.exe", m) + self.assertIn("xpcshell/tests/mozbuildtest/host-test-library.dll", m) + + def test_old_install_manifest_deleted(self): + # Simulate an install manifest from a previous backend version. Ensure + # it is deleted. + env = self._get_environment("stub0") + purge_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") + manifest_path = mozpath.join(purge_dir, "old_manifest") + os.makedirs(purge_dir) + m = InstallManifest() + m.write(path=manifest_path) + with open( + mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend"), "w" + ) as f: + f.write("%s\n" % manifest_path) + + self.assertTrue(os.path.exists(manifest_path)) + self._consume("stub0", RecursiveMakeBackend, env) + self.assertFalse(os.path.exists(manifest_path)) + + def test_install_manifests_written(self): + env, objs = self._emit("stub0") + backend = RecursiveMakeBackend(env) + + m = InstallManifest() + backend._install_manifests["testing"] = m + m.add_link(__file__, "self") + backend.consume(objs) + + man_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") + self.assertTrue(os.path.isdir(man_dir)) + + expected = ["testing"] + for e in expected: + full = mozpath.join(man_dir, e) + self.assertTrue(os.path.exists(full)) + + m2 = InstallManifest(path=full) + self.assertEqual(m, m2) + + def test_ipdl_sources(self): + """Test that PREPROCESSED_IPDL_SOURCES and IPDL_SOURCES are written to + ipdlsrcs.mk correctly.""" + env = self._get_environment("ipdl_sources") + + # Use the ipdl directory as the IPDL root for testing. + ipdl_root = mozpath.join(env.topobjdir, "ipdl") + + # Make substs writable so we can set the value of IPDL_ROOT to reflect + # the correct objdir. + env.substs = dict(env.substs) + env.substs["IPDL_ROOT"] = ipdl_root + + self._consume("ipdl_sources", RecursiveMakeBackend, env) + + manifest_path = mozpath.join(ipdl_root, "ipdlsrcs.mk") + lines = [l.strip() for l in open(manifest_path, "rt").readlines()] + + # Handle Windows paths correctly + topsrcdir = mozpath.normsep(env.topsrcdir) + + expected = [ + "ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" # noqa + % tuple([topsrcdir] * 4), + "IPDLDIRS := %s %s/bar %s/foo" % (ipdl_root, topsrcdir, topsrcdir), + ] + + found = [str for str in lines if str.startswith(("ALL_IPDLSRCS", "IPDLDIRS"))] + self.assertEqual(found, expected) + + # Check that each directory declares the generated relevant .cpp files + # to be built in CPPSRCS. 
+ # ENABLE_UNIFIED_BUILD defaults to False without mozilla-central's + # moz.configure so we don't see unified sources here. + for dir, expected in ( + (".", []), + ("ipdl", []), + ( + "bar", + [ + "CPPSRCS += " + + " ".join( + f"{ipdl_root}/{f}" + for f in [ + "bar.cpp", + "bar1.cpp", + "bar1Child.cpp", + "bar1Parent.cpp", + "bar2.cpp", + "barChild.cpp", + "barParent.cpp", + ] + ) + ], + ), + ( + "foo", + [ + "CPPSRCS += " + + " ".join( + f"{ipdl_root}/{f}" + for f in [ + "foo.cpp", + "foo1.cpp", + "foo1Child.cpp", + "foo1Parent.cpp", + "foo2.cpp", + "fooChild.cpp", + "fooParent.cpp", + ] + ) + ], + ), + ): + backend_path = mozpath.join(env.topobjdir, dir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()] + + found = [str for str in lines if str.startswith("CPPSRCS")] + self.assertEqual(found, expected) + + def test_defines(self): + """Test that DEFINES are written to backend.mk correctly.""" + env = self._consume("defines", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + var = "DEFINES" + defines = [val for val in lines if val.startswith(var)] + + expected = ["DEFINES += -DFOO '-DBAZ=\"ab'\\''cd\"' -UQUX -DBAR=7 -DVALUE=xyz"] + self.assertEqual(defines, expected) + + def test_local_includes(self): + """Test that LOCAL_INCLUDES are written to backend.mk correctly.""" + env = self._consume("local_includes", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "LOCAL_INCLUDES += -I$(srcdir)/bar/baz", + "LOCAL_INCLUDES += -I$(srcdir)/foo", + ] + + found = [str for str in lines if str.startswith("LOCAL_INCLUDES")] + self.assertEqual(found, expected) + + def test_generated_includes(self): + """Test that GENERATED_INCLUDES are written to backend.mk correctly.""" + env = self._consume("generated_includes", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "LOCAL_INCLUDES += -I$(CURDIR)/bar/baz", + "LOCAL_INCLUDES += -I$(CURDIR)/foo", + ] + + found = [str for str in lines if str.startswith("LOCAL_INCLUDES")] + self.assertEqual(found, expected) + + def test_rust_library(self): + """Test that a Rust library is written to backend.mk correctly.""" + env = self._consume("rust-library", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. + if not l.startswith("COMPUTED_") + ] + + expected = [ + "RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libtest_library.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, + ] + + self.assertEqual(lines, expected) + + def test_host_rust_library(self): + """Test that a Rust library is written to backend.mk correctly.""" + env = self._consume("host-rust-library", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. 
+ if not l.startswith("COMPUTED_") + ] + + expected = [ + "HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, + ] + + self.assertEqual(lines, expected) + + def test_host_rust_library_with_features(self): + """Test that a host Rust library with features is written to backend.mk correctly.""" + env = self._consume("host-rust-library-features", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. + if not l.startswith("COMPUTED_") + ] + + expected = [ + "HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, + "HOST_RUST_LIBRARY_FEATURES := musthave cantlivewithout", + ] + + self.assertEqual(lines, expected) + + def test_rust_library_with_features(self): + """Test that a Rust library with features is written to backend.mk correctly.""" + env = self._consume("rust-library-features", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. + if not l.startswith("COMPUTED_") + ] + + expected = [ + "RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libfeature_library.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, + "RUST_LIBRARY_FEATURES := musthave cantlivewithout", + ] + + self.assertEqual(lines, expected) + + def test_rust_programs(self): + """Test that `{HOST_,}RUST_PROGRAMS` are written to backend.mk correctly.""" + env = self._consume("rust-programs", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "code/backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. 
+ if not l.startswith("COMPUTED_") + ] + + expected = [ + "CARGO_FILE := %s/code/Cargo.toml" % env.topsrcdir, + "CARGO_TARGET_DIR := %s" % env.topobjdir, + "RUST_PROGRAMS += $(DEPTH)/i686-pc-windows-msvc/release/target.exe", + "RUST_CARGO_PROGRAMS += target", + "HOST_RUST_PROGRAMS += $(DEPTH)/i686-pc-windows-msvc/release/host.exe", + "HOST_RUST_CARGO_PROGRAMS += host", + ] + + self.assertEqual(lines, expected) + + root_deps_path = mozpath.join(env.topobjdir, "root-deps.mk") + lines = [l.strip() for l in open(root_deps_path, "rt").readlines()] + + self.assertTrue( + any(l == "recurse_compile: code/host code/target" for l in lines) + ) + + def test_final_target(self): + """Test that FINAL_TARGET is written to backend.mk correctly.""" + env = self._consume("final_target", RecursiveMakeBackend) + + final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)" # noqa + expected = dict() + expected[env.topobjdir] = [] + expected[mozpath.join(env.topobjdir, "both")] = [ + "XPI_NAME = mycrazyxpi", + "DIST_SUBDIR = asubdir", + final_target_rule, + ] + expected[mozpath.join(env.topobjdir, "dist-subdir")] = [ + "DIST_SUBDIR = asubdir", + final_target_rule, + ] + expected[mozpath.join(env.topobjdir, "xpi-name")] = [ + "XPI_NAME = mycrazyxpi", + final_target_rule, + ] + expected[mozpath.join(env.topobjdir, "final-target")] = [ + "FINAL_TARGET = $(DEPTH)/random-final-target" + ] + for key, expected_rules in six.iteritems(expected): + backend_path = mozpath.join(key, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + found = [ + str + for str in lines + if str.startswith("FINAL_TARGET") + or str.startswith("XPI_NAME") + or str.startswith("DIST_SUBDIR") + ] + self.assertEqual(found, expected_rules) + + def test_final_target_pp_files(self): + """Test that FINAL_TARGET_PP_FILES is written to backend.mk correctly.""" + env = self._consume("dist-files", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "DIST_FILES_0 += $(srcdir)/install.rdf", + "DIST_FILES_0 += $(srcdir)/main.js", + "DIST_FILES_0_PATH := $(DEPTH)/dist/bin/", + "DIST_FILES_0_TARGET := misc", + "PP_TARGETS += DIST_FILES_0", + ] + + found = [str for str in lines if "DIST_FILES" in str] + self.assertEqual(found, expected) + + def test_localized_files(self): + """Test that LOCALIZED_FILES is written to backend.mk correctly.""" + env = self._consume("localized-files", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "LOCALIZED_FILES_0_FILES += $(wildcard $(LOCALE_SRCDIR)/abc/*.abc)", + "LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,bar.ini)", + "LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,foo.js)", + "LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/", + "LOCALIZED_FILES_0_TARGET := misc", + "INSTALL_TARGETS += LOCALIZED_FILES_0", + ] + + found = [str for str in lines if "LOCALIZED_FILES" in str] + self.assertEqual(found, expected) + + def test_localized_pp_files(self): + """Test that LOCALIZED_PP_FILES is written to backend.mk correctly.""" + env = self._consume("localized-pp-files", RecursiveMakeBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] + + expected = [ + "LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,bar.ini)", + 
"LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,foo.js)", + "LOCALIZED_PP_FILES_0_PATH = $(FINAL_TARGET)/", + "LOCALIZED_PP_FILES_0_TARGET := misc", + "LOCALIZED_PP_FILES_0_FLAGS := --silence-missing-directive-warnings", + "PP_TARGETS += LOCALIZED_PP_FILES_0", + ] + + found = [str for str in lines if "LOCALIZED_PP_FILES" in str] + self.assertEqual(found, expected) + + def test_config(self): + """Test that CONFIGURE_SUBST_FILES are properly handled.""" + env = self._consume("test_config", RecursiveMakeBackend) + + self.assertEqual( + open(os.path.join(env.topobjdir, "file"), "r").readlines(), + ["#ifdef foo\n", "bar baz\n", "@bar@\n"], + ) + + def test_prog_lib_c_only(self): + """Test that C-only binary artifacts are marked as such.""" + env = self._consume("prog-lib-c-only", RecursiveMakeBackend) + + # PROGRAM C-onlyness. + with open(os.path.join(env.topobjdir, "c-program", "backend.mk"), "r") as fh: + lines = fh.readlines() + lines = [line.rstrip() for line in lines] + + self.assertIn("PROG_IS_C_ONLY_c_test_program := 1", lines) + + with open(os.path.join(env.topobjdir, "cxx-program", "backend.mk"), "r") as fh: + lines = fh.readlines() + lines = [line.rstrip() for line in lines] + + # Test for only the absence of the variable, not the precise + # form of the variable assignment. + for line in lines: + self.assertNotIn("PROG_IS_C_ONLY_cxx_test_program", line) + + # SIMPLE_PROGRAMS C-onlyness. + with open( + os.path.join(env.topobjdir, "c-simple-programs", "backend.mk"), "r" + ) as fh: + lines = fh.readlines() + lines = [line.rstrip() for line in lines] + + self.assertIn("PROG_IS_C_ONLY_c_simple_program := 1", lines) + + with open( + os.path.join(env.topobjdir, "cxx-simple-programs", "backend.mk"), "r" + ) as fh: + lines = fh.readlines() + lines = [line.rstrip() for line in lines] + + for line in lines: + self.assertNotIn("PROG_IS_C_ONLY_cxx_simple_program", line) + + # Libraries C-onlyness. 
+ with open(os.path.join(env.topobjdir, "c-library", "backend.mk"), "r") as fh: + lines = fh.readlines() + lines = [line.rstrip() for line in lines] + + self.assertIn("LIB_IS_C_ONLY := 1", lines) + + with open(os.path.join(env.topobjdir, "cxx-library", "backend.mk"), "r") as fh: + lines = fh.readlines() + lines = [line.rstrip() for line in lines] + + for line in lines: + self.assertNotIn("LIB_IS_C_ONLY", line) + + def test_linkage(self): + env = self._consume("linkage", RecursiveMakeBackend) + expected_linkage = { + "prog": { + "SHARED_LIBS": ["qux/qux.so", "../shared/baz.so"], + "STATIC_LIBS": ["../real/foo.a"], + "OS_LIBS": ["-lfoo", "-lbaz", "-lbar"], + }, + "shared": { + "OS_LIBS": ["-lfoo"], + "SHARED_LIBS": ["../prog/qux/qux.so"], + "STATIC_LIBS": [], + }, + "static": { + "STATIC_LIBS": ["../real/foo.a"], + "OS_LIBS": ["-lbar"], + "SHARED_LIBS": ["../prog/qux/qux.so"], + }, + "real": { + "STATIC_LIBS": [], + "SHARED_LIBS": ["../prog/qux/qux.so"], + "OS_LIBS": ["-lbaz"], + }, + } + actual_linkage = {} + for name in expected_linkage.keys(): + with open(os.path.join(env.topobjdir, name, "backend.mk"), "r") as fh: + actual_linkage[name] = [line.rstrip() for line in fh.readlines()] + for name in expected_linkage: + for var in expected_linkage[name]: + for val in expected_linkage[name][var]: + val = os.path.normpath(val) + line = "%s += %s" % (var, val) + self.assertIn(line, actual_linkage[name]) + actual_linkage[name].remove(line) + for line in actual_linkage[name]: + self.assertNotIn("%s +=" % var, line) + + def test_list_files(self): + env = self._consume("linkage", RecursiveMakeBackend) + expected_list_files = { + "prog/MyProgram_exe.list": [ + "../static/bar/bar1.o", + "../static/bar/bar2.o", + "../static/bar/bar_helper/bar_helper1.o", + ], + "shared/baz_so.list": ["baz/baz1.o"], + } + actual_list_files = {} + for name in expected_list_files.keys(): + with open(os.path.join(env.topobjdir, name), "r") as fh: + actual_list_files[name] = [line.rstrip() for line in fh.readlines()] + for name in expected_list_files: + self.assertEqual( + actual_list_files[name], + [os.path.normpath(f) for f in expected_list_files[name]], + ) + + # We don't produce a list file for a shared library composed only of + # object files in its directory, but instead list them in a variable. 
+ with open(os.path.join(env.topobjdir, "prog", "qux", "backend.mk"), "r") as fh: + lines = [line.rstrip() for line in fh.readlines()] + + self.assertIn("qux.so_OBJS := qux1.o", lines) + + def test_jar_manifests(self): + env = self._consume("jar-manifests", RecursiveMakeBackend) + + with open(os.path.join(env.topobjdir, "backend.mk"), "r") as fh: + lines = fh.readlines() + + lines = [line.rstrip() for line in lines] + + self.assertIn("JAR_MANIFEST := %s/jar.mn" % env.topsrcdir, lines) + + def test_test_manifests_duplicate_support_files(self): + """Ensure duplicate support-files in test manifests work.""" + env = self._consume( + "test-manifests-duplicate-support-files", RecursiveMakeBackend + ) + + p = os.path.join(env.topobjdir, "_build_manifests", "install", "_test_files") + m = InstallManifest(p) + self.assertIn("testing/mochitest/tests/support-file.txt", m) + + def test_install_manifests_package_tests(self): + """Ensure test suites honor package_tests=False.""" + env = self._consume("test-manifests-package-tests", RecursiveMakeBackend) + + man_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") + self.assertTrue(os.path.isdir(man_dir)) + + full = mozpath.join(man_dir, "_test_files") + self.assertTrue(os.path.exists(full)) + + m = InstallManifest(path=full) + + # Only mochitest.js should be in the install manifest. + self.assertTrue("testing/mochitest/tests/mochitest.js" in m) + + # The path is odd here because we do not normalize at test manifest + # processing time. This is a fragile test because there's currently no + # way to iterate the manifest. + self.assertFalse("instrumentation/./not_packaged.java" in m) + + def test_program_paths(self): + """PROGRAMs with various moz.build settings that change the destination should produce + the expected paths in backend.mk.""" + env = self._consume("program-paths", RecursiveMakeBackend) + + expected = [ + ("dist-bin", "$(DEPTH)/dist/bin/dist-bin.prog"), + ("dist-subdir", "$(DEPTH)/dist/bin/foo/dist-subdir.prog"), + ("final-target", "$(DEPTH)/final/target/final-target.prog"), + ("not-installed", "not-installed.prog"), + ] + prefix = "PROGRAM = " + for (subdir, expected_program) in expected: + with io.open(os.path.join(env.topobjdir, subdir, "backend.mk"), "r") as fh: + lines = fh.readlines() + program = [ + line.rstrip().split(prefix, 1)[1] + for line in lines + if line.startswith(prefix) + ][0] + self.assertEqual(program, expected_program) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/backend/test_test_manifest.py b/python/mozbuild/mozbuild/test/backend/test_test_manifest.py new file mode 100644 index 0000000000..fadf65e447 --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/test_test_manifest.py @@ -0,0 +1,94 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
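+
+# The TestManifestBackend serializes its metadata with pickle, so the emitted
+# files can be inspected directly, e.g. (a minimal sketch; path illustrative):
+#
+#   import pickle
+#   with open("objdir/all-tests.pkl", "rb") as fh:
+#       all_tests = pickle.load(fh)
+#   print(sorted(all_tests.keys()))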
+ +import os + +import mozpack.path as mozpath +import six.moves.cPickle as pickle +from mozunit import main + +from mozbuild.backend.test_manifest import TestManifestBackend +from mozbuild.test.backend.common import BackendTester + + +class TestTestManifestBackend(BackendTester): + def test_all_tests_metadata_file_written(self): + """Ensure all-tests.pkl is generated.""" + env = self._consume("test-manifests-written", TestManifestBackend) + + all_tests_path = mozpath.join(env.topobjdir, "all-tests.pkl") + self.assertTrue(os.path.exists(all_tests_path)) + + with open(all_tests_path, "rb") as fh: + o = pickle.load(fh) + + self.assertIn("xpcshell.js", o) + self.assertIn("dir1/test_bar.js", o) + + self.assertEqual(len(o["xpcshell.js"]), 1) + + def test_test_installs_metadata_file_written(self): + """Ensure test-installs.pkl is generated.""" + env = self._consume("test-manifest-shared-support", TestManifestBackend) + all_tests_path = mozpath.join(env.topobjdir, "all-tests.pkl") + self.assertTrue(os.path.exists(all_tests_path)) + test_installs_path = mozpath.join(env.topobjdir, "test-installs.pkl") + + with open(test_installs_path, "rb") as fh: + test_installs = pickle.load(fh) + + self.assertEqual( + set(test_installs.keys()), + set(["child/test_sub.js", "child/data/**", "child/another-file.sjs"]), + ) + + for key in test_installs.keys(): + self.assertIn(key, test_installs) + + def test_test_defaults_metadata_file_written(self): + """Ensure test-defaults.pkl is generated.""" + env = self._consume("test-manifests-written", TestManifestBackend) + + test_defaults_path = mozpath.join(env.topobjdir, "test-defaults.pkl") + self.assertTrue(os.path.exists(test_defaults_path)) + + with open(test_defaults_path, "rb") as fh: + o = {mozpath.normpath(k): v for k, v in pickle.load(fh).items()} + + self.assertEqual( + set(mozpath.relpath(k, env.topsrcdir) for k in o.keys()), + set(["dir1/xpcshell.ini", "xpcshell.ini", "mochitest.ini"]), + ) + + manifest_path = mozpath.join(env.topsrcdir, "xpcshell.ini") + self.assertIn("here", o[manifest_path]) + self.assertIn("support-files", o[manifest_path]) + + def test_test_manifest_sources(self): + """Ensure that backend sources are generated correctly.""" + env = self._consume("test-manifests-backend-sources", TestManifestBackend) + + backend_path = mozpath.join(env.topobjdir, "backend.TestManifestBackend.in") + self.assertTrue(os.path.exists(backend_path)) + + status_path = mozpath.join(env.topobjdir, "config.status") + + with open(backend_path, "r") as fh: + sources = set(source.strip() for source in fh) + + self.assertEqual( + sources, + set( + [ + mozpath.join(env.topsrcdir, "mochitest.ini"), + mozpath.join(env.topsrcdir, "mochitest-common.ini"), + mozpath.join(env.topsrcdir, "moz.build"), + status_path, + ] + ), + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py new file mode 100644 index 0000000000..14cccb484b --- /dev/null +++ b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py @@ -0,0 +1,63 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import unittest +from xml.dom.minidom import parse + +from mozunit import main + +from mozbuild.backend.visualstudio import VisualStudioBackend +from mozbuild.test.backend.common import BackendTester + + +class TestVisualStudioBackend(BackendTester): + @unittest.skip("Failing inconsistently in automation.") + def test_basic(self): + """Ensure we can consume our stub project.""" + + env = self._consume("visual-studio", VisualStudioBackend) + + msvc = os.path.join(env.topobjdir, "msvc") + self.assertTrue(os.path.isdir(msvc)) + + self.assertTrue(os.path.isfile(os.path.join(msvc, "mozilla.sln"))) + self.assertTrue(os.path.isfile(os.path.join(msvc, "mozilla.props"))) + self.assertTrue(os.path.isfile(os.path.join(msvc, "mach.bat"))) + self.assertTrue(os.path.isfile(os.path.join(msvc, "binary_my_app.vcxproj"))) + self.assertTrue(os.path.isfile(os.path.join(msvc, "target_full.vcxproj"))) + self.assertTrue(os.path.isfile(os.path.join(msvc, "library_dir1.vcxproj"))) + self.assertTrue(os.path.isfile(os.path.join(msvc, "library_dir1.vcxproj.user"))) + + d = parse(os.path.join(msvc, "library_dir1.vcxproj")) + self.assertEqual(d.documentElement.tagName, "Project") + els = d.getElementsByTagName("ClCompile") + self.assertEqual(len(els), 2) + + # mozilla-config.h should be explicitly listed as an include. + els = d.getElementsByTagName("NMakeForcedIncludes") + self.assertEqual(len(els), 1) + self.assertEqual( + els[0].firstChild.nodeValue, "$(TopObjDir)\\dist\\include\\mozilla-config.h" + ) + + # LOCAL_INCLUDES get added to the include search path. + els = d.getElementsByTagName("NMakeIncludeSearchPath") + self.assertEqual(len(els), 1) + includes = els[0].firstChild.nodeValue.split(";") + self.assertIn(os.path.normpath("$(TopSrcDir)/includeA/foo"), includes) + self.assertIn(os.path.normpath("$(TopSrcDir)/dir1"), includes) + self.assertIn(os.path.normpath("$(TopObjDir)/dir1"), includes) + self.assertIn(os.path.normpath("$(TopObjDir)\\dist\\include"), includes) + + # DEFINES get added to the project. + els = d.getElementsByTagName("NMakePreprocessorDefinitions") + self.assertEqual(len(els), 1) + defines = els[0].firstChild.nodeValue.split(";") + self.assertIn("DEFINEFOO", defines) + self.assertIn("DEFINEBAR=bar", defines) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py b/python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py new file mode 100644 index 0000000000..774688c62f --- /dev/null +++ b/python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py @@ -0,0 +1,90 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import os
+import unittest
+from unittest import mock
+
+import mozpack.path as mozpath
+from mach.registrar import Registrar
+from mozunit import main
+
+from mozbuild.base import MozbuildObject
+
+
+class TestStaticAnalysis(unittest.TestCase):
+    def setUp(self):
+        self.remove_cats = []
+        for cat in ("build", "post-build", "misc", "testing", "devenv"):
+            if cat in Registrar.categories:
+                continue
+            Registrar.register_category(cat, cat, cat)
+            self.remove_cats.append(cat)
+
+    def tearDown(self):
+        for cat in self.remove_cats:
+            del Registrar.categories[cat]
+            del Registrar.commands_by_category[cat]
+
+    def test_bug_1615884(self):
+        # TODO: write a cleaner test.
+        # We are testing `_is_ignored_path` here, but in an ideal world we
+        # would test the clang_analysis mach command instead, since that
+        # small function is an internal detail. There is currently no test
+        # infrastructure for that mach command, though.
+        from mozbuild.code_analysis.mach_commands import _is_ignored_path
+
+        config = MozbuildObject.from_environment()
+        context = mock.MagicMock()
+        context.cwd = config.topsrcdir
+
+        command_context = mock.MagicMock()
+        command_context.topsrcdir = os.path.join("/root", "dir")
+        path = os.path.join("/root", "dir", "path1")
+
+        ignored_dirs_re = r"path1|path2/here|path3\there"
+        self.assertIsNotNone(
+            _is_ignored_path(command_context, ignored_dirs_re, path)
+        )
+
+        # simulating a win32 env
+        win32_path = "\\root\\dir\\path1"
+        command_context.topsrcdir = "\\root\\dir"
+        old_sep = os.sep
+        os.sep = "\\"
+        try:
+            self.assertIsNotNone(
+                _is_ignored_path(command_context, ignored_dirs_re, win32_path)
+            )
+        finally:
+            os.sep = old_sep
+
+        self.assertIsNone(
+            _is_ignored_path(command_context, ignored_dirs_re, "path2")
+        )
+
+    def test_get_files(self):
+        from mozbuild.code_analysis.mach_commands import get_abspath_files
+
+        config = MozbuildObject.from_environment()
+        context = mock.MagicMock()
+        context.cwd = config.topsrcdir
+
+        command_context = mock.MagicMock()
+        command_context.topsrcdir = mozpath.join("/root", "dir")
+        source = get_abspath_files(
+            command_context, ["file1", mozpath.join("directory", "file2")]
+        )
+
+        self.assertEqual(
+            source,
+            [
+                mozpath.join(command_context.topsrcdir, "file1"),
+                mozpath.join(command_context.topsrcdir, "directory", "file2"),
+            ],
+        )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info b/python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info
new file mode 100644
index 0000000000..996ccac215
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info
@@ -0,0 +1,1895 @@
+SF:lcov_test_newTab.js
+FN:1,top-level
+FN:31,top-level
+FN:232,Transformation_rearrangeSites/
+            # Path which ends with > eval
+            (
+                "resource://gre/modules/osfile/osfile_async_worker.js line 3 > eval",
+                None,
+            ),
+            # Path which ends with > Function
+            (
+                "resource://gre/modules/osfile/osfile_async_worker.js line 3 > Function",
+                None,
+            ),
+            # Path which contains "->"
+            (
+                "resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js",  # noqa
+                ("toolkit/components/osfile/modules/osfile_async_worker.js", None),
+            ),
+            # Path with pp_info
+            (
+                "resource://gre/modules/AppConstants.sys.mjs",
+                (
+                    "toolkit/modules/AppConstants.sys.mjs",
+                    {
+                        "101,102": ["toolkit/modules/AppConstants.sys.mjs", 135],
+                    },
+                ),
+            ),
+            # Path with query
+            (
+                "resource://activity-stream/lib/PrefsFeed.jsm?q=0.9098419174803978",
+                ("browser/components/newtab/lib/PrefsFeed.jsm", None),
+            ),
+        ]
+
+        url_finder = lcov_rewriter.UrlFinder(
+            self._chrome_map_file, "", "dist/bin/", []
+        )
+        for path, expected in paths:
+            self.assertEqual(url_finder.rewrite_url(path), expected)
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/common.py b/python/mozbuild/mozbuild/test/common.py
new file mode 100644
index 0000000000..47f04a8dd3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/common.py
@@ -0,0 +1,69 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import os
+import shutil
+
+import mozpack.path as mozpath
+from buildconfig import topsrcdir
+from mach.logging import LoggingManager
+
+from mozbuild.util import ReadOnlyDict
+
+# By including this module, tests get structured logging.
+log_manager = LoggingManager()
+log_manager.add_terminal_logging()
+
+
+def prepare_tmp_topsrcdir(path):
+    for p in (
+        "build/autoconf/config.guess",
+        "build/autoconf/config.sub",
+        "build/moz.configure/checks.configure",
+        "build/moz.configure/init.configure",
+        "build/moz.configure/util.configure",
+    ):
+        file_path = os.path.join(path, p)
+        try:
+            os.makedirs(os.path.dirname(file_path))
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        shutil.copy(os.path.join(topsrcdir, p), file_path)
+
+
+# mozconfig is not a reusable type (it's actually a module), so we
+# have to mock it.
+class MockConfig(object):
+    def __init__(
+        self,
+        topsrcdir="/path/to/topsrcdir",
+        extra_substs={},
+        error_is_fatal=True,
+    ):
+        self.topsrcdir = mozpath.abspath(topsrcdir)
+        self.topobjdir = mozpath.abspath("/path/to/topobjdir")
+
+        self.substs = ReadOnlyDict(
+            {
+                "MOZ_FOO": "foo",
+                "MOZ_BAR": "bar",
+                "MOZ_TRUE": "1",
+                "MOZ_FALSE": "",
+                "DLL_PREFIX": "lib",
+                "DLL_SUFFIX": ".so",
+            },
+            **extra_substs
+        )
+
+        self.defines = self.substs
+
+        self.lib_prefix = "lib"
+        self.lib_suffix = ".a"
+        self.import_prefix = "lib"
+        self.import_suffix = ".so"
+        self.dll_prefix = "lib"
+        self.dll_suffix = ".so"
+        self.error_is_fatal = error_is_fatal
diff --git a/python/mozbuild/mozbuild/test/compilation/__init__.py b/python/mozbuild/mozbuild/test/compilation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/compilation/test_warnings.py b/python/mozbuild/mozbuild/test/compilation/test_warnings.py
new file mode 100644
index 0000000000..1769e2e333
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/compilation/test_warnings.py
@@ -0,0 +1,240 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+ +import os +import unittest + +from mozfile.mozfile import NamedTemporaryFile +from mozunit import main + +from mozbuild.compilation.warnings import ( + CompilerWarning, + WarningsCollector, + WarningsDatabase, +) + +CLANG_TESTS = [ + ( + "foobar.cpp:123:10: warning: you messed up [-Wfoo]", + "foobar.cpp", + 123, + 10, + "warning", + "you messed up", + "-Wfoo", + ), + ( + "c_locale_dummy.c:457:1: error: (near initialization for " + "'full_wmonthname[0]') [clang-diagnostic-error]", + "c_locale_dummy.c", + 457, + 1, + "error", + "(near initialization for 'full_wmonthname[0]')", + "clang-diagnostic-error", + ), +] + +CURRENT_LINE = 1 + + +def get_warning(): + global CURRENT_LINE + + w = CompilerWarning() + w["filename"] = "/foo/bar/baz.cpp" + w["line"] = CURRENT_LINE + w["column"] = 12 + w["message"] = "This is irrelevant" + + CURRENT_LINE += 1 + + return w + + +class TestCompilerWarning(unittest.TestCase): + def test_equivalence(self): + w1 = CompilerWarning() + w2 = CompilerWarning() + + s = set() + + # Empty warnings should be equal. + self.assertEqual(w1, w2) + + s.add(w1) + s.add(w2) + + self.assertEqual(len(s), 1) + + w1["filename"] = "/foo.c" + w2["filename"] = "/bar.c" + + self.assertNotEqual(w1, w2) + + s = set() + s.add(w1) + s.add(w2) + + self.assertEqual(len(s), 2) + + w1["filename"] = "/foo.c" + w1["line"] = 5 + w2["line"] = 5 + + w2["filename"] = "/foo.c" + w1["column"] = 3 + w2["column"] = 3 + + self.assertEqual(w1, w2) + + def test_comparison(self): + w1 = CompilerWarning() + w2 = CompilerWarning() + + w1["filename"] = "/aaa.c" + w1["line"] = 5 + w1["column"] = 5 + + w2["filename"] = "/bbb.c" + w2["line"] = 5 + w2["column"] = 5 + + self.assertLess(w1, w2) + self.assertGreater(w2, w1) + self.assertGreaterEqual(w2, w1) + + w2["filename"] = "/aaa.c" + w2["line"] = 4 + w2["column"] = 6 + + self.assertLess(w2, w1) + self.assertGreater(w1, w2) + self.assertGreaterEqual(w1, w2) + + w2["filename"] = "/aaa.c" + w2["line"] = 5 + w2["column"] = 10 + + self.assertLess(w1, w2) + self.assertGreater(w2, w1) + self.assertGreaterEqual(w2, w1) + + w2["filename"] = "/aaa.c" + w2["line"] = 5 + w2["column"] = 5 + + self.assertLessEqual(w1, w2) + self.assertLessEqual(w2, w1) + self.assertGreaterEqual(w2, w1) + self.assertGreaterEqual(w1, w2) + + +class TestWarningsAndErrorsParsing(unittest.TestCase): + def test_clang_parsing(self): + for source, filename, line, column, diag_type, message, flag in CLANG_TESTS: + collector = WarningsCollector(lambda w: None) + warning = collector.process_line(source) + + self.assertIsNotNone(warning) + + self.assertEqual(warning["filename"], filename) + self.assertEqual(warning["line"], line) + self.assertEqual(warning["column"], column) + self.assertEqual(warning["type"], diag_type) + self.assertEqual(warning["message"], message) + self.assertEqual(warning["flag"], flag) + + +class TestWarningsDatabase(unittest.TestCase): + def test_basic(self): + db = WarningsDatabase() + + self.assertEqual(len(db), 0) + + for i in range(10): + db.insert(get_warning(), compute_hash=False) + + self.assertEqual(len(db), 10) + + warnings = list(db) + self.assertEqual(len(warnings), 10) + + def test_hashing(self): + """Ensure that hashing files on insert works.""" + db = WarningsDatabase() + + temp = NamedTemporaryFile(mode="wt") + temp.write("x" * 100) + temp.flush() + + w = CompilerWarning() + w["filename"] = temp.name + w["line"] = 1 + w["column"] = 4 + w["message"] = "foo bar" + + # Should not throw. 
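+        # (With compute_hash left at its default, insert() hashes the
+        # contents of w['filename']; the temporary file above exists, so
+        # this is expected to succeed.)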
+ db.insert(w) + + w["filename"] = "DOES_NOT_EXIST" + + with self.assertRaises(Exception): + db.insert(w) + + def test_pruning(self): + """Ensure old warnings are removed from database appropriately.""" + db = WarningsDatabase() + + source_files = [] + for i in range(1, 21): + temp = NamedTemporaryFile(mode="wt") + temp.write("x" * (100 * i)) + temp.flush() + + # Keep reference so it doesn't get GC'd and deleted. + source_files.append(temp) + + w = CompilerWarning() + w["filename"] = temp.name + w["line"] = 1 + w["column"] = i * 10 + w["message"] = "irrelevant" + + db.insert(w) + + self.assertEqual(len(db), 20) + + # If we change a source file, inserting a new warning should nuke the + # old one. + source_files[0].write("extra") + source_files[0].flush() + + w = CompilerWarning() + w["filename"] = source_files[0].name + w["line"] = 1 + w["column"] = 50 + w["message"] = "replaced" + + db.insert(w) + + self.assertEqual(len(db), 20) + + warnings = list(db.warnings_for_file(source_files[0].name)) + self.assertEqual(len(warnings), 1) + self.assertEqual(warnings[0]["column"], w["column"]) + + # If we delete the source file, calling prune should cause the warnings + # to go away. + old_filename = source_files[0].name + del source_files[0] + + self.assertFalse(os.path.exists(old_filename)) + + db.prune() + self.assertEqual(len(db), 19) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/common.py b/python/mozbuild/mozbuild/test/configure/common.py new file mode 100644 index 0000000000..7dc1b85b22 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/common.py @@ -0,0 +1,307 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import copy +import errno +import os +import subprocess +import sys +import tempfile +import unittest + +import six +from buildconfig import topobjdir, topsrcdir +from mozpack import path as mozpath +from six import StringIO, string_types + +from mozbuild.configure import ConfigureSandbox +from mozbuild.util import ReadOnlyNamespace, memoized_property + + +def fake_short_path(path): + if sys.platform.startswith("win"): + return "/".join( + p.split(" ", 1)[0] + "~1" if " " in p else p for p in mozpath.split(path) + ) + return path + + +def ensure_exe_extension(path): + if sys.platform.startswith("win"): + return path + ".exe" + return path + + +class ConfigureTestVFS(object): + def __init__(self, paths): + self._paths = set(mozpath.abspath(p) for p in paths) + + def _real_file(self, path): + return mozpath.basedir(path, [topsrcdir, topobjdir, tempfile.gettempdir()]) + + def exists(self, path): + if path in self._paths: + return True + if self._real_file(path): + return os.path.exists(path) + return False + + def isfile(self, path): + path = mozpath.abspath(path) + if path in self._paths: + return True + if self._real_file(path): + return os.path.isfile(path) + return False + + def expanduser(self, path): + return os.path.expanduser(path) + + def isdir(self, path): + path = mozpath.abspath(path) + if any(mozpath.basedir(mozpath.dirname(p), [path]) for p in self._paths): + return True + if self._real_file(path): + return os.path.isdir(path) + return False + + def getsize(self, path): + if not self._real_file(path): + raise FileNotFoundError(path) + return os.path.getsize(path) + + +class ConfigureTestSandbox(ConfigureSandbox): + """Wrapper around the ConfigureSandbox for testing purposes. + + Its arguments are the same as ConfigureSandbox, except for the additional + `paths` argument, which is a dict where the keys are file paths and the + values are either None or a function that will be called when the sandbox + calls an implemented function from subprocess with the key as command. + When the command is CONFIG_SHELL, the function for the path of the script + that follows will be called. + + The API for those functions is: + retcode, stdout, stderr = func(stdin, args) + + This class is only meant to implement the minimal things to make + moz.configure testing possible. As such, it takes shortcuts. 
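+
+    A minimal, hypothetical use (the tool path and handler below are made
+    up for illustration):
+
+        def fake_tool(stdin, args):
+            return 0, "fake output", ""
+
+        sandbox = ConfigureTestSandbox(
+            {"/usr/bin/fake-tool": fake_tool}, config, environ, ["configure"]
+        )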
+ """ + + def __init__(self, paths, config, environ, *args, **kwargs): + self._search_path = environ.get("PATH", "").split(os.pathsep) + + self._subprocess_paths = { + mozpath.abspath(k): v for k, v in six.iteritems(paths) if v + } + + paths = list(paths) + + environ = copy.copy(environ) + if "CONFIG_SHELL" not in environ: + environ["CONFIG_SHELL"] = mozpath.abspath("/bin/sh") + self._subprocess_paths[environ["CONFIG_SHELL"]] = self.shell + paths.append(environ["CONFIG_SHELL"]) + self._subprocess_paths[ + mozpath.join(topsrcdir, "build/win32/vswhere.exe") + ] = self.vswhere + + vfs = ConfigureTestVFS(paths) + + os_path = {k: getattr(vfs, k) for k in dir(vfs) if not k.startswith("_")} + + os_path.update(self.OS.path.__dict__) + + os_contents = {} + exec("from os import *", {}, os_contents) + os_contents["path"] = ReadOnlyNamespace(**os_path) + os_contents["environ"] = dict(environ) + self.imported_os = ReadOnlyNamespace(**os_contents) + + super(ConfigureTestSandbox, self).__init__(config, environ, *args, **kwargs) + + @memoized_property + def _wrapped_mozfile(self): + return ReadOnlyNamespace(which=self.which) + + @memoized_property + def _wrapped_os(self): + return self.imported_os + + @memoized_property + def _wrapped_subprocess(self): + return ReadOnlyNamespace( + CalledProcessError=subprocess.CalledProcessError, + check_output=self.check_output, + PIPE=subprocess.PIPE, + STDOUT=subprocess.STDOUT, + Popen=self.Popen, + ) + + @memoized_property + def _wrapped_ctypes(self): + class CTypesFunc(object): + def __init__(self, func): + self._func = func + + def __call__(self, *args, **kwargs): + return self._func(*args, **kwargs) + + return ReadOnlyNamespace( + create_unicode_buffer=self.create_unicode_buffer, + windll=ReadOnlyNamespace( + kernel32=ReadOnlyNamespace( + GetShortPathNameW=CTypesFunc(self.GetShortPathNameW) + ) + ), + wintypes=ReadOnlyNamespace(LPCWSTR=0, LPWSTR=1, DWORD=2), + ) + + @memoized_property + def _wrapped__winreg(self): + def OpenKey(*args, **kwargs): + raise WindowsError() + + return ReadOnlyNamespace(HKEY_LOCAL_MACHINE=0, OpenKey=OpenKey) + + def create_unicode_buffer(self, *args, **kwargs): + class Buffer(object): + def __init__(self): + self.value = "" + + return Buffer() + + def GetShortPathNameW(self, path_in, path_out, length): + path_out.value = fake_short_path(path_in) + return length + + def which(self, command, mode=None, path=None, exts=None): + if isinstance(path, string_types): + path = path.split(os.pathsep) + + for parent in path or self._search_path: + c = mozpath.abspath(mozpath.join(parent, command)) + for candidate in (c, ensure_exe_extension(c)): + if self.imported_os.path.exists(candidate): + return candidate + return None + + def Popen(self, args, stdin=None, stdout=None, stderr=None, **kargs): + program = self.which(args[0]) + if not program: + raise OSError(errno.ENOENT, "File not found") + + func = self._subprocess_paths.get(program) + retcode, stdout, stderr = func(stdin, args[1:]) + + class Process(object): + def communicate(self, stdin=None): + return stdout, stderr + + def wait(self): + return retcode + + return Process() + + def check_output(self, args, **kwargs): + proc = self.Popen(args, **kwargs) + stdout, stderr = proc.communicate() + retcode = proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, args, stdout) + return stdout + + def shell(self, stdin, args): + script = mozpath.abspath(args[0]) + if script in self._subprocess_paths: + return self._subprocess_paths[script](stdin, args[1:]) + return 127, "", "File not 
found" + + def vswhere(self, stdin, args): + return 0, "[]", "" + + def get_config(self, name): + # Like the loop in ConfigureSandbox.run, but only execute the code + # associated with the given config item. + for func, args in self._execution_queue: + if ( + func == self._resolve_and_set + and args[0] is self._config + and args[1] == name + ): + func(*args) + return self._config.get(name) + + +class BaseConfigureTest(unittest.TestCase): + HOST = "x86_64-pc-linux-gnu" + + def setUp(self): + self._cwd = os.getcwd() + os.chdir(topobjdir) + + def tearDown(self): + os.chdir(self._cwd) + + def config_guess(self, stdin, args): + return 0, self.HOST, "" + + def config_sub(self, stdin, args): + return 0, args[0], "" + + def get_sandbox( + self, + paths, + config, + args=[], + environ={}, + mozconfig="", + out=None, + logger=None, + cls=ConfigureTestSandbox, + ): + kwargs = {} + if logger: + kwargs["logger"] = logger + else: + if not out: + out = StringIO() + kwargs["stdout"] = out + kwargs["stderr"] = out + + if hasattr(self, "TARGET"): + target = ["--target=%s" % self.TARGET] + else: + target = [] + + if mozconfig: + fh, mozconfig_path = tempfile.mkstemp(text=True) + os.write(fh, six.ensure_binary(mozconfig)) + os.close(fh) + else: + mozconfig_path = os.path.join( + os.path.dirname(__file__), "data", "empty_mozconfig" + ) + + try: + environ = dict( + environ, + OLD_CONFIGURE=os.path.join(topsrcdir, "old-configure"), + MOZCONFIG=mozconfig_path, + ) + + paths = dict(paths) + autoconf_dir = mozpath.join(topsrcdir, "build", "autoconf") + paths[mozpath.join(autoconf_dir, "config.guess")] = self.config_guess + paths[mozpath.join(autoconf_dir, "config.sub")] = self.config_sub + + sandbox = cls( + paths, config, environ, ["configure"] + target + args, **kwargs + ) + sandbox.include_file(os.path.join(topsrcdir, "moz.configure")) + + return sandbox + finally: + if mozconfig: + os.remove(mozconfig_path) diff --git a/python/mozbuild/mozbuild/test/configure/data/decorators.configure b/python/mozbuild/mozbuild/test/configure/data/decorators.configure new file mode 100644 index 0000000000..b98eb26f3f --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/decorators.configure @@ -0,0 +1,53 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@template +def simple_decorator(func): + return func + + +@template +def wrapper_decorator(func): + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return wrapper + + +@template +def function_decorator(*args, **kwargs): + # We could return wrapper_decorator from above here, but then we wouldn't + # know if this works as expected because wrapper_decorator itself was + # modified or because the right thing happened here. 
+    def wrapper_decorator(func):
+        def wrapper(*args, **kwargs):
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return wrapper_decorator
+
+
+@depends("--help")
+@simple_decorator
+def foo(help):
+    global FOO
+    FOO = 1
+
+
+@depends("--help")
+@wrapper_decorator
+def bar(help):
+    global BAR
+    BAR = 1
+
+
+@depends("--help")
+@function_decorator("a", "b", "c")
+def qux(help):
+    global QUX
+    QUX = 1
diff --git a/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig b/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/configure/data/extra.configure b/python/mozbuild/mozbuild/test/configure/data/extra.configure
new file mode 100644
index 0000000000..e54a93dbc3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/extra.configure
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--extra", help="Extra")
+
+
+@depends("--extra")
+def extra(extra):
+    return extra
+
+
+set_config("EXTRA", extra)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure
new file mode 100644
index 0000000000..f20a4a7149
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure
@@ -0,0 +1,37 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+imply_option("--enable-foo", True)
+
+option("--enable-foo", help="enable foo")
+
+
+@depends("--enable-foo", "--help")
+def foo(value, help):
+    if value:
+        return True
+
+
+imply_option("--enable-bar", ("foo", "bar"))
+
+option("--enable-bar", nargs="*", help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+    if value:
+        return value
+
+
+imply_option("--enable-baz", "BAZ")
+
+option("--enable-baz", nargs=1, help="enable baz")
+
+
+@depends("--enable-baz")
+def baz(value):
+    if value:
+        return value
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure
new file mode 100644
index 0000000000..b73be9a720
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +option("--enable-foo", help="enable foo") + + +@depends("--enable-foo", "--help") +def foo(value, help): + if value: + return True + + +imply_option("--enable-bar", foo) + + +option("--enable-bar", help="enable bar") + + +@depends("--enable-bar") +def bar(value): + if value: + return value + + +set_config("BAR", bar) diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure new file mode 100644 index 0000000000..9b3761c3c3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure @@ -0,0 +1,36 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option("--enable-hoge", help="enable hoge") + + +@depends("--enable-hoge") +def hoge(value): + return value + + +option("--enable-foo", help="enable foo") + + +@depends("--enable-foo", hoge) +def foo(value, hoge): + if value: + return True + + +imply_option("--enable-bar", foo) + + +option("--enable-bar", help="enable bar") + + +@depends("--enable-bar") +def bar(value): + if value: + return value + + +set_config("BAR", bar) diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure new file mode 100644 index 0000000000..e953231f5e --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure @@ -0,0 +1,40 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option("--enable-foo", help="enable foo") + + +@depends("--enable-foo") +def foo(value): + if value: + return False + + +imply_option("--enable-bar", foo) + + +option("--disable-hoge", help="enable hoge") + + +@depends("--disable-hoge") +def hoge(value): + if not value: + return False + + +imply_option("--enable-bar", hoge) + + +option("--enable-bar", default=True, help="enable bar") + + +@depends("--enable-bar") +def bar(value): + if not value: + return value + + +set_config("BAR", bar) diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure new file mode 100644 index 0000000000..6aa225cc45 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure @@ -0,0 +1,28 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
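+
+# A minimal imply_option() case: passing --enable-foo makes foo() return
+# True, which in turn implies --enable-bar below.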
+ +option("--enable-foo", help="enable foo") + + +@depends("--enable-foo") +def foo(value): + if value: + return True + + +imply_option("--enable-bar", foo) + + +option("--enable-bar", help="enable bar") + + +@depends("--enable-bar") +def bar(value): + if value: + return value + + +set_config("BAR", bar) diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure new file mode 100644 index 0000000000..93198a8295 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure @@ -0,0 +1,28 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option("--enable-foo", nargs="*", help="enable foo") + + +@depends("--enable-foo") +def foo(value): + if value: + return value + + +imply_option("--enable-bar", foo) + + +option("--enable-bar", nargs="*", help="enable bar") + + +@depends("--enable-bar") +def bar(value): + if value: + return value + + +set_config("BAR", bar) diff --git a/python/mozbuild/mozbuild/test/configure/data/included.configure b/python/mozbuild/mozbuild/test/configure/data/included.configure new file mode 100644 index 0000000000..97166618ec --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/included.configure @@ -0,0 +1,68 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# For more complex and repetitive things, we can create templates +@template +def check_compiler_flag(flag): + @depends(is_gcc) + def check(value): + if value: + return [flag] + + set_config("CFLAGS", check) + return check + + +check_compiler_flag("-Werror=foobar") + +# Normal functions can be used in @depends functions. +def fortytwo(): + return 42 + + +def twentyone(): + yield 21 + + +@depends(is_gcc) +def check(value): + if value: + return fortytwo() + + +set_config("TEMPLATE_VALUE", check) + + +@depends(is_gcc) +def check(value): + if value: + for val in twentyone(): + return val + + +set_config("TEMPLATE_VALUE_2", check) + +# Normal functions can use @imports too to import modules. +@imports("sys") +def platform(): + return sys.platform + + +option("--enable-imports-in-template", help="Imports in template") + + +@depends("--enable-imports-in-template") +def check(value): + if value: + return platform() + + +set_config("PLATFORM", check) + + +@template +def indirectly_define_option(*args, **kwargs): + option(*args, **kwargs) diff --git a/python/mozbuild/mozbuild/test/configure/data/moz.configure b/python/mozbuild/mozbuild/test/configure/data/moz.configure new file mode 100644 index 0000000000..4d57eabbb9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/moz.configure @@ -0,0 +1,205 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +option("--enable-simple", help="Enable simple") + +# Setting MOZ_WITH_ENV in the environment has the same effect as passing +# --enable-with-env. +option("--enable-with-env", env="MOZ_WITH_ENV", help="Enable with env") + +# Optional values +option("--enable-values", nargs="*", help="Enable values") + +# Everything supported in the Option class is supported in option(). Assume +# the tests of the Option class are extensive about this. + +# Alternatively to --enable/--disable, there also is --with/--without. The +# difference is semantic only. Behavior is the same as --enable/--disable. + +# When the option name starts with --disable/--without, the default is for +# the option to be enabled. +option("--without-thing", help="Build without thing") + +# A --enable/--with option with a default of False is equivalent to a +# --disable/--without option. This can be used to change the defaults +# depending on e.g. the target or the built application. +option("--with-stuff", default=False, help="Build with stuff") + +# Other kinds of arbitrary options are also allowed. This is effectively +# equivalent to --enable/--with, with no possibility of --disable/--without. +option("--option", env="MOZ_OPTION", help="Option") + +# It is also possible to pass options through the environment only. +option(env="CC", nargs=1, help="C Compiler") + +# Call the function when the --enable-simple option is processed, with its +# OptionValue as argument. +@depends("--enable-simple") +def simple(simple): + if simple: + return simple + + +set_config("ENABLED_SIMPLE", simple) + +# There can be multiple functions depending on the same option. +@depends("--enable-simple") +def simple(simple): + return simple + + +set_config("SIMPLE", simple) + + +@depends("--enable-with-env") +def with_env(with_env): + return with_env + + +set_config("WITH_ENV", with_env) + +# It doesn't matter if the dependency is on --enable or --disable +@depends("--disable-values") +def with_env2(values): + return values + + +set_config("VALUES", with_env2) + +# It is possible to @depends on environment-only options. +@depends("CC") +def is_gcc(cc): + return cc and "gcc" in cc[0] + + +set_config("IS_GCC", is_gcc) + +# It is possible to depend on the result from another function. +@depends(with_env2) +def with_env3(values): + return values + + +set_config("VALUES2", with_env3) + +# @depends functions can also return results for use as input to another +# @depends. +@depends(with_env3) +def with_env4(values): + return values + + +@depends(with_env4) +def with_env5(values): + return values + + +set_config("VALUES3", with_env5) + +# The result from @depends functions can also be used as input to options. +# The result must be returned, not implied. +@depends("--enable-simple") +def simple(simple): + return "simple" if simple else "not-simple" + + +option("--with-returned-default", default=simple, help="Returned default") + + +@depends("--with-returned-default") +def default(value): + return value + + +set_config("DEFAULTED", default) + + +@depends("--enable-values") +def choices(values): + if len(values): + return { + "alpha": ("a", "b", "c"), + "numeric": ("0", "1", "2"), + }.get(values[0]) + + +option("--returned-choices", choices=choices, help="Choices") + + +@depends("--returned-choices") +def returned_choices(values): + return values + + +set_config("CHOICES", returned_choices) + +# All options must be referenced by some @depends function. 
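+# (For instance, an option() declared here but never consumed by any
+# @depends function would be reported as not handled when configure runs.)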
+# It is possible to depend on multiple options/functions +@depends("--without-thing", "--with-stuff", with_env4, "--option") +def remainder(*args): + return args + + +set_config("REMAINDER", remainder) + +# It is possible to include other files to extend the configuration script. +include("included.configure") + +# It is also possible for the include file path to come from the result of a +# @depends function. +option("--enable-include", nargs=1, help="Include") + + +@depends("--enable-include") +def include_path(path): + return path[0] if path else None + + +include(include_path) + +# Sandboxed functions can import from modules through the use of the @imports +# decorator. +# The order of the decorators matter: @imports needs to appear after other +# decorators. +option("--with-imports", nargs="?", help="Imports") + +# A limited set of functions from os.path are exposed by default. +@depends("--with-imports") +def with_imports(value): + if len(value): + return hasattr(os.path, "abspath") + + +set_config("HAS_ABSPATH", with_imports) + +# It is still possible to import the full set from os.path. +# It is also possible to cherry-pick builtins. +@depends("--with-imports") +@imports("os.path") +def with_imports(value): + if len(value): + return hasattr(os.path, "getatime") + + +set_config("HAS_GETATIME", with_imports) + + +@depends("--with-imports") +def with_imports(value): + if len(value): + return hasattr(os.path, "getatime") + + +set_config("HAS_GETATIME2", with_imports) + +# This option should be attributed to this file in the --help output even though +# included.configure is the actual file that defines the option. +indirectly_define_option("--indirect-option", help="Indirectly defined option") + + +@depends("--indirect-option") +def indirect_option(option): + return option diff --git a/python/mozbuild/mozbuild/test/configure/data/set_config.configure b/python/mozbuild/mozbuild/test/configure/data/set_config.configure new file mode 100644 index 0000000000..0ae5fef6d6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/set_config.configure @@ -0,0 +1,51 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option("--set-foo", help="set foo") + + +@depends("--set-foo") +def foo(value): + if value: + return True + + +set_config("FOO", foo) + + +option("--set-bar", help="set bar") + + +@depends("--set-bar") +def bar(value): + return bool(value) + + +set_config("BAR", bar) + + +option("--set-value", nargs=1, help="set value") + + +@depends("--set-value") +def set_value(value): + if value: + return value[0] + + +set_config("VALUE", set_value) + + +option("--set-name", nargs=1, help="set name") + + +@depends("--set-name") +def set_name(value): + if value: + return value[0] + + +set_config(set_name, True) diff --git a/python/mozbuild/mozbuild/test/configure/data/set_define.configure b/python/mozbuild/mozbuild/test/configure/data/set_define.configure new file mode 100644 index 0000000000..ce9a60d7f1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/set_define.configure @@ -0,0 +1,51 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +option("--set-foo", help="set foo") + + +@depends("--set-foo") +def foo(value): + if value: + return True + + +set_define("FOO", foo) + + +option("--set-bar", help="set bar") + + +@depends("--set-bar") +def bar(value): + return bool(value) + + +set_define("BAR", bar) + + +option("--set-value", nargs=1, help="set value") + + +@depends("--set-value") +def set_value(value): + if value: + return value[0] + + +set_define("VALUE", set_value) + + +option("--set-name", nargs=1, help="set name") + + +@depends("--set-name") +def set_name(value): + if value: + return value[0] + + +set_define(set_name, True) diff --git a/python/mozbuild/mozbuild/test/configure/data/subprocess.configure b/python/mozbuild/mozbuild/test/configure/data/subprocess.configure new file mode 100644 index 0000000000..3316fee087 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/data/subprocess.configure @@ -0,0 +1,24 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +@depends("--help") +@imports("codecs") +@imports(_from="mozbuild.configure.util", _import="getpreferredencoding") +@imports("os") +@imports(_from="__builtin__", _import="open") +def dies_when_logging(_): + test_file = "test.txt" + quote_char = "'" + if getpreferredencoding().lower() == "utf-8": + quote_char = "\u00B4" + try: + with open(test_file, "w+") as fh: + fh.write(quote_char) + out = check_cmd_output("cat", "test.txt") + log.info(out) + finally: + os.remove(test_file) diff --git a/python/mozbuild/mozbuild/test/configure/lint.py b/python/mozbuild/mozbuild/test/configure/lint.py new file mode 100644 index 0000000000..59d41da264 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/lint.py @@ -0,0 +1,62 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import unittest + +import six +from buildconfig import topobjdir, topsrcdir +from mozunit import main + +from mozbuild.configure.lint import LintSandbox + +test_path = os.path.abspath(__file__) + + +class LintMeta(type): + def __new__(mcs, name, bases, attrs): + def create_test(project, func): + def test(self): + return func(self, project) + + return test + + for project in ( + "browser", + "js", + "memory", + "mobile/android", + ): + attrs["test_%s" % project.replace("/", "_")] = create_test( + project, attrs["lint"] + ) + + return type.__new__(mcs, name, bases, attrs) + + +# We don't actually need python2 compat, but this makes flake8 happy. 
+@six.add_metaclass(LintMeta)
+class Lint(unittest.TestCase):
+    def setUp(self):
+        self._curdir = os.getcwd()
+        os.chdir(topobjdir)
+
+    def tearDown(self):
+        os.chdir(self._curdir)
+
+    def lint(self, project):
+        sandbox = LintSandbox(
+            {
+                "OLD_CONFIGURE": os.path.join(topsrcdir, "old-configure"),
+                "MOZCONFIG": os.path.join(
+                    os.path.dirname(test_path), "data", "empty_mozconfig"
+                ),
+            },
+            ["configure", "--enable-project=%s" % project, "--help"],
+        )
+        sandbox.run(os.path.join(topsrcdir, "moz.configure"))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist b/python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist
new file mode 100644
index 0000000000..f0d6e1949f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>Version</key>
+	<string>13.3</string>
+</dict>
+</plist>
diff --git a/python/mozbuild/mozbuild/test/configure/test_bootstrap.py b/python/mozbuild/mozbuild/test/configure/test_bootstrap.py
new file mode 100644
index 0000000000..eaa417d566
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_bootstrap.py
@@ -0,0 +1,43 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozunit import main
+
+from common import BaseConfigureTest
+
+
+class TestBootstrap(BaseConfigureTest):
+    def test_bootstrap(self):
+        def get_value_for(arg):
+            sandbox = self.get_sandbox({}, {}, [arg], {})
+            return sandbox._value_for(sandbox["enable_bootstrap"])
+
+        self.assertEqual(None, get_value_for("--disable-bootstrap"))
+
+        # With `--enable-bootstrap`, anything is bootstrappable
+        bootstrap = get_value_for("--enable-bootstrap")
+        self.assertTrue(bootstrap("foo"))
+        self.assertTrue(bootstrap("bar"))
+
+        # With `--enable-bootstrap=foo,bar`, only foo and bar are bootstrappable
+        bootstrap = get_value_for("--enable-bootstrap=foo,bar")
+        self.assertTrue(bootstrap("foo"))
+        self.assertTrue(bootstrap("bar"))
+        self.assertFalse(bootstrap("qux"))
+
+        # With `--enable-bootstrap=-foo`, anything is bootstrappable, except foo
+        bootstrap = get_value_for("--enable-bootstrap=-foo")
+        self.assertFalse(bootstrap("foo"))
+        self.assertTrue(bootstrap("bar"))
+        self.assertTrue(bootstrap("qux"))
+
+        # Corner case.
+        bootstrap = get_value_for("--enable-bootstrap=-foo,foo,bar")
+        self.assertFalse(bootstrap("foo"))
+        self.assertTrue(bootstrap("bar"))
+        self.assertFalse(bootstrap("qux"))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_checks_configure.py b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
new file mode 100644
index 0000000000..53361ff199
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
@@ -0,0 +1,1169 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +import os +import sys +import textwrap +import unittest + +from buildconfig import topsrcdir +from mozpack import path as mozpath +from mozunit import MockedOpen, main +from six import StringIO + +from common import ConfigureTestSandbox, ensure_exe_extension, fake_short_path +from mozbuild.configure import ConfigureError, ConfigureSandbox +from mozbuild.shellutil import quote as shell_quote +from mozbuild.util import exec_ + + +class TestChecksConfigure(unittest.TestCase): + def test_checking(self): + def make_test(to_exec): + def test(val, msg): + out = StringIO() + sandbox = ConfigureSandbox({}, stdout=out, stderr=out) + base_dir = os.path.join(topsrcdir, "build", "moz.configure") + sandbox.include_file(os.path.join(base_dir, "checks.configure")) + exec_(to_exec, sandbox) + sandbox["foo"](val) + self.assertEqual(out.getvalue(), msg) + + return test + + test = make_test( + textwrap.dedent( + """ + @checking('for a thing') + def foo(value): + return value + """ + ) + ) + test(True, "checking for a thing... yes\n") + test(False, "checking for a thing... no\n") + test(42, "checking for a thing... 42\n") + test("foo", "checking for a thing... foo\n") + data = ["foo", "bar"] + test(data, "checking for a thing... %r\n" % data) + + # When the function given to checking does nothing interesting, the + # behavior is not altered + test = make_test( + textwrap.dedent( + """ + @checking('for a thing', lambda x: x) + def foo(value): + return value + """ + ) + ) + test(True, "checking for a thing... yes\n") + test(False, "checking for a thing... no\n") + test(42, "checking for a thing... 42\n") + test("foo", "checking for a thing... foo\n") + data = ["foo", "bar"] + test(data, "checking for a thing... %r\n" % data) + + test = make_test( + textwrap.dedent( + """ + def munge(x): + if not x: + return 'not found' + if isinstance(x, (str, bool, int)): + return x + return ' '.join(x) + + @checking('for a thing', munge) + def foo(value): + return value + """ + ) + ) + test(True, "checking for a thing... yes\n") + test(False, "checking for a thing... not found\n") + test(42, "checking for a thing... 42\n") + test("foo", "checking for a thing... foo\n") + data = ["foo", "bar"] + test(data, "checking for a thing... 
foo bar\n") + + KNOWN_A = ensure_exe_extension(mozpath.abspath("/usr/bin/known-a")) + KNOWN_B = ensure_exe_extension(mozpath.abspath("/usr/local/bin/known-b")) + KNOWN_C = ensure_exe_extension(mozpath.abspath("/home/user/bin/known c")) + OTHER_A = ensure_exe_extension(mozpath.abspath("/lib/other/known-a")) + + def get_result( + self, + command="", + args=[], + environ={}, + prog="/bin/configure", + extra_paths=None, + includes=("util.configure", "checks.configure"), + ): + config = {} + out = StringIO() + paths = {self.KNOWN_A: None, self.KNOWN_B: None, self.KNOWN_C: None} + if extra_paths: + paths.update(extra_paths) + environ = dict(environ) + if "PATH" not in environ: + environ["PATH"] = os.pathsep.join(os.path.dirname(p) for p in paths) + paths[self.OTHER_A] = None + sandbox = ConfigureTestSandbox(paths, config, environ, [prog] + args, out, out) + base_dir = os.path.join(topsrcdir, "build", "moz.configure") + for f in includes: + sandbox.include_file(os.path.join(base_dir, f)) + + status = 0 + try: + exec_(command, sandbox) + sandbox.run() + except SystemExit as e: + status = e.code + + return config, out.getvalue(), status + + def test_check_prog(self): + config, out, status = self.get_result('check_prog("FOO", ("known-a",))') + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_A}) + self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "known-b", "known c"))' + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_B}) + self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_B) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "unknown-2", "known c"))' + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)}) + self.assertEqual( + out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C)) + ) + + config, out, status = self.get_result('check_prog("FOO", ("unknown",))') + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for foo... not found + DEBUG: foo: Looking for unknown + ERROR: Cannot find foo + """ + ), + ) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))' + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for foo... not found + DEBUG: foo: Looking for unknown + DEBUG: foo: Looking for unknown-2 + DEBUG: foo: Looking for 'unknown 3' + ERROR: Cannot find foo + """ + ), + ) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), ' + "allow_missing=True)" + ) + self.assertEqual(status, 0) + self.assertEqual(config, {}) + self.assertEqual(out, "checking for foo... not found\n") + + @unittest.skipIf(not sys.platform.startswith("win"), "Windows-only test") + def test_check_prog_exe(self): + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a.exe"] + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_A}) + self.assertEqual(out, "checking for foo... 
%s\n" % self.KNOWN_A) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "known-b", "known c"))', + ["FOO=%s" % os.path.splitext(self.KNOWN_A)[0]], + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_A}) + self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) + + def test_check_prog_with_args(self): + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a"] + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_A}) + self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "known-b", "known c"))', + ["FOO=%s" % self.KNOWN_A], + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_A}) + self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) + + path = self.KNOWN_B.replace("known-b", "known-a") + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=%s" % path] + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for foo... not found + DEBUG: foo: Looking for %s + ERROR: Cannot find foo + """ + ) + % path, + ) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown",))', ["FOO=known c"] + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)}) + self.assertEqual( + out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C)) + ) + + config, out, status = self.get_result( + 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), ' + "allow_missing=True)", + ["FOO=unknown"], + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for foo... not found + DEBUG: foo: Looking for unknown + ERROR: Cannot find foo + """ + ), + ) + + def test_check_prog_what(self): + config, out, status = self.get_result( + 'check_prog("CC", ("known-a",), what="the target C compiler")' + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"CC": self.KNOWN_A}) + self.assertEqual( + out, "checking for the target C compiler... %s\n" % self.KNOWN_A + ) + + config, out, status = self.get_result( + 'check_prog("CC", ("unknown", "unknown-2", "unknown 3"),' + ' what="the target C compiler")' + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for the target C compiler... not found + DEBUG: cc: Looking for unknown + DEBUG: cc: Looking for unknown-2 + DEBUG: cc: Looking for 'unknown 3' + ERROR: Cannot find the target C compiler + """ + ), + ) + + def test_check_prog_input(self): + config, out, status = self.get_result( + textwrap.dedent( + """ + option("--with-ccache", nargs=1, help="ccache") + check_prog("CCACHE", ("known-a",), input="--with-ccache") + """ + ), + ["--with-ccache=known-b"], + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"CCACHE": self.KNOWN_B}) + self.assertEqual(out, "checking for ccache... 
%s\n" % self.KNOWN_B) + + script = textwrap.dedent( + """ + option(env="CC", nargs=1, help="compiler") + @depends("CC") + def compiler(value): + return value[0].split()[0] if value else None + check_prog("CC", ("known-a",), input=compiler) + """ + ) + config, out, status = self.get_result(script) + self.assertEqual(status, 0) + self.assertEqual(config, {"CC": self.KNOWN_A}) + self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A) + + config, out, status = self.get_result(script, ["CC=known-b"]) + self.assertEqual(status, 0) + self.assertEqual(config, {"CC": self.KNOWN_B}) + self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_B) + + config, out, status = self.get_result(script, ["CC=known-b -m32"]) + self.assertEqual(status, 0) + self.assertEqual(config, {"CC": self.KNOWN_B}) + self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_B) + + def test_check_prog_progs(self): + config, out, status = self.get_result('check_prog("FOO", ())') + self.assertEqual(status, 0) + self.assertEqual(config, {}) + self.assertEqual(out, "") + + config, out, status = self.get_result('check_prog("FOO", ())', ["FOO=known-a"]) + self.assertEqual(status, 0) + self.assertEqual(config, {"FOO": self.KNOWN_A}) + self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) + + script = textwrap.dedent( + """ + option(env="TARGET", nargs=1, default="linux", help="target") + @depends("TARGET") + def compiler(value): + if value: + if value[0] == "linux": + return ("gcc", "clang") + if value[0] == "winnt": + return ("cl", "clang-cl") + check_prog("CC", compiler) + """ + ) + config, out, status = self.get_result(script) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for cc... not found + DEBUG: cc: Looking for gcc + DEBUG: cc: Looking for clang + ERROR: Cannot find cc + """ + ), + ) + + config, out, status = self.get_result(script, ["TARGET=linux"]) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for cc... not found + DEBUG: cc: Looking for gcc + DEBUG: cc: Looking for clang + ERROR: Cannot find cc + """ + ), + ) + + config, out, status = self.get_result(script, ["TARGET=winnt"]) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for cc... not found + DEBUG: cc: Looking for cl + DEBUG: cc: Looking for clang-cl + ERROR: Cannot find cc + """ + ), + ) + + config, out, status = self.get_result(script, ["TARGET=none"]) + self.assertEqual(status, 0) + self.assertEqual(config, {}) + self.assertEqual(out, "") + + config, out, status = self.get_result(script, ["TARGET=winnt", "CC=known-a"]) + self.assertEqual(status, 0) + self.assertEqual(config, {"CC": self.KNOWN_A}) + self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A) + + config, out, status = self.get_result(script, ["TARGET=none", "CC=known-a"]) + self.assertEqual(status, 0) + self.assertEqual(config, {"CC": self.KNOWN_A}) + self.assertEqual(out, "checking for cc... 
%s\n" % self.KNOWN_A) + + def test_check_prog_configure_error(self): + with self.assertRaises(ConfigureError) as e: + self.get_result('check_prog("FOO", "foo")') + + self.assertEqual(str(e.exception), "progs must resolve to a list or tuple!") + + with self.assertRaises(ConfigureError) as e: + self.get_result( + 'foo = depends(when=True)(lambda: ("a", "b"))\n' + 'check_prog("FOO", ("known-a",), input=foo)' + ) + + self.assertEqual( + str(e.exception), + "input must resolve to a tuple or a list with a " + "single element, or a string", + ) + + with self.assertRaises(ConfigureError) as e: + self.get_result( + 'foo = depends(when=True)(lambda: {"a": "b"})\n' + 'check_prog("FOO", ("known-a",), input=foo)' + ) + + self.assertEqual( + str(e.exception), + "input must resolve to a tuple or a list with a " + "single element, or a string", + ) + + def test_check_prog_with_path(self): + config, out, status = self.get_result( + 'check_prog("A", ("known-a",), paths=["/some/path"])' + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for a... not found + DEBUG: a: Looking for known-a + ERROR: Cannot find a + """ + ), + ) + + config, out, status = self.get_result( + 'check_prog("A", ("known-a",), paths=["%s"])' + % os.path.dirname(self.OTHER_A) + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"A": self.OTHER_A}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for a... %s + """ + % self.OTHER_A + ), + ) + + dirs = map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A)) + config, out, status = self.get_result( + textwrap.dedent( + """\ + check_prog("A", ("known-a",), paths=["%s"]) + """ + % os.pathsep.join(dirs) + ) + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"A": self.OTHER_A}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for a... %s + """ + % self.OTHER_A + ), + ) + + dirs = map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B)) + config, out, status = self.get_result( + textwrap.dedent( + """\ + check_prog("A", ("known-a",), paths=["%s", "%s"]) + """ + % (os.pathsep.join(dirs), self.OTHER_A) + ) + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"A": self.KNOWN_A}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for a... %s + """ + % self.KNOWN_A + ), + ) + + config, out, status = self.get_result( + 'check_prog("A", ("known-a",), paths="%s")' % os.path.dirname(self.OTHER_A) + ) + + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for a... """ # noqa # trailing whitespace... 
+ """ + DEBUG: a: Looking for known-a + ERROR: Paths provided to find_program must be a list of strings, not %r + """ + % mozpath.dirname(self.OTHER_A) + ), + ) + + @unittest.skipIf( + not sys.platform.startswith("linux"), + "Linux-only test, assumes Java is located from a $PATH", + ) + def test_java_tool_checks_linux(self): + def run_configure_java( + mock_fs_paths, mock_java_home=None, mock_path=None, args=[] + ): + script = textwrap.dedent( + """\ + @depends('--help') + def host(_): + return namespace(os='unknown', kernel='unknown') + toolchains_base_dir = depends(when=True)(lambda: '/mozbuild') + include('%(topsrcdir)s/build/moz.configure/java.configure') + """ + % {"topsrcdir": topsrcdir} + ) + + # Don't let system JAVA_HOME influence the test + original_java_home = os.environ.pop("JAVA_HOME", None) + configure_environ = {} + + if mock_java_home: + os.environ["JAVA_HOME"] = mock_java_home + configure_environ["JAVA_HOME"] = mock_java_home + + if mock_path: + configure_environ["PATH"] = mock_path + + # * Even if the real file sysphabtem has a symlink at the mocked path, don't let + # realpath follow it, as it may influence the test. + # * When finding a binary, check the mock paths rather than the real filesystem. + # Note: Python doesn't allow the different "with" bits to be put in parenthesis, + # because then it thinks it's an un-with-able tuple. Additionally, if this is cleanly + # lined up with "\", black removes them and autoformats them to the block that is + # below. + result = self.get_result( + args=args, + command=script, + extra_paths=paths, + environ=configure_environ, + ) + + if original_java_home: + os.environ["JAVA_HOME"] = original_java_home + return result + + java = mozpath.abspath("/usr/bin/java") + javac = mozpath.abspath("/usr/bin/javac") + paths = {java: None, javac: None} + expected_error_message = ( + "ERROR: Could not locate Java at /mozbuild/jdk/jdk-17.0.7+7/bin, " + "please run ./mach bootstrap --no-system-changes\n" + ) + + config, out, status = run_configure_java(paths) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual(out, expected_error_message) + + # An alternative valid set of tools referred to by JAVA_HOME. + alt_java = mozpath.abspath("/usr/local/bin/java") + alt_javac = mozpath.abspath("/usr/local/bin/javac") + alt_java_home = mozpath.dirname(mozpath.dirname(alt_java)) + paths = {alt_java: None, alt_javac: None, java: None, javac: None} + + alt_path = mozpath.dirname(java) + config, out, status = run_configure_java(paths, alt_java_home, alt_path) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual(out, expected_error_message) + + # We can use --with-java-bin-path instead of JAVA_HOME to similar + # effect. + config, out, status = run_configure_java( + paths, + mock_path=mozpath.dirname(java), + args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)], + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for java... %s + """ + % alt_java + ), + ) + + # If --with-java-bin-path and JAVA_HOME are both set, + # --with-java-bin-path takes precedence. 
+ config, out, status = run_configure_java( + paths, + mock_java_home=mozpath.dirname(mozpath.dirname(java)), + mock_path=mozpath.dirname(java), + args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)], + ) + self.assertEqual(status, 0) + self.assertEqual(config, {"JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False}) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking for java... %s + """ + % alt_java + ), + ) + + # --enable-java-coverage should set MOZ_JAVA_CODE_COVERAGE. + alt_java_home = mozpath.dirname(mozpath.dirname(java)) + config, out, status = run_configure_java( + paths, + mock_java_home=alt_java_home, + mock_path=mozpath.dirname(java), + args=["--enable-java-coverage"], + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + + # Any missing tool is fatal when these checks run. + paths = {} + config, out, status = run_configure_java( + mock_fs_paths={}, + mock_path=mozpath.dirname(java), + args=["--enable-java-coverage"], + ) + self.assertEqual(status, 1) + self.assertEqual(config, {}) + self.assertEqual(out, expected_error_message) + + def test_pkg_check_modules(self): + mock_pkg_config_version = "0.10.0" + mock_pkg_config_path = mozpath.abspath("/usr/bin/pkg-config") + + seen_flags = set() + + def mock_pkg_config(_, args): + if "--dont-define-prefix" in args: + args = list(args) + seen_flags.add(args.pop(args.index("--dont-define-prefix"))) + args = tuple(args) + if args[0:2] == ("--errors-to-stdout", "--print-errors"): + assert len(args) == 3 + package = args[2] + if package == "unknown": + return ( + 1, + "Package unknown was not found in the pkg-config search path.\n" + "Perhaps you should add the directory containing `unknown.pc'\n" + "to the PKG_CONFIG_PATH environment variable\n" + "No package 'unknown' found", + "", + ) + if package == "valid": + return 0, "", "" + if package == "new > 1.1": + return 1, "Requested 'new > 1.1' but version of new is 1.1", "" + if args[0] == "--cflags": + assert len(args) == 2 + return 0, "-I/usr/include/%s" % args[1], "" + if args[0] == "--libs": + assert len(args) == 2 + return 0, "-l%s" % args[1], "" + if args[0] == "--version": + return 0, mock_pkg_config_version, "" + if args[0] == "--about": + return 1, "Unknown option --about", "" + self.fail("Unexpected arguments to mock_pkg_config: %s" % (args,)) + + def mock_pkgconf(_, args): + if args[0] == "--shared": + seen_flags.add(args[0]) + args = args[1:] + if args[0] == "--about": + return 0, "pkgconf {}".format(mock_pkg_config_version), "" + return mock_pkg_config(_, args) + + def get_result(cmd, args=[], bootstrapped_sysroot=False, extra_paths=None): + return self.get_result( + textwrap.dedent( + """\ + option('--disable-compile-environment', help='compile env') + compile_environment = depends(when='--enable-compile-environment')(lambda: True) + toolchain_prefix = depends(when=True)(lambda: None) + target_multiarch_dir = depends(when=True)(lambda: None) + target_sysroot = depends(when=True)(lambda: %(sysroot)s) + target = depends(when=True)(lambda: None) + include('%(topsrcdir)s/build/moz.configure/util.configure') + include('%(topsrcdir)s/build/moz.configure/checks.configure') + # Skip bootstrapping. 
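# ---------------------------------------------------------------------------
# Illustrative aside (not part of the patch): throughout these tests an
# "executable" is mocked as a Python callable with the signature
# (stdin, args) -> (returncode, stdout, stderr); that is the contract
# mock_pkg_config and mock_pkgconf above rely on. mock_tool is invented here.
def mock_tool(stdin, args):
    if args[0] == "--version":
        return 0, "0.10.0", ""
    return 1, "", "unknown option %s" % args[0]

retcode, stdout, stderr = mock_tool(None, ["--version"])
assert (retcode, stdout, stderr) == (0, "0.10.0", "")
# ---------------------------------------------------------------------------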
+ @template + def check_prog(*args, **kwargs): + del kwargs["bootstrap"] + return check_prog(*args, **kwargs) + include('%(topsrcdir)s/build/moz.configure/pkg.configure') + """ + % { + "topsrcdir": topsrcdir, + "sysroot": "namespace(bootstrapped=True)" + if bootstrapped_sysroot + else "None", + } + ) + + cmd, + args=args, + extra_paths=extra_paths, + includes=(), + ) + + extra_paths = {mock_pkg_config_path: mock_pkg_config} + + config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')") + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for pkg_config... not found + ERROR: *** The pkg-config script could not be found. Make sure it is + *** in your path, or set the PKG_CONFIG environment variable + *** to the full path to pkg-config. + """ + ), + ) + + for pkg_config, version, bootstrapped_sysroot, is_pkgconf in ( + (mock_pkg_config, "0.10.0", False, False), + (mock_pkg_config, "0.30.0", False, False), + (mock_pkg_config, "0.30.0", True, False), + (mock_pkgconf, "1.1.0", True, True), + (mock_pkgconf, "1.6.0", False, True), + (mock_pkgconf, "1.8.0", False, True), + (mock_pkgconf, "1.8.0", True, True), + ): + seen_flags = set() + mock_pkg_config_version = version + config, output, status = get_result( + "pkg_check_modules('MOZ_VALID', 'valid')", + bootstrapped_sysroot=bootstrapped_sysroot, + extra_paths={mock_pkg_config_path: pkg_config}, + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for pkg_config... %s + checking for pkg-config version... %s + checking whether pkg-config is pkgconf... %s + checking for valid... yes + checking MOZ_VALID_CFLAGS... -I/usr/include/valid + checking MOZ_VALID_LIBS... -lvalid + """ + % ( + mock_pkg_config_path, + mock_pkg_config_version, + "yes" if is_pkgconf else "no", + ) + ), + ) + self.assertEqual( + config, + { + "PKG_CONFIG": mock_pkg_config_path, + "MOZ_VALID_CFLAGS": ("-I/usr/include/valid",), + "MOZ_VALID_LIBS": ("-lvalid",), + }, + ) + if version == "1.8.0" and bootstrapped_sysroot: + self.assertEqual(seen_flags, set(["--shared", "--dont-define-prefix"])) + elif version == "1.8.0": + self.assertEqual(seen_flags, set(["--shared"])) + elif version in ("1.6.0", "0.30.0") and bootstrapped_sysroot: + self.assertEqual(seen_flags, set(["--dont-define-prefix"])) + else: + self.assertEqual(seen_flags, set()) + + config, output, status = get_result( + "pkg_check_modules('MOZ_UKNOWN', 'unknown')", extra_paths=extra_paths + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for pkg_config... %s + checking for pkg-config version... %s + checking whether pkg-config is pkgconf... no + checking for unknown... no + ERROR: Package unknown was not found in the pkg-config search path. + ERROR: Perhaps you should add the directory containing `unknown.pc' + ERROR: to the PKG_CONFIG_PATH environment variable + ERROR: No package 'unknown' found + """ + % (mock_pkg_config_path, mock_pkg_config_version) + ), + ) + self.assertEqual(config, {"PKG_CONFIG": mock_pkg_config_path}) + + config, output, status = get_result( + "pkg_check_modules('MOZ_NEW', 'new > 1.1')", extra_paths=extra_paths + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for pkg_config... %s + checking for pkg-config version... %s + checking whether pkg-config is pkgconf... no + checking for new > 1.1... 
no + ERROR: Requested 'new > 1.1' but version of new is 1.1 + """ + % (mock_pkg_config_path, mock_pkg_config_version) + ), + ) + self.assertEqual(config, {"PKG_CONFIG": mock_pkg_config_path}) + + # allow_missing makes missing packages non-fatal. + cmd = textwrap.dedent( + """\ + have_new_module = pkg_check_modules('MOZ_NEW', 'new > 1.1', allow_missing=True) + @depends(have_new_module) + def log_new_module_error(mod): + if mod is not True: + log.info('Module not found.') + """ + ) + + config, output, status = get_result(cmd, extra_paths=extra_paths) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for pkg_config... %s + checking for pkg-config version... %s + checking whether pkg-config is pkgconf... no + checking for new > 1.1... no + WARNING: Requested 'new > 1.1' but version of new is 1.1 + Module not found. + """ + % (mock_pkg_config_path, mock_pkg_config_version) + ), + ) + self.assertEqual(config, {"PKG_CONFIG": mock_pkg_config_path}) + + config, output, status = get_result( + cmd, args=["--disable-compile-environment"], extra_paths=extra_paths + ) + self.assertEqual(status, 0) + self.assertEqual(output, "Module not found.\n") + self.assertEqual(config, {}) + + def mock_old_pkg_config(_, args): + if args[0] == "--version": + return 0, "0.8.10", "" + if args[0] == "--about": + return 1, "Unknown option --about", "" + self.fail("Unexpected arguments to mock_old_pkg_config: %s" % args) + + extra_paths = {mock_pkg_config_path: mock_old_pkg_config} + + config, output, status = get_result( + "pkg_check_modules('MOZ_VALID', 'valid')", extra_paths=extra_paths + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for pkg_config... %s + checking for pkg-config version... 0.8.10 + checking whether pkg-config is pkgconf... no + ERROR: *** Your version of pkg-config is too old. You need version 0.9.0 or newer. + """ + % mock_pkg_config_path + ), + ) + + def test_simple_keyfile(self): + includes = ("util.configure", "checks.configure", "keyfiles.configure") + + config, output, status = self.get_result( + "simple_keyfile('Mozilla API')", includes=includes + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Mozilla API key... no + """ + ), + ) + self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "no-mozilla-api-key"}) + + config, output, status = self.get_result( + "simple_keyfile('Mozilla API')", + args=["--with-mozilla-api-keyfile=/foo/bar/does/not/exist"], + includes=includes, + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Mozilla API key... no + ERROR: '/foo/bar/does/not/exist': No such file or directory. + """ + ), + ) + self.assertEqual(config, {}) + + with MockedOpen({"key": ""}): + config, output, status = self.get_result( + "simple_keyfile('Mozilla API')", + args=["--with-mozilla-api-keyfile=key"], + includes=includes, + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Mozilla API key... no + ERROR: 'key' is empty. + """ + ), + ) + self.assertEqual(config, {}) + + with MockedOpen({"key": "fake-key\n"}): + config, output, status = self.get_result( + "simple_keyfile('Mozilla API')", + args=["--with-mozilla-api-keyfile=key"], + includes=includes, + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Mozilla API key... 
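# ---------------------------------------------------------------------------
# Illustrative aside (not part of the patch): with allow_missing=True a
# missing package demotes the fatal ERROR to a WARNING and hands later
# @depends functions a non-True value to branch on, which is the flow
# log_new_module_error exercises above. check_module() is invented here.
import logging

def check_module(found, allow_missing):
    if found:
        return True
    if not allow_missing:
        raise SystemExit("ERROR: package not found")
    logging.warning("package not found")
    return None  # caller sees "not True" and can degrade gracefully

assert check_module(False, allow_missing=True) is None
assert check_module(True, allow_missing=False) is True
# ---------------------------------------------------------------------------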
yes + """ + ), + ) + self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "fake-key"}) + + with MockedOpen({"default": "default-key\n"}): + config, output, status = self.get_result( + "simple_keyfile('Mozilla API', default='default')", includes=includes + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Mozilla API key... yes + """ + ), + ) + self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "default-key"}) + + with MockedOpen({"default": "default-key\n", "key": "fake-key\n"}): + config, output, status = self.get_result( + "simple_keyfile('Mozilla API', default='key')", includes=includes + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Mozilla API key... yes + """ + ), + ) + self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "fake-key"}) + + def test_id_and_secret_keyfile(self): + includes = ("util.configure", "checks.configure", "keyfiles.configure") + + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API')", includes=includes + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... no + """ + ), + ) + self.assertEqual( + config, + { + "MOZ_BING_API_CLIENTID": "no-bing-api-clientid", + "MOZ_BING_API_KEY": "no-bing-api-key", + }, + ) + + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API')", + args=["--with-bing-api-keyfile=/foo/bar/does/not/exist"], + includes=includes, + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... no + ERROR: '/foo/bar/does/not/exist': No such file or directory. + """ + ), + ) + self.assertEqual(config, {}) + + with MockedOpen({"key": ""}): + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API')", + args=["--with-bing-api-keyfile=key"], + includes=includes, + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... no + ERROR: 'key' is empty. + """ + ), + ) + self.assertEqual(config, {}) + + with MockedOpen({"key": "fake-id fake-key\n"}): + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API')", + args=["--with-bing-api-keyfile=key"], + includes=includes, + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... yes + """ + ), + ) + self.assertEqual( + config, + {"MOZ_BING_API_CLIENTID": "fake-id", "MOZ_BING_API_KEY": "fake-key"}, + ) + + with MockedOpen({"key": "fake-key\n"}): + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API')", + args=["--with-bing-api-keyfile=key"], + includes=includes, + ) + self.assertEqual(status, 1) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... no + ERROR: Bing API key file has an invalid format. + """ + ), + ) + self.assertEqual(config, {}) + + with MockedOpen({"default-key": "default-id default-key\n"}): + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API', default='default-key')", + includes=includes, + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... 
yes + """ + ), + ) + self.assertEqual( + config, + { + "MOZ_BING_API_CLIENTID": "default-id", + "MOZ_BING_API_KEY": "default-key", + }, + ) + + with MockedOpen( + {"default-key": "default-id default-key\n", "key": "fake-id fake-key\n"} + ): + config, output, status = self.get_result( + "id_and_secret_keyfile('Bing API', default='default-key')", + args=["--with-bing-api-keyfile=key"], + includes=includes, + ) + self.assertEqual(status, 0) + self.assertEqual( + output, + textwrap.dedent( + """\ + checking for the Bing API key... yes + """ + ), + ) + self.assertEqual( + config, + {"MOZ_BING_API_CLIENTID": "fake-id", "MOZ_BING_API_KEY": "fake-key"}, + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_compile_checks.py b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py new file mode 100644 index 0000000000..37988d535f --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py @@ -0,0 +1,599 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import textwrap +import unittest + +import mozpack.path as mozpath +from buildconfig import topsrcdir +from mozunit import main +from six import StringIO +from test_toolchain_helpers import FakeCompiler + +from common import ConfigureTestSandbox +from mozbuild.util import exec_ + + +class BaseCompileChecks(unittest.TestCase): + def get_mock_compiler(self, expected_test_content=None, expected_flags=None): + expected_flags = expected_flags or [] + + def mock_compiler(stdin, args): + if args != ["--version"]: + test_file = [a for a in args if not a.startswith("-")] + self.assertEqual(len(test_file), 1) + test_file = test_file[0] + args = [a for a in args if a.startswith("-")] + self.assertIn("-c", args) + for flag in expected_flags: + self.assertIn(flag, args) + + if expected_test_content: + with open(test_file) as fh: + test_content = fh.read() + self.assertEqual(test_content, expected_test_content) + + return FakeCompiler()(None, args) + + return mock_compiler + + def do_compile_test(self, command, expected_test_content=None, expected_flags=None): + + paths = { + os.path.abspath("/usr/bin/mockcc"): self.get_mock_compiler( + expected_test_content=expected_test_content, + expected_flags=expected_flags, + ), + } + + base_dir = os.path.join(topsrcdir, "build", "moz.configure") + + mock_compiler_defs = textwrap.dedent( + """\ + @depends(when=True) + def extra_toolchain_flags(): + return [] + + @depends(when=True) + def linker_ldflags(): + return [] + + target = depends(when=True)(lambda: True) + + @depends(when=True) + def configure_cache(): + + class ConfigureCache(dict): + pass + + cache_data = {} + + cache = ConfigureCache(cache_data) + cache.version_checked_compilers = set() + + return cache + + include('%s/compilers-util.configure') + + @template + def wrap_compiler(compiler): + return compiler_class(compiler, False) + + @wrap_compiler + @depends(when=True) + def c_compiler(): + return namespace( + flags=[], + type='gcc', + compiler=os.path.abspath('/usr/bin/mockcc'), + wrapper=[], + language='C', + ) + + @wrap_compiler + @depends(when=True) + def host_c_compiler(): + return namespace( + flags=[], + type='gcc', + compiler=os.path.abspath('/usr/bin/mockcc'), + wrapper=[], + language='C', + ) + + @wrap_compiler + @depends(when=True) + def cxx_compiler(): + return namespace( + flags=[], + type='gcc', + 
+                    compiler=os.path.abspath('/usr/bin/mockcc'),
+                    wrapper=[],
+                    language='C++',
+                )
+
+            @wrap_compiler
+            @depends(when=True)
+            def host_cxx_compiler():
+                return namespace(
+                    flags=[],
+                    type='gcc',
+                    compiler=os.path.abspath('/usr/bin/mockcc'),
+                    wrapper=[],
+                    language='C++',
+                )
+            """
+            % mozpath.normsep(base_dir)
+        )
+
+        config = {}
+        out = StringIO()
+        sandbox = ConfigureTestSandbox(paths, config, {}, ["/bin/configure"], out, out)
+        sandbox.include_file(os.path.join(base_dir, "util.configure"))
+        sandbox.include_file(os.path.join(base_dir, "checks.configure"))
+        exec_(mock_compiler_defs, sandbox)
+        sandbox.include_file(os.path.join(base_dir, "compile-checks.configure"))
+
+        status = 0
+        try:
+            exec_(command, sandbox)
+            sandbox.run()
+        except SystemExit as e:
+            status = e.code
+
+        return config, out.getvalue(), status
+
+
+class TestHeaderChecks(BaseCompileChecks):
+    def test_try_compile_include(self):
+        expected_test_content = textwrap.dedent(
+            """\
+            #include <foo.h>
+            #include <bar.h>
+            int
+            main(void)
+            {
+
+              ;
+              return 0;
+            }
+            """
+        )
+
+        cmd = textwrap.dedent(
+            """\
+            try_compile(['foo.h', 'bar.h'], language='C')
+            """
+        )
+
+        config, out, status = self.do_compile_test(cmd, expected_test_content)
+        self.assertEqual(status, 0)
+        self.assertEqual(config, {})
+
+    def test_try_compile_flags(self):
+        expected_flags = ["--extra", "--flags"]
+
+        cmd = textwrap.dedent(
+            """\
+            try_compile(language='C++', flags=['--flags', '--extra'])
+            """
+        )
+
+        config, out, status = self.do_compile_test(cmd, expected_flags=expected_flags)
+        self.assertEqual(status, 0)
+        self.assertEqual(config, {})
+
+    def test_try_compile_failure(self):
+        cmd = textwrap.dedent(
+            """\
+            have_fn = try_compile(body='somefn();', flags=['-funknown-flag'])
+            set_config('HAVE_SOMEFN', have_fn)
+
+            have_another = try_compile(body='anotherfn();', language='C')
+            set_config('HAVE_ANOTHERFN', have_another)
+            """
+        )
+
+        config, out, status = self.do_compile_test(cmd)
+        self.assertEqual(status, 0)
+        self.assertEqual(
+            config,
+            {
+                "HAVE_ANOTHERFN": True,
+            },
+        )
+
+    def test_try_compile_msg(self):
+        cmd = textwrap.dedent(
+            """\
+            known_flag = try_compile(language='C++', flags=['-fknown-flag'],
+                                     check_msg='whether -fknown-flag works')
+            set_config('HAVE_KNOWN_FLAG', known_flag)
+            """
+        )
+        config, out, status = self.do_compile_test(cmd)
+        self.assertEqual(status, 0)
+        self.assertEqual(config, {"HAVE_KNOWN_FLAG": True})
+        self.assertEqual(
+            out,
+            textwrap.dedent(
+                """\
+                checking whether -fknown-flag works... yes
+                """
+            ),
+        )
+
+    def test_check_header(self):
+        expected_test_content = textwrap.dedent(
+            """\
+            #include <foo.h>
+            int
+            main(void)
+            {
+
+              ;
+              return 0;
+            }
+            """
+        )
+
+        cmd = textwrap.dedent(
+            """\
+            check_header('foo.h')
+            """
+        )
+
+        config, out, status = self.do_compile_test(
+            cmd, expected_test_content=expected_test_content
+        )
+        self.assertEqual(status, 0)
+        self.assertEqual(config, {"DEFINES": {"HAVE_FOO_H": True}})
+        self.assertEqual(
+            out,
+            textwrap.dedent(
+                """\
+                checking for foo.h... yes
+                """
+            ),
+        )
+
+    def test_check_header_conditional(self):
+        cmd = textwrap.dedent(
+            """\
+            check_headers('foo.h', 'bar.h', when=never)
+            """
+        )
+
+        config, out, status = self.do_compile_test(cmd)
+        self.assertEqual(status, 0)
+        self.assertEqual(out, "")
+        self.assertEqual(config, {"DEFINES": {}})
+
+    def test_check_header_include(self):
+        expected_test_content = textwrap.dedent(
+            """\
+            #include <std.h>
+            #include <bar.h>
+            #include <foo.h>
+            int
+            main(void)
+            {
+
+              ;
+              return 0;
+            }
+            """
+        )
+
+        cmd = textwrap.dedent(
+            """\
+            have_foo = check_header('foo.h', includes=['std.h', 'bar.h'])
+            set_config('HAVE_FOO_H', have_foo)
+            """
+        )
+
+        config, out, status = self.do_compile_test(
+            cmd, expected_test_content=expected_test_content
+        )
+
+        self.assertEqual(status, 0)
+        self.assertEqual(
+            config,
+            {
+                "HAVE_FOO_H": True,
+                "DEFINES": {
+                    "HAVE_FOO_H": True,
+                },
+            },
+        )
+        self.assertEqual(
+            out,
+            textwrap.dedent(
+                """\
+                checking for foo.h... yes
+                """
+            ),
+        )
+
+    def test_check_headers_multiple(self):
+        cmd = textwrap.dedent(
+            """\
+            baz_bar, quux_bar = check_headers('baz/foo-bar.h', 'baz-quux/foo-bar.h')
+            set_config('HAVE_BAZ_BAR', baz_bar)
+            set_config('HAVE_QUUX_BAR', quux_bar)
+            """
+        )
+
+        config, out, status = self.do_compile_test(cmd)
+        self.assertEqual(status, 0)
+        self.assertEqual(
+            config,
+            {
+                "HAVE_BAZ_BAR": True,
+                "HAVE_QUUX_BAR": True,
+                "DEFINES": {
+                    "HAVE_BAZ_FOO_BAR_H": True,
+                    "HAVE_BAZ_QUUX_FOO_BAR_H": True,
+                },
+            },
+        )
+        self.assertEqual(
+            out,
+            textwrap.dedent(
+                """\
+                checking for baz/foo-bar.h... yes
+                checking for baz-quux/foo-bar.h... yes
+                """
+            ),
+        )
+
+    def test_check_headers_not_found(self):
+
+        cmd = textwrap.dedent(
+            """\
+            baz_bar, quux_bar = check_headers('baz/foo-bar.h', 'baz-quux/foo-bar.h',
+                                              flags=['-funknown-flag'])
+            set_config('HAVE_BAZ_BAR', baz_bar)
+            set_config('HAVE_QUUX_BAR', quux_bar)
+            """
+        )
+
+        config, out, status = self.do_compile_test(cmd)
+        self.assertEqual(status, 0)
+        self.assertEqual(config, {"DEFINES": {}})
+        self.assertEqual(
+            out,
+            textwrap.dedent(
+                """\
+                checking for baz/foo-bar.h... no
+                checking for baz-quux/foo-bar.h... no
+                """
+            ),
+        )
+
+
+class TestWarningChecks(BaseCompileChecks):
+    def get_warnings(self):
+        return textwrap.dedent(
+            """\
+            set_config('_WARNINGS_CFLAGS', warnings_flags.cflags)
+            set_config('_WARNINGS_CXXFLAGS', warnings_flags.cxxflags)
+            """
+        )
+
+    def test_check_and_add_warning(self):
+        for flag, expected_flags in (
+            ("-Wfoo", ["-Werror", "-Wfoo"]),
+            ("-Wno-foo", ["-Werror", "-Wfoo"]),
+            ("-Werror=foo", ["-Werror=foo"]),
+            ("-Wno-error=foo", ["-Wno-error=foo"]),
+        ):
+            cmd = (
+                textwrap.dedent(
+                    """\
+                    check_and_add_warning('%s')
+                    """
+                    % flag
+                )
+                + self.get_warnings()
+            )
+
+            config, out, status = self.do_compile_test(
+                cmd, expected_flags=expected_flags
+            )
+            self.assertEqual(status, 0)
+            self.assertEqual(
+                config,
+                {
+                    "_WARNINGS_CFLAGS": [flag],
+                    "_WARNINGS_CXXFLAGS": [flag],
+                },
+            )
+            self.assertEqual(
+                out,
+                textwrap.dedent(
+                    """\
+                    checking whether the C compiler supports {flag}... yes
+                    checking whether the C++ compiler supports {flag}... 
yes + """.format( + flag=flag + ) + ), + ) + + def test_check_and_add_warning_one(self): + cmd = ( + textwrap.dedent( + """\ + check_and_add_warning('-Wfoo', cxx_compiler) + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": [], + "_WARNINGS_CXXFLAGS": ["-Wfoo"], + }, + ) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking whether the C++ compiler supports -Wfoo... yes + """ + ), + ) + + def test_check_and_add_warning_when(self): + cmd = ( + textwrap.dedent( + """\ + @depends(when=True) + def never(): + return False + check_and_add_warning('-Wfoo', cxx_compiler, when=never) + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": [], + "_WARNINGS_CXXFLAGS": [], + }, + ) + self.assertEqual(out, "") + + cmd = ( + textwrap.dedent( + """\ + @depends(when=True) + def always(): + return True + check_and_add_warning('-Wfoo', cxx_compiler, when=always) + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": [], + "_WARNINGS_CXXFLAGS": ["-Wfoo"], + }, + ) + self.assertEqual( + out, + textwrap.dedent( + """\ + checking whether the C++ compiler supports -Wfoo... yes + """ + ), + ) + + def test_add_warning(self): + cmd = ( + textwrap.dedent( + """\ + add_warning('-Wfoo') + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": ["-Wfoo"], + "_WARNINGS_CXXFLAGS": ["-Wfoo"], + }, + ) + self.assertEqual(out, "") + + def test_add_warning_one(self): + cmd = ( + textwrap.dedent( + """\ + add_warning('-Wfoo', c_compiler) + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": ["-Wfoo"], + "_WARNINGS_CXXFLAGS": [], + }, + ) + self.assertEqual(out, "") + + def test_add_warning_when(self): + cmd = ( + textwrap.dedent( + """\ + @depends(when=True) + def never(): + return False + add_warning('-Wfoo', c_compiler, when=never) + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": [], + "_WARNINGS_CXXFLAGS": [], + }, + ) + self.assertEqual(out, "") + + cmd = ( + textwrap.dedent( + """\ + @depends(when=True) + def always(): + return True + add_warning('-Wfoo', c_compiler, when=always) + """ + ) + + self.get_warnings() + ) + + config, out, status = self.do_compile_test(cmd) + self.assertEqual(status, 0) + self.assertEqual( + config, + { + "_WARNINGS_CFLAGS": ["-Wfoo"], + "_WARNINGS_CXXFLAGS": [], + }, + ) + self.assertEqual(out, "") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_configure.py b/python/mozbuild/mozbuild/test/configure/test_configure.py new file mode 100644 index 0000000000..a5e42faae3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_configure.py @@ -0,0 +1,1986 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import sys +import textwrap +import unittest + +import mozpack.path as mozpath +import six +from mozunit import MockedOpen, main +from six import StringIO + +from mozbuild.configure import ConfigureError, ConfigureSandbox +from mozbuild.configure.options import ( + InvalidOptionError, + NegativeOptionValue, + PositiveOptionValue, +) +from mozbuild.util import ReadOnlyNamespace, exec_, memoized_property + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data") + + +class TestConfigure(unittest.TestCase): + def get_config( + self, options=[], env={}, configure="moz.configure", prog="/bin/configure" + ): + config = {} + out = StringIO() + sandbox = ConfigureSandbox(config, env, [prog] + options, out, out) + + sandbox.run(mozpath.join(test_data_path, configure)) + + if "--help" in options: + return six.ensure_text(out.getvalue()), config + self.assertEqual("", out.getvalue()) + return config + + def moz_configure(self, source): + return MockedOpen( + {os.path.join(test_data_path, "moz.configure"): textwrap.dedent(source)} + ) + + def test_defaults(self): + config = self.get_config() + self.maxDiff = None + self.assertEqual( + { + "CHOICES": NegativeOptionValue(), + "DEFAULTED": PositiveOptionValue(("not-simple",)), + "IS_GCC": NegativeOptionValue(), + "REMAINDER": ( + PositiveOptionValue(), + NegativeOptionValue(), + NegativeOptionValue(), + NegativeOptionValue(), + ), + "SIMPLE": NegativeOptionValue(), + "VALUES": NegativeOptionValue(), + "VALUES2": NegativeOptionValue(), + "VALUES3": NegativeOptionValue(), + "WITH_ENV": NegativeOptionValue(), + }, + config, + ) + + def test_help(self): + help, config = self.get_config(["--help"], prog="configure") + + self.assertEqual({}, config) + self.maxDiff = None + self.assertEqual( + "Usage: configure [options]\n" + "\n" + "Options: [defaults in brackets after descriptions]\n" + " Help options:\n" + " --help print this message\n" + "\n" + " Options from python/mozbuild/mozbuild/test/configure/data/included.configure:\n" + " --enable-imports-in-template\n Imports in template\n" + "\n" + " Options from python/mozbuild/mozbuild/test/configure/data/moz.configure:\n" + " --enable-include Include\n" + " --enable-simple Enable simple\n" + " --enable-values Enable values\n" + " --enable-with-env Enable with env\n" + " --indirect-option Indirectly defined option\n" + " --option Option\n" + " --returned-choices Choices\n" + " --with-imports Imports\n" + " --with-returned-default Returned default [not-simple]\n" + " --with-stuff Build with stuff\n" + " --without-thing Build without thing\n" + "\n" + "\n" + "Environment variables:\n" + " Options from python/mozbuild/mozbuild/test/configure/data/moz.configure:\n" + " CC C Compiler\n" + "\n", + help.replace("\\", "/"), + ) + + def test_unknown(self): + with self.assertRaises(InvalidOptionError): + self.get_config(["--unknown"]) + + def test_simple(self): + for config in ( + self.get_config(), + self.get_config(["--disable-simple"]), + # Last option wins. 
+ self.get_config(["--enable-simple", "--disable-simple"]), + ): + self.assertNotIn("ENABLED_SIMPLE", config) + self.assertIn("SIMPLE", config) + self.assertEqual(NegativeOptionValue(), config["SIMPLE"]) + + for config in ( + self.get_config(["--enable-simple"]), + self.get_config(["--disable-simple", "--enable-simple"]), + ): + self.assertIn("ENABLED_SIMPLE", config) + self.assertIn("SIMPLE", config) + self.assertEqual(PositiveOptionValue(), config["SIMPLE"]) + self.assertIs(config["SIMPLE"], config["ENABLED_SIMPLE"]) + + # --enable-simple doesn't take values. + with self.assertRaises(InvalidOptionError): + self.get_config(["--enable-simple=value"]) + + def test_with_env(self): + for config in ( + self.get_config(), + self.get_config(["--disable-with-env"]), + self.get_config(["--enable-with-env", "--disable-with-env"]), + self.get_config(env={"MOZ_WITH_ENV": ""}), + # Options win over environment + self.get_config(["--disable-with-env"], env={"MOZ_WITH_ENV": "1"}), + ): + self.assertIn("WITH_ENV", config) + self.assertEqual(NegativeOptionValue(), config["WITH_ENV"]) + + for config in ( + self.get_config(["--enable-with-env"]), + self.get_config(["--disable-with-env", "--enable-with-env"]), + self.get_config(env={"MOZ_WITH_ENV": "1"}), + self.get_config(["--enable-with-env"], env={"MOZ_WITH_ENV": ""}), + ): + self.assertIn("WITH_ENV", config) + self.assertEqual(PositiveOptionValue(), config["WITH_ENV"]) + + with self.assertRaises(InvalidOptionError): + self.get_config(["--enable-with-env=value"]) + + with self.assertRaises(InvalidOptionError): + self.get_config(env={"MOZ_WITH_ENV": "value"}) + + def test_values(self, name="VALUES"): + for config in ( + self.get_config(), + self.get_config(["--disable-values"]), + self.get_config(["--enable-values", "--disable-values"]), + ): + self.assertIn(name, config) + self.assertEqual(NegativeOptionValue(), config[name]) + + for config in ( + self.get_config(["--enable-values"]), + self.get_config(["--disable-values", "--enable-values"]), + ): + self.assertIn(name, config) + self.assertEqual(PositiveOptionValue(), config[name]) + + config = self.get_config(["--enable-values=foo"]) + self.assertIn(name, config) + self.assertEqual(PositiveOptionValue(("foo",)), config[name]) + + config = self.get_config(["--enable-values=foo,bar"]) + self.assertIn(name, config) + self.assertTrue(config[name]) + self.assertEqual(PositiveOptionValue(("foo", "bar")), config[name]) + + def test_values2(self): + self.test_values("VALUES2") + + def test_values3(self): + self.test_values("VALUES3") + + def test_returned_default(self): + config = self.get_config(["--enable-simple"]) + self.assertIn("DEFAULTED", config) + self.assertEqual(PositiveOptionValue(("simple",)), config["DEFAULTED"]) + + config = self.get_config(["--disable-simple"]) + self.assertIn("DEFAULTED", config) + self.assertEqual(PositiveOptionValue(("not-simple",)), config["DEFAULTED"]) + + def test_returned_choices(self): + for val in ("a", "b", "c"): + config = self.get_config( + ["--enable-values=alpha", "--returned-choices=%s" % val] + ) + self.assertIn("CHOICES", config) + self.assertEqual(PositiveOptionValue((val,)), config["CHOICES"]) + + for val in ("0", "1", "2"): + config = self.get_config( + ["--enable-values=numeric", "--returned-choices=%s" % val] + ) + self.assertIn("CHOICES", config) + self.assertEqual(PositiveOptionValue((val,)), config["CHOICES"]) + + with self.assertRaises(InvalidOptionError): + self.get_config(["--enable-values=numeric", "--returned-choices=a"]) + + with 
self.assertRaises(InvalidOptionError): + self.get_config(["--enable-values=alpha", "--returned-choices=0"]) + + def test_included(self): + config = self.get_config(env={"CC": "gcc"}) + self.assertIn("IS_GCC", config) + self.assertEqual(config["IS_GCC"], True) + + config = self.get_config(["--enable-include=extra.configure", "--extra"]) + self.assertIn("EXTRA", config) + self.assertEqual(PositiveOptionValue(), config["EXTRA"]) + + with self.assertRaises(InvalidOptionError): + self.get_config(["--extra"]) + + def test_template(self): + config = self.get_config(env={"CC": "gcc"}) + self.assertIn("CFLAGS", config) + self.assertEqual(config["CFLAGS"], ["-Werror=foobar"]) + + config = self.get_config(env={"CC": "clang"}) + self.assertNotIn("CFLAGS", config) + + def test_imports(self): + config = {} + out = StringIO() + sandbox = ConfigureSandbox(config, {}, ["configure"], out, out) + + with self.assertRaises(ImportError): + exec_( + textwrap.dedent( + """ + @template + def foo(): + import sys + foo()""" + ), + sandbox, + ) + + exec_( + textwrap.dedent( + """ + @template + @imports('sys') + def foo(): + return sys""" + ), + sandbox, + ) + + self.assertIs(sandbox["foo"](), sys) + + # os.path after an import is a mix of vanilla os.path and sandbox os.path. + os_path = {} + exec_("from os.path import *", {}, os_path) + os_path.update(sandbox.OS.path.__dict__) + os_path = ReadOnlyNamespace(**os_path) + + exec_( + textwrap.dedent( + """ + @template + @imports(_from='os', _import='path') + def foo(): + return path""" + ), + sandbox, + ) + + self.assertEqual(sandbox["foo"](), os_path) + + exec_( + textwrap.dedent( + """ + @template + @imports(_from='os', _import='path', _as='os_path') + def foo(): + return os_path""" + ), + sandbox, + ) + + self.assertEqual(sandbox["foo"](), os_path) + + exec_( + textwrap.dedent( + """ + @template + @imports('__builtin__') + def foo(): + return __builtin__""" + ), + sandbox, + ) + + self.assertIs(sandbox["foo"](), six.moves.builtins) + + exec_( + textwrap.dedent( + """ + @template + @imports(_from='__builtin__', _import='open') + def foo(): + return open('%s')""" + % os.devnull + ), + sandbox, + ) + + f = sandbox["foo"]() + self.assertEqual(f.name, os.devnull) + f.close() + + # This unlocks the sandbox + exec_( + textwrap.dedent( + """ + @template + @imports(_import='__builtin__', _as='__builtins__') + def foo(): + import sys + return sys""" + ), + sandbox, + ) + + self.assertIs(sandbox["foo"](), sys) + + exec_( + textwrap.dedent( + """ + @template + @imports('__sandbox__') + def foo(): + return __sandbox__""" + ), + sandbox, + ) + + self.assertIs(sandbox["foo"](), sandbox) + + exec_( + textwrap.dedent( + """ + @template + @imports(_import='__sandbox__', _as='s') + def foo(): + return s""" + ), + sandbox, + ) + + self.assertIs(sandbox["foo"](), sandbox) + + # Nothing leaked from the function being executed + self.assertEqual(list(sandbox), ["__builtins__", "foo"]) + self.assertEqual(sandbox["__builtins__"], ConfigureSandbox.BUILTINS) + + exec_( + textwrap.dedent( + """ + @template + @imports('sys') + def foo(): + @depends(when=True) + def bar(): + return sys + return bar + bar = foo()""" + ), + sandbox, + ) + + with self.assertRaises(NameError) as e: + sandbox._depends[sandbox["bar"]].result() + + self.assertIn("name 'sys' is not defined", str(e.exception)) + + def test_apply_imports(self): + imports = [] + + class CountApplyImportsSandbox(ConfigureSandbox): + def _apply_imports(self, *args, **kwargs): + imports.append((args, kwargs)) + super(CountApplyImportsSandbox, 
self)._apply_imports(*args, **kwargs) + + config = {} + out = StringIO() + sandbox = CountApplyImportsSandbox(config, {}, ["configure"], out, out) + + exec_( + textwrap.dedent( + """ + @template + @imports('sys') + def foo(): + return sys + foo() + foo()""" + ), + sandbox, + ) + + self.assertEqual(len(imports), 1) + + def test_import_wrapping(self): + bar = object() + foo = ReadOnlyNamespace(bar=bar) + + class BasicWrappingSandbox(ConfigureSandbox): + @memoized_property + def _wrapped_foo(self): + return foo + + config = {} + out = StringIO() + sandbox = BasicWrappingSandbox(config, {}, ["configure"], out, out) + + exec_( + textwrap.dedent( + """ + @template + @imports('foo') + def toplevel(): + return foo + @template + @imports('foo.bar') + def bar(): + return foo.bar + @template + @imports('foo.bar') + def bar_upper(): + return foo + @template + @imports(_from='foo', _import='bar') + def from_import(): + return bar + @template + @imports(_from='foo', _import='bar', _as='custom_name') + def from_import_as(): + return custom_name + @template + @imports(_import='foo', _as='custom_name') + def import_as(): + return custom_name + """ + ), + sandbox, + ) + self.assertIs(sandbox["toplevel"](), foo) + self.assertIs(sandbox["bar"](), bar) + self.assertIs(sandbox["bar_upper"](), foo) + self.assertIs(sandbox["from_import"](), bar) + self.assertIs(sandbox["from_import_as"](), bar) + self.assertIs(sandbox["import_as"](), foo) + + def test_os_path(self): + config = self.get_config(["--with-imports=%s" % __file__]) + self.assertIn("HAS_ABSPATH", config) + self.assertEqual(config["HAS_ABSPATH"], True) + self.assertIn("HAS_GETATIME", config) + self.assertEqual(config["HAS_GETATIME"], True) + self.assertIn("HAS_GETATIME2", config) + self.assertEqual(config["HAS_GETATIME2"], False) + + def test_template_call(self): + config = self.get_config(env={"CC": "gcc"}) + self.assertIn("TEMPLATE_VALUE", config) + self.assertEqual(config["TEMPLATE_VALUE"], 42) + self.assertIn("TEMPLATE_VALUE_2", config) + self.assertEqual(config["TEMPLATE_VALUE_2"], 21) + + def test_template_imports(self): + config = self.get_config(["--enable-imports-in-template"]) + self.assertIn("PLATFORM", config) + self.assertEqual(config["PLATFORM"], sys.platform) + + def test_decorators(self): + config = {} + out = StringIO() + sandbox = ConfigureSandbox(config, {}, ["configure"], out, out) + + sandbox.include_file(mozpath.join(test_data_path, "decorators.configure")) + + self.assertNotIn("FOO", sandbox) + self.assertNotIn("BAR", sandbox) + self.assertNotIn("QUX", sandbox) + + def test_set_config(self): + def get_config(*args): + return self.get_config(*args, configure="set_config.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {}) + + config = get_config(["--set-foo"]) + self.assertIn("FOO", config) + self.assertEqual(config["FOO"], True) + + config = get_config(["--set-bar"]) + self.assertNotIn("FOO", config) + self.assertIn("BAR", config) + self.assertEqual(config["BAR"], True) + + config = get_config(["--set-value=qux"]) + self.assertIn("VALUE", config) + self.assertEqual(config["VALUE"], "qux") + + config = get_config(["--set-name=hoge"]) + self.assertIn("hoge", config) + self.assertEqual(config["hoge"], True) + + config = get_config([]) + self.assertEqual(config, {"BAR": False}) + + with self.assertRaises(ConfigureError): + # Both --set-foo and --set-name=FOO are going to try to + # set_config('FOO'...) 
+ get_config(["--set-foo", "--set-name=FOO"]) + + def test_set_config_when(self): + with self.moz_configure( + """ + option('--with-qux', help='qux') + set_config('FOO', 'foo', when=True) + set_config('BAR', 'bar', when=False) + set_config('QUX', 'qux', when='--with-qux') + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": "foo", + }, + ) + config = self.get_config(["--with-qux"]) + self.assertEqual( + config, + { + "FOO": "foo", + "QUX": "qux", + }, + ) + + def test_set_config_when_disable(self): + with self.moz_configure( + """ + option('--disable-baz', help='Disable baz') + set_config('BAZ', True, when='--enable-baz') + """ + ): + config = self.get_config() + self.assertEqual(config["BAZ"], True) + config = self.get_config(["--enable-baz"]) + self.assertEqual(config["BAZ"], True) + config = self.get_config(["--disable-baz"]) + self.assertEqual(config, {}) + + def test_set_define(self): + def get_config(*args): + return self.get_config(*args, configure="set_define.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {"DEFINES": {}}) + + config = get_config(["--set-foo"]) + self.assertIn("FOO", config["DEFINES"]) + self.assertEqual(config["DEFINES"]["FOO"], True) + + config = get_config(["--set-bar"]) + self.assertNotIn("FOO", config["DEFINES"]) + self.assertIn("BAR", config["DEFINES"]) + self.assertEqual(config["DEFINES"]["BAR"], True) + + config = get_config(["--set-value=qux"]) + self.assertIn("VALUE", config["DEFINES"]) + self.assertEqual(config["DEFINES"]["VALUE"], "qux") + + config = get_config(["--set-name=hoge"]) + self.assertIn("hoge", config["DEFINES"]) + self.assertEqual(config["DEFINES"]["hoge"], True) + + config = get_config([]) + self.assertEqual(config["DEFINES"], {"BAR": False}) + + with self.assertRaises(ConfigureError): + # Both --set-foo and --set-name=FOO are going to try to + # set_define('FOO'...) 
+ get_config(["--set-foo", "--set-name=FOO"]) + + def test_set_define_when(self): + with self.moz_configure( + """ + option('--with-qux', help='qux') + set_define('FOO', 'foo', when=True) + set_define('BAR', 'bar', when=False) + set_define('QUX', 'qux', when='--with-qux') + """ + ): + config = self.get_config() + self.assertEqual( + config["DEFINES"], + { + "FOO": "foo", + }, + ) + config = self.get_config(["--with-qux"]) + self.assertEqual( + config["DEFINES"], + { + "FOO": "foo", + "QUX": "qux", + }, + ) + + def test_set_define_when_disable(self): + with self.moz_configure( + """ + option('--disable-baz', help='Disable baz') + set_define('BAZ', True, when='--enable-baz') + """ + ): + config = self.get_config() + self.assertEqual(config["DEFINES"]["BAZ"], True) + config = self.get_config(["--enable-baz"]) + self.assertEqual(config["DEFINES"]["BAZ"], True) + config = self.get_config(["--disable-baz"]) + self.assertEqual(config["DEFINES"], {}) + + def test_imply_option_simple(self): + def get_config(*args): + return self.get_config(*args, configure="imply_option/simple.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {}) + + config = get_config([]) + self.assertEqual(config, {}) + + config = get_config(["--enable-foo"]) + self.assertIn("BAR", config) + self.assertEqual(config["BAR"], PositiveOptionValue()) + + with self.assertRaises(InvalidOptionError) as e: + get_config(["--enable-foo", "--disable-bar"]) + + self.assertEqual( + str(e.exception), + "'--enable-bar' implied by '--enable-foo' conflicts with " + "'--disable-bar' from the command-line", + ) + + def test_imply_option_negative(self): + def get_config(*args): + return self.get_config(*args, configure="imply_option/negative.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {}) + + config = get_config([]) + self.assertEqual(config, {}) + + config = get_config(["--enable-foo"]) + self.assertIn("BAR", config) + self.assertEqual(config["BAR"], NegativeOptionValue()) + + with self.assertRaises(InvalidOptionError) as e: + get_config(["--enable-foo", "--enable-bar"]) + + self.assertEqual( + str(e.exception), + "'--disable-bar' implied by '--enable-foo' conflicts with " + "'--enable-bar' from the command-line", + ) + + config = get_config(["--disable-hoge"]) + self.assertIn("BAR", config) + self.assertEqual(config["BAR"], NegativeOptionValue()) + + with self.assertRaises(InvalidOptionError) as e: + get_config(["--disable-hoge", "--enable-bar"]) + + self.assertEqual( + str(e.exception), + "'--disable-bar' implied by '--disable-hoge' conflicts with " + "'--enable-bar' from the command-line", + ) + + def test_imply_option_values(self): + def get_config(*args): + return self.get_config(*args, configure="imply_option/values.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {}) + + config = get_config([]) + self.assertEqual(config, {}) + + config = get_config(["--enable-foo=a"]) + self.assertIn("BAR", config) + self.assertEqual(config["BAR"], PositiveOptionValue(("a",))) + + config = get_config(["--enable-foo=a,b"]) + self.assertIn("BAR", config) + self.assertEqual(config["BAR"], PositiveOptionValue(("a", "b"))) + + with self.assertRaises(InvalidOptionError) as e: + get_config(["--enable-foo=a,b", "--disable-bar"]) + + self.assertEqual( + str(e.exception), + "'--enable-bar=a,b' implied by '--enable-foo' conflicts with " + "'--disable-bar' from the command-line", + ) + + def test_imply_option_infer(self): + def get_config(*args): + return 
self.get_config(*args, configure="imply_option/infer.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {}) + + config = get_config([]) + self.assertEqual(config, {}) + + with self.assertRaises(InvalidOptionError) as e: + get_config(["--enable-foo", "--disable-bar"]) + + self.assertEqual( + str(e.exception), + "'--enable-bar' implied by '--enable-foo' conflicts with " + "'--disable-bar' from the command-line", + ) + + with self.assertRaises(ConfigureError) as e: + self.get_config([], configure="imply_option/infer_ko.configure") + + self.assertEqual( + str(e.exception), + "Cannot infer what implies '--enable-bar'. Please add a `reason` " + "to the `imply_option` call.", + ) + + def test_imply_option_immediate_value(self): + def get_config(*args): + return self.get_config(*args, configure="imply_option/imm.configure") + + help, config = get_config(["--help"]) + self.assertEqual(config, {}) + + config = get_config([]) + self.assertEqual(config, {}) + + config_path = mozpath.abspath( + mozpath.join(test_data_path, "imply_option", "imm.configure") + ) + + with self.assertRaisesRegexp( + InvalidOptionError, + "--enable-foo' implied by 'imply_option at %s:7' conflicts " + "with '--disable-foo' from the command-line" % config_path, + ): + get_config(["--disable-foo"]) + + with self.assertRaisesRegexp( + InvalidOptionError, + "--enable-bar=foo,bar' implied by 'imply_option at %s:18' " + "conflicts with '--enable-bar=a,b,c' from the command-line" % config_path, + ): + get_config(["--enable-bar=a,b,c"]) + + with self.assertRaisesRegexp( + InvalidOptionError, + "--enable-baz=BAZ' implied by 'imply_option at %s:29' " + "conflicts with '--enable-baz=QUUX' from the command-line" % config_path, + ): + get_config(["--enable-baz=QUUX"]) + + def test_imply_option_failures(self): + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + imply_option('--with-foo', ('a',), 'bar') + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), + "`--with-foo`, emitted from `%s` line 2, is unknown." 
+ % mozpath.join(test_data_path, "moz.configure"), + ) + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + imply_option('--with-foo', 42, 'bar') + + option('--with-foo', help='foo') + @depends('--with-foo') + def foo(value): + return value + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Unexpected type: 'int'") + + def test_imply_option_when(self): + with self.moz_configure( + """ + option('--with-foo', help='foo') + imply_option('--with-qux', True, when='--with-foo') + option('--with-qux', help='qux') + set_config('QUX', depends('--with-qux')(lambda x: x)) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "QUX": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--with-foo"]) + self.assertEqual( + config, + { + "QUX": PositiveOptionValue(), + }, + ) + + def test_imply_option_dependency_loop(self): + with self.moz_configure( + """ + option('--without-foo', help='foo') + + @depends('--with-foo') + def qux_default(foo): + return bool(foo) + + option('--with-qux', default=qux_default, help='qux') + + imply_option('--with-foo', depends('--with-qux')(lambda x: x or None)) + + set_config('FOO', depends('--with-foo')(lambda x: x)) + set_config('QUX', depends('--with-qux')(lambda x: x)) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": PositiveOptionValue(), + }, + ) + + config = self.get_config(["--without-foo"]) + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--with-qux"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": PositiveOptionValue(), + }, + ) + + with self.assertRaises(InvalidOptionError) as e: + config = self.get_config(["--without-foo", "--with-qux"]) + + self.assertEqual( + str(e.exception), + "'--with-foo' implied by '--with-qux' conflicts " + "with '--without-foo' from the command-line", + ) + + config = self.get_config(["--without-qux"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + with self.moz_configure( + """ + option('--with-foo', help='foo') + + @depends('--with-foo') + def qux_default(foo): + return bool(foo) + + option('--with-qux', default=qux_default, help='qux') + + imply_option('--with-foo', depends('--with-qux')(lambda x: x or None)) + + set_config('FOO', depends('--with-foo')(lambda x: x)) + set_config('QUX', depends('--with-qux')(lambda x: x)) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--with-foo"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": PositiveOptionValue(), + }, + ) + + with self.assertRaises(InvalidOptionError) as e: + config = self.get_config(["--with-qux"]) + + self.assertEqual( + str(e.exception), + "'--with-foo' implied by '--with-qux' conflicts " + "with '--without-foo' from the default", + ) + + with self.assertRaises(InvalidOptionError) as e: + config = self.get_config(["--without-foo", "--with-qux"]) + + self.assertEqual( + str(e.exception), + "'--with-foo' implied by '--with-qux' conflicts " + "with '--without-foo' from the command-line", + ) + + config = self.get_config(["--without-qux"]) + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + config_path = mozpath.abspath(mozpath.join(test_data_path, 
"moz.configure")) + + # Same test as above, but using `when` in the `imply_option`. + with self.moz_configure( + """ + option('--with-foo', help='foo') + + @depends('--with-foo') + def qux_default(foo): + return bool(foo) + + option('--with-qux', default=qux_default, help='qux') + + imply_option('--with-foo', True, when='--with-qux') + + set_config('FOO', depends('--with-foo')(lambda x: x)) + set_config('QUX', depends('--with-qux')(lambda x: x)) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--with-foo"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": PositiveOptionValue(), + }, + ) + + with self.assertRaises(InvalidOptionError) as e: + config = self.get_config(["--with-qux"]) + + self.assertEqual( + str(e.exception), + "'--with-foo' implied by 'imply_option at %s:10' conflicts " + "with '--without-foo' from the default" % config_path, + ) + + with self.assertRaises(InvalidOptionError) as e: + config = self.get_config(["--without-foo", "--with-qux"]) + + self.assertEqual( + str(e.exception), + "'--with-foo' implied by 'imply_option at %s:10' conflicts " + "with '--without-foo' from the command-line" % config_path, + ) + + config = self.get_config(["--without-qux"]) + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + def test_imply_option_recursion(self): + config_path = mozpath.abspath(mozpath.join(test_data_path, "moz.configure")) + + message = ( + "'--without-foo' appears somewhere in the direct or indirect dependencies " + "when resolving imply_option at %s:8" % config_path + ) + + with self.moz_configure( + """ + option('--without-foo', help='foo') + + imply_option('--with-qux', depends('--with-foo')(lambda x: x or None)) + + option('--with-qux', help='qux') + + imply_option('--with-foo', depends('--with-qux')(lambda x: x or None)) + + set_config('FOO', depends('--with-foo')(lambda x: x)) + set_config('QUX', depends('--with-qux')(lambda x: x)) + """ + ): + # Note: no error is detected when the depends function in the + # imply_options resolve to None, which disables the imply_option. 
+ + with self.assertRaises(ConfigureError) as e: + self.get_config() + + self.assertEqual(str(e.exception), message) + + with self.assertRaises(ConfigureError) as e: + self.get_config(["--with-qux"]) + + self.assertEqual(str(e.exception), message) + + with self.assertRaises(ConfigureError) as e: + self.get_config(["--without-foo", "--with-qux"]) + + self.assertEqual(str(e.exception), message) + + def test_option_failures(self): + with self.assertRaises(ConfigureError) as e: + with self.moz_configure('option("--with-foo", help="foo")'): + self.get_config() + + self.assertEqual( + str(e.exception), + "Option `--with-foo` is not handled ; reference it with a @depends", + ) + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option("--with-foo", help="foo") + option("--with-foo", help="foo") + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Option `--with-foo` already defined") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option(env="MOZ_FOO", help="foo") + option(env="MOZ_FOO", help="foo") + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Option `MOZ_FOO` already defined") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option('--with-foo', env="MOZ_FOO", help="foo") + option(env="MOZ_FOO", help="foo") + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Option `MOZ_FOO` already defined") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option(env="MOZ_FOO", help="foo") + option('--with-foo', env="MOZ_FOO", help="foo") + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Option `MOZ_FOO` already defined") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option('--with-foo', env="MOZ_FOO", help="foo") + option('--with-foo', help="foo") + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Option `--with-foo` already defined") + + def test_option_when(self): + with self.moz_configure( + """ + option('--with-foo', help='foo', when=True) + option('--with-bar', help='bar', when=False) + option('--with-qux', env="QUX", help='qux', when='--with-foo') + + set_config('FOO', depends('--with-foo', when=True)(lambda x: x)) + set_config('BAR', depends('--with-bar', when=False)(lambda x: x)) + set_config('QUX', depends('--with-qux', when='--with-foo')(lambda x: x)) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--with-foo"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--with-foo", "--with-qux"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(), + "QUX": PositiveOptionValue(), + }, + ) + + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-bar"]) + + self.assertEqual( + str(e.exception), "--with-bar is not available in this configuration" + ) + + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-qux"]) + + self.assertEqual( + str(e.exception), "--with-qux is not available in this configuration" + ) + + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["QUX=1"]) + + self.assertEqual( + str(e.exception), "QUX is not available in this configuration" + ) + + config = self.get_config(env={"QUX": "1"}) + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + }, + 
) + + help, config = self.get_config(["--help"]) + self.assertEqual( + help.replace("\\", "/"), + textwrap.dedent( + """\ + Usage: configure [options] + + Options: [defaults in brackets after descriptions] + Help options: + --help print this message + + Options from python/mozbuild/mozbuild/test/configure/data/moz.configure: + --with-foo foo + + + Environment variables: + """ + ), + ) + + help, config = self.get_config(["--help", "--with-foo"]) + self.assertEqual( + help.replace("\\", "/"), + textwrap.dedent( + """\ + Usage: configure [options] + + Options: [defaults in brackets after descriptions] + Help options: + --help print this message + + Options from python/mozbuild/mozbuild/test/configure/data/moz.configure: + --with-foo foo + --with-qux qux + + + Environment variables: + """ + ), + ) + + with self.moz_configure( + """ + option('--with-foo', help='foo', when=True) + set_config('FOO', depends('--with-foo')(lambda x: x)) + """ + ): + with self.assertRaises(ConfigureError) as e: + self.get_config() + + self.assertEqual( + str(e.exception), + "@depends function needs the same `when` as " "options it depends on", + ) + + with self.moz_configure( + """ + @depends(when=True) + def always(): + return True + @depends(when=True) + def always2(): + return True + option('--with-foo', help='foo', when=always) + set_config('FOO', depends('--with-foo', when=always2)(lambda x: x)) + """ + ): + with self.assertRaises(ConfigureError) as e: + self.get_config() + + self.assertEqual( + str(e.exception), + "@depends function needs the same `when` as " "options it depends on", + ) + + with self.moz_configure( + """ + @depends(when=True) + def always(): + return True + @depends(when=True) + def always2(): + return True + with only_when(always2): + option('--with-foo', help='foo', when=always) + # include() triggers resolution of its dependencies, and their + # side effects. + include(depends('--with-foo', when=always)(lambda x: x)) + # The sandbox should figure that the `when` here is + # appropriate. Bad behavior in CombinedDependsFunction.__eq__ + # made this fail in the past. 
+ set_config('FOO', depends('--with-foo', when=always)(lambda x: x)) + """ + ): + self.get_config() + + with self.moz_configure( + """ + option('--with-foo', help='foo') + option('--without-bar', help='bar', when='--with-foo') + option('--with-qux', help='qux', when='--with-bar') + set_config('QUX', True, when='--with-qux') + """ + ): + # These are valid: + self.get_config(["--with-foo"]) + self.get_config(["--with-foo", "--with-bar"]) + self.get_config(["--with-foo", "--without-bar"]) + self.get_config(["--with-foo", "--with-bar", "--with-qux"]) + self.get_config(["--with-foo", "--with-bar", "--without-qux"]) + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-bar"]) + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--without-bar"]) + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-qux"]) + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--without-qux"]) + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-foo", "--without-bar", "--with-qux"]) + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-foo", "--without-bar", "--without-qux"]) + + def test_include_failures(self): + with self.assertRaises(ConfigureError) as e: + with self.moz_configure('include("../foo.configure")'): + self.get_config() + + self.assertEqual( + str(e.exception), + "Cannot include `%s` because it is not in a subdirectory of `%s`" + % ( + mozpath.normpath(mozpath.join(test_data_path, "..", "foo.configure")), + mozpath.normsep(test_data_path), + ), + ) + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + include('extra.configure') + include('extra.configure') + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), + "Cannot include `%s` because it was included already." 
+ % mozpath.normpath(mozpath.join(test_data_path, "extra.configure")), + ) + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + include(42) + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Unexpected type: 'int'") + + def test_include_when(self): + with MockedOpen( + { + os.path.join(test_data_path, "moz.configure"): textwrap.dedent( + """ + option('--with-foo', help='foo') + + include('always.configure', when=True) + include('never.configure', when=False) + include('foo.configure', when='--with-foo') + + set_config('FOO', foo) + set_config('BAR', bar) + set_config('QUX', qux) + """ + ), + os.path.join(test_data_path, "always.configure"): textwrap.dedent( + """ + option('--with-bar', help='bar') + @depends('--with-bar') + def bar(x): + if x: + return 'bar' + """ + ), + os.path.join(test_data_path, "never.configure"): textwrap.dedent( + """ + option('--with-qux', help='qux') + @depends('--with-qux') + def qux(x): + if x: + return 'qux' + """ + ), + os.path.join(test_data_path, "foo.configure"): textwrap.dedent( + """ + option('--with-foo-really', help='really foo') + @depends('--with-foo-really') + def foo(x): + if x: + return 'foo' + + include('foo2.configure', when='--with-foo-really') + """ + ), + os.path.join(test_data_path, "foo2.configure"): textwrap.dedent( + """ + set_config('FOO2', True) + """ + ), + } + ): + config = self.get_config() + self.assertEqual(config, {}) + + config = self.get_config(["--with-foo"]) + self.assertEqual(config, {}) + + config = self.get_config(["--with-bar"]) + self.assertEqual( + config, + { + "BAR": "bar", + }, + ) + + with self.assertRaises(InvalidOptionError) as e: + self.get_config(["--with-qux"]) + + self.assertEqual( + str(e.exception), "--with-qux is not available in this configuration" + ) + + config = self.get_config(["--with-foo", "--with-foo-really"]) + self.assertEqual( + config, + { + "FOO": "foo", + "FOO2": True, + }, + ) + + def test_sandbox_failures(self): + with self.assertRaises(KeyError) as e: + with self.moz_configure( + """ + include = 42 + """ + ): + self.get_config() + + self.assertIn("Cannot reassign builtins", str(e.exception)) + + with self.assertRaises(KeyError) as e: + with self.moz_configure( + """ + foo = 42 + """ + ): + self.get_config() + + self.assertIn( + "Cannot assign `foo` because it is neither a @depends nor a " "@template", + str(e.exception), + ) + + def test_depends_failures(self): + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + @depends() + def foo(): + return + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "@depends needs at least one argument") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + @depends('--with-foo') + def foo(value): + return value + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), + "'--with-foo' is not a known option. 
Maybe it's " "declared too late?", + ) + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + @depends('--with-foo=42') + def foo(value): + return value + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Option must not contain an '='") + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + @depends(42) + def foo(value): + return value + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), + "Cannot use object of type 'int' as argument " "to @depends", + ) + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + @depends('--help') + def foo(value): + yield + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), "Cannot decorate generator functions with @depends" + ) + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + @depends('--help') + def foo(value): + return value + + depends('--help')(foo) + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Cannot nest @depends functions") + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + @template + def foo(f): + pass + + depends('--help')(foo) + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "Cannot use a @template function here") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return value + + foo() + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "The `foo` function may not be called") + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + @depends('--help', foo=42) + def foo(_): + return + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), "depends_impl() got an unexpected keyword argument 'foo'" + ) + + def test_depends_when(self): + with self.moz_configure( + """ + @depends(when=True) + def foo(): + return 'foo' + + set_config('FOO', foo) + + @depends(when=False) + def bar(): + return 'bar' + + set_config('BAR', bar) + + option('--with-qux', help='qux') + @depends(when='--with-qux') + def qux(): + return 'qux' + + set_config('QUX', qux) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": "foo", + }, + ) + + config = self.get_config(["--with-qux"]) + self.assertEqual( + config, + { + "FOO": "foo", + "QUX": "qux", + }, + ) + + def test_depends_value(self): + with self.moz_configure( + """ + foo = depends(when=True)('foo') + + set_config('FOO', foo) + + bar = depends(when=False)('bar') + + set_config('BAR', bar) + + option('--with-qux', help='qux') + @depends(when='--with-qux') + def qux(): + return 'qux' + + set_config('QUX', qux) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": "foo", + }, + ) + + with self.assertRaises(TypeError) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + + depends('--foo')('foo') + """ + ): + self.get_config() + + self.assertEqual( + str(e.exception), "Cannot wrap literal values in @depends with dependencies" + ) + + def test_imports_failures(self): + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + @imports('os') + @template + def foo(value): + return value + """ + ): + self.get_config() + + self.assertEqual(str(e.exception), "@imports must appear after @template") + + with self.assertRaises(ConfigureError) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @imports('os') + @depends('--foo') 
+                def foo(value):
+                    return value
+            """
+            ):
+                self.get_config()
+
+        self.assertEqual(str(e.exception), "@imports must appear after @depends")
+
+        for import_ in (
+            "42",
+            "_from=42, _import='os'",
+            "_from='os', _import='path', _as=42",
+        ):
+            with self.assertRaises(TypeError) as e:
+                with self.moz_configure(
+                    """
+                    @imports(%s)
+                    @template
+                    def foo(value):
+                        return value
+                """
+                    % import_
+                ):
+                    self.get_config()
+
+            self.assertEqual(str(e.exception), "Unexpected type: 'int'")
+
+        with self.assertRaises(TypeError) as e:
+            with self.moz_configure(
+                """
+                @imports('os', 42)
+                @template
+                def foo(value):
+                    return value
+            """
+            ):
+                self.get_config()
+
+        self.assertEqual(str(e.exception), "Unexpected type: 'int'")
+
+        with self.assertRaises(ValueError) as e:
+            with self.moz_configure(
+                """
+                @imports('os*')
+                def foo(value):
+                    return value
+            """
+            ):
+                self.get_config()
+
+        self.assertEqual(str(e.exception), "Invalid argument to @imports: 'os*'")
+
+    def test_only_when(self):
+        moz_configure = """
+            option('--enable-when', help='when')
+            @depends('--enable-when', '--help')
+            def when(value, _):
+                return bool(value)
+
+            with only_when(when):
+                option('--foo', nargs='*', help='foo')
+                @depends('--foo')
+                def foo(value):
+                    return value
+
+                set_config('FOO', foo)
+                set_define('FOO', foo)
+
+                # It is possible to depend on a function defined in an only_when
+                # block. It then resolves to `None`.
+                set_config('BAR', depends(foo)(lambda x: x))
+                set_define('BAR', depends(foo)(lambda x: x))
+        """
+
+        with self.moz_configure(moz_configure):
+            config = self.get_config()
+            self.assertEqual(
+                config,
+                {
+                    "DEFINES": {},
+                },
+            )
+
+            config = self.get_config(["--enable-when"])
+            self.assertEqual(
+                config,
+                {
+                    "BAR": NegativeOptionValue(),
+                    "FOO": NegativeOptionValue(),
+                    "DEFINES": {
+                        "BAR": NegativeOptionValue(),
+                        "FOO": NegativeOptionValue(),
+                    },
+                },
+            )
+
+            config = self.get_config(["--enable-when", "--foo=bar"])
+            self.assertEqual(
+                config,
+                {
+                    "BAR": PositiveOptionValue(["bar"]),
+                    "FOO": PositiveOptionValue(["bar"]),
+                    "DEFINES": {
+                        "BAR": PositiveOptionValue(["bar"]),
+                        "FOO": PositiveOptionValue(["bar"]),
+                    },
+                },
+            )
+
+            # The --foo option doesn't exist when --enable-when is not given.
+            with self.assertRaises(InvalidOptionError) as e:
+                self.get_config(["--foo"])
+
+            self.assertEqual(
+                str(e.exception), "--foo is not available in this configuration"
+            )
+
+        # Cannot depend on an option defined in an only_when block, because we
+        # don't know what OptionValue would make sense.
+        with self.moz_configure(
+            moz_configure
+            + """
+            set_config('QUX', depends('--foo')(lambda x: x))
+        """
+        ):
+            with self.assertRaises(ConfigureError) as e:
+                self.get_config()
+
+            self.assertEqual(
+                str(e.exception),
+                "@depends function needs the same `when` as " "options it depends on",
+            )
+
+        with self.moz_configure(
+            moz_configure
+            + """
+            set_config('QUX', depends('--foo', when=when)(lambda x: x))
+        """
+        ):
+            self.get_config(["--enable-when"])
+
+        # Using imply_option for an option defined in an only_when block fails
+        # similarly if the imply_option happens outside the block.
+        with self.moz_configure(
+            """
+            imply_option('--foo', True)
+        """
+            + moz_configure
+        ):
+            with self.assertRaises(InvalidOptionError) as e:
+                self.get_config()
+
+            self.assertEqual(
+                str(e.exception), "--foo is not available in this configuration"
+            )
+
+        # And similarly doesn't fail when the condition is true.
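+        # (The imply_option('--foo', True) placed before the block is fine
+        # once --enable-when makes --foo available.)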
+ with self.moz_configure( + """ + imply_option('--foo', True) + """ + + moz_configure + ): + self.get_config(["--enable-when"]) + + def test_depends_binary_ops(self): + with self.moz_configure( + """ + option('--foo', nargs=1, help='foo') + @depends('--foo') + def foo(value): + return value or 0 + + option('--bar', nargs=1, help='bar') + @depends('--bar') + def bar(value): + return value or '' + + option('--baz', nargs=1, help='baz') + @depends('--baz') + def baz(value): + return value + + set_config('FOOorBAR', foo | bar) + set_config('FOOorBARorBAZ', foo | bar | baz) + set_config('FOOandBAR', foo & bar) + set_config('FOOandBARandBAZ', foo & bar & baz) + """ + ): + for foo_opt, foo_value in ( + ("", 0), + ("--foo=foo", PositiveOptionValue(("foo",))), + ): + for bar_opt, bar_value in ( + ("", ""), + ("--bar=bar", PositiveOptionValue(("bar",))), + ): + for baz_opt, baz_value in ( + ("", NegativeOptionValue()), + ("--baz=baz", PositiveOptionValue(("baz",))), + ): + config = self.get_config( + [x for x in (foo_opt, bar_opt, baz_opt) if x] + ) + self.assertEqual( + config, + { + "FOOorBAR": foo_value or bar_value, + "FOOorBARorBAZ": foo_value or bar_value or baz_value, + "FOOandBAR": foo_value and bar_value, + "FOOandBARandBAZ": foo_value + and bar_value + and baz_value, + }, + ) + + def test_depends_getattr(self): + with self.moz_configure( + """ + @imports(_from='mozbuild.util', _import='ReadOnlyNamespace') + def namespace(**kwargs): + return ReadOnlyNamespace(**kwargs) + + option('--foo', nargs=1, help='foo') + @depends('--foo') + def foo(value): + return value + + option('--bar', nargs=1, help='bar') + @depends('--bar') + def bar(value): + return value or None + + @depends(foo, bar) + def foobar(foo, bar): + return namespace(foo=foo, bar=bar) + + set_config('FOO', foobar.foo) + set_config('BAR', foobar.bar) + set_config('BAZ', foobar.baz) + """ + ): + config = self.get_config() + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + }, + ) + + config = self.get_config(["--foo=foo"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(("foo",)), + }, + ) + + config = self.get_config(["--bar=bar"]) + self.assertEqual( + config, + { + "FOO": NegativeOptionValue(), + "BAR": PositiveOptionValue(("bar",)), + }, + ) + + config = self.get_config(["--foo=foo", "--bar=bar"]) + self.assertEqual( + config, + { + "FOO": PositiveOptionValue(("foo",)), + "BAR": PositiveOptionValue(("bar",)), + }, + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_lint.py b/python/mozbuild/mozbuild/test/configure/test_lint.py new file mode 100644 index 0000000000..7ecac769c3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_lint.py @@ -0,0 +1,487 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
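+
+# The tests below drive mozbuild.configure.lint.LintSandbox over small
+# moz.configure snippets. A minimal sketch of the pattern they all share
+# (using the same names as the helpers defined further down):
+#
+#     sandbox = LintSandbox({}, ["configure"])
+#     sandbox.run(mozpath.join(test_data_path, "moz.configure"))
+#
+# and then assert on the ConfigureError (and the moz.configure line it
+# points at) that the lint pass raises.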
+ +import contextlib +import os +import sys +import textwrap +import traceback +import unittest + +import mozpack.path as mozpath +from mozunit import MockedOpen, main + +from mozbuild.configure import ConfigureError +from mozbuild.configure.lint import LintSandbox + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data") + + +class TestLint(unittest.TestCase): + def lint_test(self, options=[], env={}): + sandbox = LintSandbox(env, ["configure"] + options) + + sandbox.run(mozpath.join(test_data_path, "moz.configure")) + + def moz_configure(self, source): + return MockedOpen( + {os.path.join(test_data_path, "moz.configure"): textwrap.dedent(source)} + ) + + @contextlib.contextmanager + def assertRaisesFromLine(self, exc_type, line): + with self.assertRaises(exc_type) as e: + yield e + + _, _, tb = sys.exc_info() + self.assertEqual( + traceback.extract_tb(tb)[-1][:2], + (mozpath.join(test_data_path, "moz.configure"), line), + ) + + def test_configure_testcase(self): + # Lint python/mozbuild/mozbuild/test/configure/data/moz.configure + self.lint_test() + + def test_depends_failures(self): + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return value + + @depends('--help', foo) + @imports('os') + def bar(help, foo): + return foo + """ + ): + self.lint_test() + + with self.assertRaisesFromLine(ConfigureError, 7) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return value + + @depends('--help', foo) + def bar(help, foo): + return foo + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "The dependency on `--help` is unused") + + with self.assertRaisesFromLine(ConfigureError, 3) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + @imports('os') + def foo(value): + return value + + @depends('--help', foo) + @imports('os') + def bar(help, foo): + return foo + """ + ): + self.lint_test() + + self.assertEqual( + str(e.exception), + "Missing '--help' dependency because `bar` depends on '--help' and `foo`", + ) + + with self.assertRaisesFromLine(ConfigureError, 7) as e: + with self.moz_configure( + """ + @template + def tmpl(): + qux = 42 + + option('--foo', help='foo') + @depends('--foo') + def foo(value): + qux + return value + + @depends('--help', foo) + @imports('os') + def bar(help, foo): + return foo + tmpl() + """ + ): + self.lint_test() + + self.assertEqual( + str(e.exception), + "Missing '--help' dependency because `bar` depends on '--help' and `foo`", + ) + + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return value + + include(foo) + """ + ): + self.lint_test() + + with self.assertRaisesFromLine(ConfigureError, 3) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + @imports('os') + def foo(value): + return value + + include(foo) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "Missing '--help' dependency") + + with self.assertRaisesFromLine(ConfigureError, 3) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + @imports('os') + def foo(value): + return value + + @depends(foo) + def bar(value): + return value + + include(bar) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "Missing '--help' dependency") + + with self.assertRaisesFromLine(ConfigureError, 3) as e: + with self.moz_configure( + """ 
+ option('--foo', help='foo') + @depends('--foo') + @imports('os') + def foo(value): + return value + + option('--bar', help='bar', when=foo) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "Missing '--help' dependency") + + # This would have failed with "Missing '--help' dependency" + # in the past, because of the reference to the builtin False. + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return False or value + + option('--bar', help='bar', when=foo) + """ + ): + self.lint_test() + + # However, when something that is normally a builtin is overridden, + # we should still want the dependency on --help. + with self.assertRaisesFromLine(ConfigureError, 7) as e: + with self.moz_configure( + """ + @template + def tmpl(): + sorted = 42 + + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return sorted + + option('--bar', help='bar', when=foo) + tmpl() + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "Missing '--help' dependency") + + # There is a default restricted `os` module when there is no explicit + # @imports, and it's fine to use it without a dependency on --help. + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + os + return value + + include(foo) + """ + ): + self.lint_test() + + with self.assertRaisesFromLine(ConfigureError, 3) as e: + with self.moz_configure( + """ + option('--foo', help='foo') + @depends('--foo') + def foo(value): + return + + include(foo) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "The dependency on `--foo` is unused") + + with self.assertRaisesFromLine(ConfigureError, 5) as e: + with self.moz_configure( + """ + @depends(when=True) + def bar(): + return + @depends(bar) + def foo(value): + return + + include(foo) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "The dependency on `bar` is unused") + + with self.assertRaisesFromLine(ConfigureError, 2) as e: + with self.moz_configure( + """ + @depends(depends(when=True)(lambda: None)) + def foo(value): + return + + include(foo) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "The dependency on `` is unused") + + with self.assertRaisesFromLine(ConfigureError, 9) as e: + with self.moz_configure( + """ + @template + def tmpl(): + @depends(when=True) + def bar(): + return + return bar + qux = tmpl() + @depends(qux) + def foo(value): + return + + include(foo) + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "The dependency on `qux` is unused") + + def test_default_enable(self): + # --enable-* with default=True is not allowed. + with self.moz_configure( + """ + option('--enable-foo', default=False, help='foo') + """ + ): + self.lint_test() + with self.assertRaisesFromLine(ConfigureError, 2) as e: + with self.moz_configure( + """ + option('--enable-foo', default=True, help='foo') + """ + ): + self.lint_test() + self.assertEqual( + str(e.exception), + "--disable-foo should be used instead of " "--enable-foo with default=True", + ) + + def test_default_disable(self): + # --disable-* with default=False is not allowed. 
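+        # (option('--disable-foo', default=False, ...) would just be a
+        # roundabout spelling of option('--enable-foo', ...), which is what
+        # the lint suggests below.)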
+        with self.moz_configure(
+            """
+            option('--disable-foo', default=True, help='foo')
+        """
+        ):
+            self.lint_test()
+        with self.assertRaisesFromLine(ConfigureError, 2) as e:
+            with self.moz_configure(
+                """
+                option('--disable-foo', default=False, help='foo')
+            """
+            ):
+                self.lint_test()
+        self.assertEqual(
+            str(e.exception),
+            "--enable-foo should be used instead of "
+            "--disable-foo with default=False",
+        )
+
+    def test_default_with(self):
+        # --with-* with default=True is not allowed.
+        with self.moz_configure(
+            """
+            option('--with-foo', default=False, help='foo')
+        """
+        ):
+            self.lint_test()
+        with self.assertRaisesFromLine(ConfigureError, 2) as e:
+            with self.moz_configure(
+                """
+                option('--with-foo', default=True, help='foo')
+            """
+            ):
+                self.lint_test()
+        self.assertEqual(
+            str(e.exception),
+            "--without-foo should be used instead of " "--with-foo with default=True",
+        )
+
+    def test_default_without(self):
+        # --without-* with default=False is not allowed.
+        with self.moz_configure(
+            """
+            option('--without-foo', default=True, help='foo')
+        """
+        ):
+            self.lint_test()
+        with self.assertRaisesFromLine(ConfigureError, 2) as e:
+            with self.moz_configure(
+                """
+                option('--without-foo', default=False, help='foo')
+            """
+            ):
+                self.lint_test()
+        self.assertEqual(
+            str(e.exception),
+            "--with-foo should be used instead of " "--without-foo with default=False",
+        )
+
+    def test_default_func(self):
+        # Help text for an option with a variable default should contain the
+        # {Enable|Disable} rule.
+        with self.moz_configure(
+            """
+            option(env='FOO', help='foo')
+            option('--enable-bar', default=depends('FOO')(lambda x: bool(x)),
+                   help='{Enable|Disable} bar')
+        """
+        ):
+            self.lint_test()
+        with self.assertRaisesFromLine(ConfigureError, 3) as e:
+            with self.moz_configure(
+                """
+                option(env='FOO', help='foo')
+                option('--enable-bar', default=depends('FOO')(lambda x: bool(x)),\
+                       help='Enable bar')
+            """
+            ):
+                self.lint_test()
+        self.assertEqual(
+            str(e.exception),
+            '`help` should contain "{Enable|Disable}" because of '
+            "non-constant default",
+        )
+
+    def test_large_offset(self):
+        with self.assertRaisesFromLine(ConfigureError, 375):
+            with self.moz_configure(
+                """
+                option(env='FOO', help='foo')
+            """
+                + "\n" * 371
+                + """
+                option('--enable-bar', default=depends('FOO')(lambda x: bool(x)),\
+                       help='Enable bar')
+            """
+            ):
+                self.lint_test()
+
+    def test_undefined_global(self):
+        with self.assertRaisesFromLine(NameError, 6) as e:
+            with self.moz_configure(
+                """
+                option(env='FOO', help='foo')
+                @depends('FOO')
+                def foo(value):
+                    if value:
+                        return unknown
+                    return value
+            """
+            ):
+                self.lint_test()
+
+        self.assertEqual(str(e.exception), "global name 'unknown' is not defined")
+
+        # Ideally, this would raise on line 4, where `unknown` is used, but
+        # python disassembly doesn't give us the information.
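+        # The NameError is therefore attributed to the line of the enclosing
+        # @template definition (line 2 of the snippet below).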
+ with self.assertRaisesFromLine(NameError, 2) as e: + with self.moz_configure( + """ + @template + def tmpl(): + @depends(unknown) + def foo(value): + if value: + return True + return foo + tmpl() + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "global name 'unknown' is not defined") + + def test_unnecessary_imports(self): + with self.assertRaisesFromLine(NameError, 3) as e: + with self.moz_configure( + """ + option(env='FOO', help='foo') + @depends('FOO') + @imports(_from='__builtin__', _import='list') + def foo(value): + if value: + return list() + return value + """ + ): + self.lint_test() + + self.assertEqual(str(e.exception), "builtin 'list' doesn't need to be imported") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py new file mode 100644 index 0000000000..22129a3970 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py @@ -0,0 +1,185 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from mozunit import main + +from common import BaseConfigureTest, ConfigureTestSandbox +from mozbuild.util import ReadOnlyNamespace, exec_, memoized_property + + +def sandbox_class(platform): + class ConfigureTestSandboxOverridingPlatform(ConfigureTestSandbox): + @memoized_property + def _wrapped_sys(self): + sys = {} + exec_("from sys import *", sys) + sys["platform"] = platform + return ReadOnlyNamespace(**sys) + + return ConfigureTestSandboxOverridingPlatform + + +class TargetTest(BaseConfigureTest): + def get_target(self, args, env={}): + if "linux" in self.HOST: + platform = "linux2" + elif "mingw" in self.HOST or "windows" in self.HOST: + platform = "win32" + elif "openbsd6" in self.HOST: + platform = "openbsd6" + else: + raise Exception("Missing platform for HOST {}".format(self.HOST)) + sandbox = self.get_sandbox({}, {}, args, env, cls=sandbox_class(platform)) + return sandbox._value_for(sandbox["target"]).alias + + +class TestTargetLinux(TargetTest): + def test_target(self): + self.assertEqual(self.get_target([]), self.HOST) + self.assertEqual(self.get_target(["--target=i686"]), "i686-pc-linux-gnu") + self.assertEqual( + self.get_target(["--target=i686-unknown-linux-gnu"]), + "i686-unknown-linux-gnu", + ) + self.assertEqual( + self.get_target(["--target=i686-pc-windows-msvc"]), "i686-pc-windows-msvc" + ) + + +class TestTargetWindows(TargetTest): + # BaseConfigureTest uses this as the return value for config.guess + HOST = "i686-pc-windows-msvc" + + def test_target(self): + self.assertEqual(self.get_target([]), self.HOST) + self.assertEqual( + self.get_target(["--target=x86_64-pc-windows-msvc"]), + "x86_64-pc-windows-msvc", + ) + self.assertEqual(self.get_target(["--target=x86_64"]), "x86_64-pc-windows-msvc") + + # The tests above are actually not realistic, because most Windows + # machines will have a few environment variables that make us not + # use config.guess. + + # 32-bits process on x86_64 host. 
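+        # (On 64-bit Windows, a 32-bit process sees PROCESSOR_ARCHITECTURE=x86,
+        # while PROCESSOR_ARCHITEW6432 carries the real architecture.)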
+        env = {
+            "PROCESSOR_ARCHITECTURE": "x86",
+            "PROCESSOR_ARCHITEW6432": "AMD64",
+        }
+        self.assertEqual(self.get_target([], env), "x86_64-pc-windows-msvc")
+        self.assertEqual(
+            self.get_target(["--target=i686-pc-windows-msvc"]), "i686-pc-windows-msvc"
+        )
+        self.assertEqual(self.get_target(["--target=i686"]), "i686-pc-windows-msvc")
+
+        # 64-bits process on x86_64 host.
+        env = {
+            "PROCESSOR_ARCHITECTURE": "AMD64",
+        }
+        self.assertEqual(self.get_target([], env), "x86_64-pc-windows-msvc")
+        self.assertEqual(
+            self.get_target(["--target=i686-pc-windows-msvc"]), "i686-pc-windows-msvc"
+        )
+        self.assertEqual(self.get_target(["--target=i686"]), "i686-pc-windows-msvc")
+
+        # 32-bits process on x86 host.
+        env = {
+            "PROCESSOR_ARCHITECTURE": "x86",
+        }
+        self.assertEqual(self.get_target([], env), "i686-pc-windows-msvc")
+        self.assertEqual(
+            self.get_target(["--target=x86_64-pc-windows-msvc"]),
+            "x86_64-pc-windows-msvc",
+        )
+        self.assertEqual(self.get_target(["--target=x86_64"]), "x86_64-pc-windows-msvc")
+
+        # While host autodetection will give us a -windows-msvc triplet, setting host
+        # is expected to implicitly set the target.
+        self.assertEqual(
+            self.get_target(["--host=x86_64-pc-windows-gnu"]), "x86_64-pc-windows-gnu"
+        )
+        self.assertEqual(
+            self.get_target(["--host=x86_64-pc-mingw32"]), "x86_64-pc-mingw32"
+        )
+
+
+class TestTargetAndroid(TargetTest):
+    HOST = "x86_64-pc-linux-gnu"
+
+    def test_target(self):
+        self.assertEqual(
+            self.get_target(["--enable-project=mobile/android"]),
+            "arm-unknown-linux-androideabi",
+        )
+        self.assertEqual(
+            self.get_target(["--enable-project=mobile/android", "--target=i686"]),
+            "i686-unknown-linux-android",
+        )
+        self.assertEqual(
+            self.get_target(["--enable-project=mobile/android", "--target=x86_64"]),
+            "x86_64-unknown-linux-android",
+        )
+        self.assertEqual(
+            self.get_target(["--enable-project=mobile/android", "--target=aarch64"]),
+            "aarch64-unknown-linux-android",
+        )
+        self.assertEqual(
+            self.get_target(["--enable-project=mobile/android", "--target=arm"]),
+            "arm-unknown-linux-androideabi",
+        )
+
+
+class TestTargetOpenBSD(TargetTest):
+    # config.guess returns amd64 on OpenBSD, which we need to pass through to
+    # config.sub so that it canonicalizes to x86_64.
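+    # (The config_sub override below fakes exactly that canonicalization.)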
+ HOST = "amd64-unknown-openbsd6.4" + + def test_target(self): + self.assertEqual(self.get_target([]), "x86_64-unknown-openbsd6.4") + + def config_sub(self, stdin, args): + if args[0] == "amd64-unknown-openbsd6.4": + return 0, "x86_64-unknown-openbsd6.4", "" + return super(TestTargetOpenBSD, self).config_sub(stdin, args) + + +class TestMozConfigure(BaseConfigureTest): + def test_nsis_version(self): + this = self + + class FakeNSIS(object): + def __init__(self, version): + self.version = version + + def __call__(self, stdin, args): + this.assertEqual(args, ("-version",)) + return 0, self.version, "" + + def check_nsis_version(version): + sandbox = self.get_sandbox( + {"/usr/bin/makensis": FakeNSIS(version)}, + {}, + ["--target=x86_64-pc-windows-msvc", "--disable-bootstrap"], + {"PATH": "/usr/bin", "MAKENSISU": "/usr/bin/makensis"}, + ) + return sandbox._value_for(sandbox["nsis_version"]) + + with self.assertRaises(SystemExit): + check_nsis_version("v2.5") + + with self.assertRaises(SystemExit): + check_nsis_version("v3.0a2") + + self.assertEqual(check_nsis_version("v3.0b1"), "3.0b1") + self.assertEqual(check_nsis_version("v3.0b2"), "3.0b2") + self.assertEqual(check_nsis_version("v3.0rc1"), "3.0rc1") + self.assertEqual(check_nsis_version("v3.0"), "3.0") + self.assertEqual(check_nsis_version("v3.0-2"), "3.0") + self.assertEqual(check_nsis_version("v3.0.1"), "3.0") + self.assertEqual(check_nsis_version("v3.1"), "3.1") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_options.py b/python/mozbuild/mozbuild/test/configure/test_options.py new file mode 100644 index 0000000000..59ba616355 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_options.py @@ -0,0 +1,905 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
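+
+# A quick orientation for the tests below (an illustrative sketch of the
+# calls they exercise, not exhaustive API documentation):
+#
+#     opt = Option("--with-foo", nargs="*", help="foo")
+#     val = opt.get_value("--with-foo=a,b", "option")
+#     assert val == PositiveOptionValue(("a", "b"))
+#     assert val.origin == "option"
+#
+# CommandLineHelper, exercised at the end of the file, arbitrates between
+# command-line arguments and environment variables that set the same option.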
+ +import unittest + +from mozunit import main + +from mozbuild.configure.options import ( + CommandLineHelper, + ConflictingOptionError, + InvalidOptionError, + NegativeOptionValue, + Option, + OptionValue, + PositiveOptionValue, +) + + +class Option(Option): + def __init__(self, *args, **kwargs): + kwargs["help"] = "Dummy help" + super(Option, self).__init__(*args, **kwargs) + + +class TestOption(unittest.TestCase): + def test_option(self): + option = Option("--option") + self.assertEqual(option.prefix, "") + self.assertEqual(option.name, "option") + self.assertEqual(option.env, None) + self.assertFalse(option.default) + + option = Option("--enable-option") + self.assertEqual(option.prefix, "enable") + self.assertEqual(option.name, "option") + self.assertEqual(option.env, None) + self.assertFalse(option.default) + + option = Option("--disable-option") + self.assertEqual(option.prefix, "disable") + self.assertEqual(option.name, "option") + self.assertEqual(option.env, None) + self.assertTrue(option.default) + + option = Option("--with-option") + self.assertEqual(option.prefix, "with") + self.assertEqual(option.name, "option") + self.assertEqual(option.env, None) + self.assertFalse(option.default) + + option = Option("--without-option") + self.assertEqual(option.prefix, "without") + self.assertEqual(option.name, "option") + self.assertEqual(option.env, None) + self.assertTrue(option.default) + + option = Option("--without-option-foo", env="MOZ_OPTION") + self.assertEqual(option.env, "MOZ_OPTION") + + option = Option(env="MOZ_OPTION") + self.assertEqual(option.prefix, "") + self.assertEqual(option.name, None) + self.assertEqual(option.env, "MOZ_OPTION") + self.assertFalse(option.default) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=0, default=("a",)) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=1, default=()) + self.assertEqual( + str(e.exception), "default must be a bool, a string or a tuple of strings" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=1, default=True) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=1, default=("a", "b")) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=2, default=()) + self.assertEqual( + str(e.exception), "default must be a bool, a string or a tuple of strings" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=2, default=True) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=2, default=("a",)) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs="?", default=("a", "b")) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs="+", default=()) + self.assertEqual( + str(e.exception), "default must be a bool, a string or a tuple of strings" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs="+", default=True) + 
self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + # --disable options with a nargs value that requires at least one + # argument need to be given a default. + with self.assertRaises(InvalidOptionError) as e: + Option("--disable-option", nargs=1) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--disable-option", nargs="+") + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + # Test nargs inference from default value + option = Option("--with-foo", default=True) + self.assertEqual(option.nargs, 0) + + option = Option("--with-foo", default=False) + self.assertEqual(option.nargs, 0) + + option = Option("--with-foo", default="a") + self.assertEqual(option.nargs, "?") + + option = Option("--with-foo", default=("a",)) + self.assertEqual(option.nargs, "?") + + option = Option("--with-foo", default=("a", "b")) + self.assertEqual(option.nargs, "*") + + option = Option(env="FOO", default=True) + self.assertEqual(option.nargs, 0) + + option = Option(env="FOO", default=False) + self.assertEqual(option.nargs, 0) + + option = Option(env="FOO", default="a") + self.assertEqual(option.nargs, "?") + + option = Option(env="FOO", default=("a",)) + self.assertEqual(option.nargs, "?") + + option = Option(env="FOO", default=("a", "b")) + self.assertEqual(option.nargs, "*") + + def test_option_option(self): + for option in ( + "--option", + "--enable-option", + "--disable-option", + "--with-option", + "--without-option", + ): + self.assertEqual(Option(option).option, option) + self.assertEqual(Option(option, env="FOO").option, option) + + opt = Option(option, default=False) + self.assertEqual( + opt.option, + option.replace("-disable-", "-enable-").replace("-without-", "-with-"), + ) + + opt = Option(option, default=True) + self.assertEqual( + opt.option, + option.replace("-enable-", "-disable-").replace("-with-", "-without-"), + ) + + self.assertEqual(Option(env="FOO").option, "FOO") + + def test_option_choices(self): + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=3, choices=("a", "b")) + self.assertEqual(str(e.exception), "Not enough `choices` for `nargs`") + + with self.assertRaises(InvalidOptionError) as e: + Option("--without-option", nargs=1, choices=("a", "b")) + self.assertEqual( + str(e.exception), "A `default` must be given along with `choices`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--without-option", nargs="+", choices=("a", "b")) + self.assertEqual( + str(e.exception), "A `default` must be given along with `choices`" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--without-option", default="c", choices=("a", "b")) + self.assertEqual( + str(e.exception), "The `default` value must be one of 'a', 'b'" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option( + "--without-option", + default=( + "a", + "c", + ), + choices=("a", "b"), + ) + self.assertEqual( + str(e.exception), "The `default` value must be one of 'a', 'b'" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--without-option", default=("c",), choices=("a", "b")) + self.assertEqual( + str(e.exception), "The `default` value must be one of 'a', 'b'" + ) + + option = Option("--with-option", nargs="+", choices=("a", "b")) + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--with-option=c") + self.assertEqual(str(e.exception), "'c' is not one 
of 'a', 'b'")
+
+        value = option.get_value("--with-option=b,a")
+        self.assertTrue(value)
+        self.assertEqual(PositiveOptionValue(("b", "a")), value)
+
+        option = Option("--without-option", nargs="*", default="a", choices=("a", "b"))
+        with self.assertRaises(InvalidOptionError) as e:
+            option.get_value("--with-option=c")
+        self.assertEqual(str(e.exception), "'c' is not one of 'a', 'b'")
+
+        value = option.get_value("--with-option=b,a")
+        self.assertTrue(value)
+        self.assertEqual(PositiveOptionValue(("b", "a")), value)
+
+        # Test nargs inference from choices
+        option = Option("--with-option", choices=("a", "b"))
+        self.assertEqual(option.nargs, 1)
+
+        # Test "relative" values
+        option = Option(
+            "--with-option", nargs="*", default=("b", "c"), choices=("a", "b", "c", "d")
+        )
+
+        value = option.get_value("--with-option=+d")
+        self.assertEqual(PositiveOptionValue(("b", "c", "d")), value)
+
+        value = option.get_value("--with-option=-b")
+        self.assertEqual(PositiveOptionValue(("c",)), value)
+
+        value = option.get_value("--with-option=-b,+d")
+        self.assertEqual(PositiveOptionValue(("c", "d")), value)
+
+        # Adding something that is in the default is fine
+        value = option.get_value("--with-option=+b")
+        self.assertEqual(PositiveOptionValue(("b", "c")), value)
+
+        # Removing something that is not in the default is fine, as long as it
+        # is one of the choices
+        value = option.get_value("--with-option=-a")
+        self.assertEqual(PositiveOptionValue(("b", "c")), value)
+
+        with self.assertRaises(InvalidOptionError) as e:
+            option.get_value("--with-option=-e")
+        self.assertEqual(str(e.exception), "'e' is not one of 'a', 'b', 'c', 'd'")
+
+        # Other "not a choice" errors.
+        with self.assertRaises(InvalidOptionError) as e:
+            option.get_value("--with-option=+e")
+        self.assertEqual(str(e.exception), "'e' is not one of 'a', 'b', 'c', 'd'")
+
+        with self.assertRaises(InvalidOptionError) as e:
+            option.get_value("--with-option=e")
+        self.assertEqual(str(e.exception), "'e' is not one of 'a', 'b', 'c', 'd'")
+
+    def test_option_value_compare(self):
+        # OptionValue is a tuple subclass, and equality should compare as tuples.
+        val = PositiveOptionValue(("foo",))
+
+        self.assertEqual(val[0], "foo")
+        self.assertEqual(val, PositiveOptionValue(("foo",)))
+        self.assertNotEqual(val, PositiveOptionValue(("foo", "bar")))
+
+        # Can compare a tuple to an OptionValue.
+        self.assertEqual(val, ("foo",))
+        self.assertNotEqual(val, ("foo", "bar"))
+
+        # Different OptionValue types are never equal.
+        self.assertNotEqual(val, OptionValue(("foo",)))
+
+        # For usability reasons, we raise TypeError when attempting to compare
+        # against a non-tuple.
+        with self.assertRaisesRegexp(TypeError, "cannot compare a"):
+            val == "foo"
+
+        # But we allow empty option values to compare; otherwise we can't
+        # easily compare value-less types like PositiveOptionValue and
+        # NegativeOptionValue.
+ empty_positive = PositiveOptionValue() + empty_negative = NegativeOptionValue() + self.assertEqual(empty_positive, ()) + self.assertEqual(empty_positive, PositiveOptionValue()) + self.assertEqual(empty_negative, ()) + self.assertEqual(empty_negative, NegativeOptionValue()) + self.assertNotEqual(empty_positive, "foo") + self.assertNotEqual(empty_positive, ("foo",)) + self.assertNotEqual(empty_negative, "foo") + self.assertNotEqual(empty_negative, ("foo",)) + + def test_option_value_format(self): + val = PositiveOptionValue() + self.assertEqual("--with-value", val.format("--with-value")) + self.assertEqual("--with-value", val.format("--without-value")) + self.assertEqual("--enable-value", val.format("--enable-value")) + self.assertEqual("--enable-value", val.format("--disable-value")) + self.assertEqual("--value", val.format("--value")) + self.assertEqual("VALUE=1", val.format("VALUE")) + + val = PositiveOptionValue(("a",)) + self.assertEqual("--with-value=a", val.format("--with-value")) + self.assertEqual("--with-value=a", val.format("--without-value")) + self.assertEqual("--enable-value=a", val.format("--enable-value")) + self.assertEqual("--enable-value=a", val.format("--disable-value")) + self.assertEqual("--value=a", val.format("--value")) + self.assertEqual("VALUE=a", val.format("VALUE")) + + val = PositiveOptionValue(("a", "b")) + self.assertEqual("--with-value=a,b", val.format("--with-value")) + self.assertEqual("--with-value=a,b", val.format("--without-value")) + self.assertEqual("--enable-value=a,b", val.format("--enable-value")) + self.assertEqual("--enable-value=a,b", val.format("--disable-value")) + self.assertEqual("--value=a,b", val.format("--value")) + self.assertEqual("VALUE=a,b", val.format("VALUE")) + + val = NegativeOptionValue() + self.assertEqual("--without-value", val.format("--with-value")) + self.assertEqual("--without-value", val.format("--without-value")) + self.assertEqual("--disable-value", val.format("--enable-value")) + self.assertEqual("--disable-value", val.format("--disable-value")) + self.assertEqual("", val.format("--value")) + self.assertEqual("VALUE=", val.format("VALUE")) + + def test_option_value(self, name="option", nargs=0, default=None): + disabled = name.startswith(("disable-", "without-")) + if disabled: + negOptionValue = PositiveOptionValue + posOptionValue = NegativeOptionValue + else: + posOptionValue = PositiveOptionValue + negOptionValue = NegativeOptionValue + defaultValue = PositiveOptionValue(default) if default else negOptionValue() + + option = Option("--%s" % name, nargs=nargs, default=default) + + if nargs in (0, "?", "*") or disabled: + value = option.get_value("--%s" % name, "option") + self.assertEqual(value, posOptionValue()) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--%s" % name) + if nargs == 1: + self.assertEqual(str(e.exception), "--%s takes 1 value" % name) + elif nargs == "+": + self.assertEqual(str(e.exception), "--%s takes 1 or more values" % name) + else: + self.assertEqual(str(e.exception), "--%s takes 2 values" % name) + + value = option.get_value("") + self.assertEqual(value, defaultValue) + self.assertEqual(value.origin, "default") + + value = option.get_value(None) + self.assertEqual(value, defaultValue) + self.assertEqual(value.origin, "default") + + with self.assertRaises(AssertionError): + value = option.get_value("MOZ_OPTION=", "environment") + + with self.assertRaises(AssertionError): + value = option.get_value("MOZ_OPTION=1", 
"environment") + + with self.assertRaises(AssertionError): + value = option.get_value("--foo") + + if nargs in (1, "?", "*", "+") and not disabled: + value = option.get_value("--%s=" % name, "option") + self.assertEqual(value, PositiveOptionValue(("",))) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--%s=" % name) + if disabled: + self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name) + else: + self.assertEqual( + str(e.exception), "--%s takes %d values" % (name, nargs) + ) + + if nargs in (1, "?", "*", "+") and not disabled: + value = option.get_value("--%s=foo" % name, "option") + self.assertEqual(value, PositiveOptionValue(("foo",))) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--%s=foo" % name) + if disabled: + self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name) + else: + self.assertEqual( + str(e.exception), "--%s takes %d values" % (name, nargs) + ) + + if nargs in (2, "*", "+") and not disabled: + value = option.get_value("--%s=foo,bar" % name, "option") + self.assertEqual(value, PositiveOptionValue(("foo", "bar"))) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--%s=foo,bar" % name, "option") + if disabled: + self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name) + elif nargs == "?": + self.assertEqual(str(e.exception), "--%s takes 0 or 1 values" % name) + else: + self.assertEqual( + str(e.exception), + "--%s takes %d value%s" % (name, nargs, "s" if nargs != 1 else ""), + ) + + option = Option("--%s" % name, env="MOZ_OPTION", nargs=nargs, default=default) + if nargs in (0, "?", "*") or disabled: + value = option.get_value("--%s" % name, "option") + self.assertEqual(value, posOptionValue()) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--%s" % name) + if disabled: + self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name) + elif nargs == "+": + self.assertEqual(str(e.exception), "--%s takes 1 or more values" % name) + else: + self.assertEqual( + str(e.exception), + "--%s takes %d value%s" % (name, nargs, "s" if nargs != 1 else ""), + ) + + value = option.get_value("") + self.assertEqual(value, defaultValue) + self.assertEqual(value.origin, "default") + + value = option.get_value(None) + self.assertEqual(value, defaultValue) + self.assertEqual(value.origin, "default") + + value = option.get_value("MOZ_OPTION=", "environment") + self.assertEqual(value, NegativeOptionValue()) + self.assertEqual(value.origin, "environment") + + if nargs in (0, "?", "*"): + value = option.get_value("MOZ_OPTION=1", "environment") + self.assertEqual(value, PositiveOptionValue()) + self.assertEqual(value.origin, "environment") + elif nargs in (1, "+"): + value = option.get_value("MOZ_OPTION=1", "environment") + self.assertEqual(value, PositiveOptionValue(("1",))) + self.assertEqual(value.origin, "environment") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("MOZ_OPTION=1", "environment") + self.assertEqual(str(e.exception), "MOZ_OPTION takes 2 values") + + if nargs in (1, "?", "*", "+") and not disabled: + value = option.get_value("--%s=" % name, "option") + self.assertEqual(value, PositiveOptionValue(("",))) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as 
e: + option.get_value("--%s=" % name, "option") + if disabled: + self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name) + else: + self.assertEqual( + str(e.exception), "--%s takes %d values" % (name, nargs) + ) + + with self.assertRaises(AssertionError): + value = option.get_value("--foo", "option") + + if nargs in (1, "?", "*", "+"): + value = option.get_value("MOZ_OPTION=foo", "environment") + self.assertEqual(value, PositiveOptionValue(("foo",))) + self.assertEqual(value.origin, "environment") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("MOZ_OPTION=foo", "environment") + self.assertEqual(str(e.exception), "MOZ_OPTION takes %d values" % nargs) + + if nargs in (2, "*", "+"): + value = option.get_value("MOZ_OPTION=foo,bar", "environment") + self.assertEqual(value, PositiveOptionValue(("foo", "bar"))) + self.assertEqual(value.origin, "environment") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("MOZ_OPTION=foo,bar", "environment") + if nargs == "?": + self.assertEqual(str(e.exception), "MOZ_OPTION takes 0 or 1 values") + else: + self.assertEqual( + str(e.exception), + "MOZ_OPTION takes %d value%s" % (nargs, "s" if nargs != 1 else ""), + ) + + if disabled: + return option + + env_option = Option(env="MOZ_OPTION", nargs=nargs, default=default) + with self.assertRaises(AssertionError): + env_option.get_value("--%s" % name) + + value = env_option.get_value("") + self.assertEqual(value, defaultValue) + self.assertEqual(value.origin, "default") + + value = env_option.get_value("MOZ_OPTION=", "environment") + self.assertEqual(value, negOptionValue()) + self.assertEqual(value.origin, "environment") + + if nargs in (0, "?", "*"): + value = env_option.get_value("MOZ_OPTION=1", "environment") + self.assertEqual(value, posOptionValue()) + self.assertTrue(value) + self.assertEqual(value.origin, "environment") + elif nargs in (1, "+"): + value = env_option.get_value("MOZ_OPTION=1", "environment") + self.assertEqual(value, PositiveOptionValue(("1",))) + self.assertEqual(value.origin, "environment") + else: + with self.assertRaises(InvalidOptionError) as e: + env_option.get_value("MOZ_OPTION=1", "environment") + self.assertEqual(str(e.exception), "MOZ_OPTION takes 2 values") + + with self.assertRaises(AssertionError) as e: + env_option.get_value("--%s" % name) + + with self.assertRaises(AssertionError) as e: + env_option.get_value("--foo") + + if nargs in (1, "?", "*", "+"): + value = env_option.get_value("MOZ_OPTION=foo", "environment") + self.assertEqual(value, PositiveOptionValue(("foo",))) + self.assertEqual(value.origin, "environment") + else: + with self.assertRaises(InvalidOptionError) as e: + env_option.get_value("MOZ_OPTION=foo", "environment") + self.assertEqual(str(e.exception), "MOZ_OPTION takes %d values" % nargs) + + if nargs in (2, "*", "+"): + value = env_option.get_value("MOZ_OPTION=foo,bar", "environment") + self.assertEqual(value, PositiveOptionValue(("foo", "bar"))) + self.assertEqual(value.origin, "environment") + else: + with self.assertRaises(InvalidOptionError) as e: + env_option.get_value("MOZ_OPTION=foo,bar", "environment") + if nargs == "?": + self.assertEqual(str(e.exception), "MOZ_OPTION takes 0 or 1 values") + else: + self.assertEqual( + str(e.exception), + "MOZ_OPTION takes %d value%s" % (nargs, "s" if nargs != 1 else ""), + ) + + return option + + def test_option_value_enable( + self, enable="enable", disable="disable", nargs=0, default=None + ): + option = self.test_option_value( + 
"%s-option" % enable, nargs=nargs, default=default + ) + + value = option.get_value("--%s-option" % disable, "option") + self.assertEqual(value, NegativeOptionValue()) + self.assertEqual(value.origin, "option") + + option = self.test_option_value( + "%s-option" % disable, nargs=nargs, default=default + ) + + if nargs in (0, "?", "*"): + value = option.get_value("--%s-option" % enable, "option") + self.assertEqual(value, PositiveOptionValue()) + self.assertEqual(value.origin, "option") + else: + with self.assertRaises(InvalidOptionError) as e: + option.get_value("--%s-option" % enable, "option") + if nargs == 1: + self.assertEqual(str(e.exception), "--%s-option takes 1 value" % enable) + elif nargs == "+": + self.assertEqual( + str(e.exception), "--%s-option takes 1 or more values" % enable + ) + else: + self.assertEqual( + str(e.exception), "--%s-option takes 2 values" % enable + ) + + def test_option_value_with(self): + self.test_option_value_enable("with", "without") + + def test_option_value_invalid_nargs(self): + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs="foo") + self.assertEqual( + str(e.exception), "nargs must be a positive integer, '?', '*' or '+'" + ) + + with self.assertRaises(InvalidOptionError) as e: + Option("--option", nargs=-2) + self.assertEqual( + str(e.exception), "nargs must be a positive integer, '?', '*' or '+'" + ) + + def test_option_value_nargs_1(self): + self.test_option_value(nargs=1) + self.test_option_value(nargs=1, default=("a",)) + self.test_option_value_enable(nargs=1, default=("a",)) + + # A default is required + with self.assertRaises(InvalidOptionError) as e: + Option("--disable-option", nargs=1) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + def test_option_value_nargs_2(self): + self.test_option_value(nargs=2) + self.test_option_value(nargs=2, default=("a", "b")) + self.test_option_value_enable(nargs=2, default=("a", "b")) + + # A default is required + with self.assertRaises(InvalidOptionError) as e: + Option("--disable-option", nargs=2) + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + def test_option_value_nargs_0_or_1(self): + self.test_option_value(nargs="?") + self.test_option_value(nargs="?", default=("a",)) + self.test_option_value_enable(nargs="?") + self.test_option_value_enable(nargs="?", default=("a",)) + + def test_option_value_nargs_0_or_more(self): + self.test_option_value(nargs="*") + self.test_option_value(nargs="*", default=("a",)) + self.test_option_value(nargs="*", default=("a", "b")) + self.test_option_value_enable(nargs="*") + self.test_option_value_enable(nargs="*", default=("a",)) + self.test_option_value_enable(nargs="*", default=("a", "b")) + + def test_option_value_nargs_1_or_more(self): + self.test_option_value(nargs="+") + self.test_option_value(nargs="+", default=("a",)) + self.test_option_value(nargs="+", default=("a", "b")) + self.test_option_value_enable(nargs="+", default=("a",)) + self.test_option_value_enable(nargs="+", default=("a", "b")) + + # A default is required + with self.assertRaises(InvalidOptionError) as e: + Option("--disable-option", nargs="+") + self.assertEqual( + str(e.exception), "The given `default` doesn't satisfy `nargs`" + ) + + +class TestCommandLineHelper(unittest.TestCase): + def test_basic(self): + helper = CommandLineHelper({}, ["cmd", "--foo", "--bar"]) + + self.assertEqual(["--foo", "--bar"], list(helper)) + + helper.add("--enable-qux") + + self.assertEqual(["--foo", 
"--bar", "--enable-qux"], list(helper)) + + value, option = helper.handle(Option("--bar")) + self.assertEqual(["--foo", "--enable-qux"], list(helper)) + self.assertEqual(PositiveOptionValue(), value) + self.assertEqual("--bar", option) + + value, option = helper.handle(Option("--baz")) + self.assertEqual(["--foo", "--enable-qux"], list(helper)) + self.assertEqual(NegativeOptionValue(), value) + self.assertEqual(None, option) + + with self.assertRaises(AssertionError): + CommandLineHelper({}, ["--foo", "--bar"]) + + def test_precedence(self): + foo = Option("--with-foo", nargs="*") + helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b"]) + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b")), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--with-foo=a,b", option) + + helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b", "--without-foo"]) + value, option = helper.handle(foo) + self.assertEqual(NegativeOptionValue(), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--without-foo", option) + + helper = CommandLineHelper({}, ["cmd", "--without-foo", "--with-foo=a,b"]) + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b")), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--with-foo=a,b", option) + + foo = Option("--with-foo", env="FOO", nargs="*") + helper = CommandLineHelper({"FOO": ""}, ["cmd", "--with-foo=a,b"]) + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b")), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--with-foo=a,b", option) + + helper = CommandLineHelper({"FOO": "a,b"}, ["cmd", "--without-foo"]) + value, option = helper.handle(foo) + self.assertEqual(NegativeOptionValue(), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--without-foo", option) + + helper = CommandLineHelper({"FOO": ""}, ["cmd", "--with-bar=a,b"]) + value, option = helper.handle(foo) + self.assertEqual(NegativeOptionValue(), value) + self.assertEqual("environment", value.origin) + self.assertEqual("FOO=", option) + + helper = CommandLineHelper({"FOO": "a,b"}, ["cmd", "--without-bar"]) + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b")), value) + self.assertEqual("environment", value.origin) + self.assertEqual("FOO=a,b", option) + + helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b", "FOO="]) + value, option = helper.handle(foo) + self.assertEqual(NegativeOptionValue(), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("FOO=", option) + + helper = CommandLineHelper({}, ["cmd", "--without-foo", "FOO=a,b"]) + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b")), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("FOO=a,b", option) + + helper = CommandLineHelper({}, ["cmd", "FOO=", "--with-foo=a,b"]) + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b")), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--with-foo=a,b", option) + + helper = CommandLineHelper({}, ["cmd", "FOO=a,b", "--without-foo"]) + value, option = helper.handle(foo) + self.assertEqual(NegativeOptionValue(), value) + self.assertEqual("command-line", value.origin) + self.assertEqual("--without-foo", option) + + def test_extra_args(self): + foo = Option("--with-foo", env="FOO", nargs="*") + helper = CommandLineHelper({}, ["cmd"]) 
+ helper.add("FOO=a,b,c", "other-origin") + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b", "c")), value) + self.assertEqual("other-origin", value.origin) + self.assertEqual("FOO=a,b,c", option) + + helper = CommandLineHelper({}, ["cmd"]) + helper.add("FOO=a,b,c", "other-origin") + helper.add("--with-foo=a,b,c", "other-origin") + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b", "c")), value) + self.assertEqual("other-origin", value.origin) + self.assertEqual("--with-foo=a,b,c", option) + + # Adding conflicting options is not allowed. + helper = CommandLineHelper({}, ["cmd"]) + helper.add("FOO=a,b,c", "other-origin") + with self.assertRaises(ConflictingOptionError) as cm: + helper.add("FOO=", "other-origin") + self.assertEqual("FOO=", cm.exception.arg) + self.assertEqual("other-origin", cm.exception.origin) + self.assertEqual("FOO=a,b,c", cm.exception.old_arg) + self.assertEqual("other-origin", cm.exception.old_origin) + with self.assertRaises(ConflictingOptionError) as cm: + helper.add("FOO=a,b", "other-origin") + self.assertEqual("FOO=a,b", cm.exception.arg) + self.assertEqual("other-origin", cm.exception.origin) + self.assertEqual("FOO=a,b,c", cm.exception.old_arg) + self.assertEqual("other-origin", cm.exception.old_origin) + # But adding the same is allowed. + helper.add("FOO=a,b,c", "other-origin") + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b", "c")), value) + self.assertEqual("other-origin", value.origin) + self.assertEqual("FOO=a,b,c", option) + + # The same rule as above applies when using the option form vs. the + # variable form. But we can't detect it when .add is called. + helper = CommandLineHelper({}, ["cmd"]) + helper.add("FOO=a,b,c", "other-origin") + helper.add("--without-foo", "other-origin") + with self.assertRaises(ConflictingOptionError) as cm: + helper.handle(foo) + self.assertEqual("--without-foo", cm.exception.arg) + self.assertEqual("other-origin", cm.exception.origin) + self.assertEqual("FOO=a,b,c", cm.exception.old_arg) + self.assertEqual("other-origin", cm.exception.old_origin) + helper = CommandLineHelper({}, ["cmd"]) + helper.add("FOO=a,b,c", "other-origin") + helper.add("--with-foo=a,b", "other-origin") + with self.assertRaises(ConflictingOptionError) as cm: + helper.handle(foo) + self.assertEqual("--with-foo=a,b", cm.exception.arg) + self.assertEqual("other-origin", cm.exception.origin) + self.assertEqual("FOO=a,b,c", cm.exception.old_arg) + self.assertEqual("other-origin", cm.exception.old_origin) + helper = CommandLineHelper({}, ["cmd"]) + helper.add("FOO=a,b,c", "other-origin") + helper.add("--with-foo=a,b,c", "other-origin") + value, option = helper.handle(foo) + self.assertEqual(PositiveOptionValue(("a", "b", "c")), value) + self.assertEqual("other-origin", value.origin) + self.assertEqual("--with-foo=a,b,c", option) + + # Conflicts are also not allowed against what is in the + # environment/on the command line. 
+        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b"])
+        helper.add("FOO=a,b,c", "other-origin")
+        with self.assertRaises(ConflictingOptionError) as cm:
+            helper.handle(foo)
+        self.assertEqual("FOO=a,b,c", cm.exception.arg)
+        self.assertEqual("other-origin", cm.exception.origin)
+        self.assertEqual("--with-foo=a,b", cm.exception.old_arg)
+        self.assertEqual("command-line", cm.exception.old_origin)
+
+        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b"])
+        helper.add("--without-foo", "other-origin")
+        with self.assertRaises(ConflictingOptionError) as cm:
+            helper.handle(foo)
+        self.assertEqual("--without-foo", cm.exception.arg)
+        self.assertEqual("other-origin", cm.exception.origin)
+        self.assertEqual("--with-foo=a,b", cm.exception.old_arg)
+        self.assertEqual("command-line", cm.exception.old_origin)
+
+    def test_possible_origins(self):
+        with self.assertRaises(InvalidOptionError):
+            Option("--foo", possible_origins="command-line")
+
+        helper = CommandLineHelper({"BAZ": "1"}, ["cmd", "--foo", "--bar"])
+        foo = Option("--foo", possible_origins=("command-line",))
+        value, option = helper.handle(foo)
+        self.assertEqual(PositiveOptionValue(), value)
+        self.assertEqual("command-line", value.origin)
+        self.assertEqual("--foo", option)
+
+        bar = Option("--bar", possible_origins=("mozconfig",))
+        with self.assertRaisesRegexp(
+            InvalidOptionError,
+            "--bar can not be set by command-line. Values are accepted from: mozconfig",
+        ):
+            helper.handle(bar)
+
+        baz = Option(env="BAZ", possible_origins=("implied",))
+        with self.assertRaisesRegexp(
+            InvalidOptionError,
+            "BAZ=1 can not be set by environment. Values are accepted from: implied",
+        ):
+            helper.handle(baz)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
new file mode 100644
index 0000000000..c6af3d99d4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
@@ -0,0 +1,2056 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
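The file that follows builds entire fake toolchains by composing dictionaries
of preprocessor macros with `+`. A minimal sketch of the merge semantics these
tests appear to rely on (`merge_defs` is a hypothetical stand-in, not the real
FakeCompiler class from test_toolchain_helpers): later layers win, and a False
value marks a macro as undefined.

    def merge_defs(*layers):
        # Hypothetical stand-in for FakeCompiler's `+` composition: merge
        # macro dicts left to right; False removes a macro (e.g. -m64
        # undefining __i386__ in GCC_PLATFORM_X86 below).
        merged = {}
        for layer in layers:
            for macro, value in layer.items():
                if value is False:
                    merged.pop(macro, None)
                else:
                    merged[macro] = value
        return merged

    gcc8_c99 = merge_defs({"__GNUC__": 8, "__STDC__": 1}, {"__STDC_VERSION__": "199901L"})
    assert gcc8_c99["__GNUC__"] == 8
    assert merge_defs({"__i386__": 1}, {"__i386__": False, "__x86_64__": 1}) == {
        "__x86_64__": 1
    }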
+ +import logging +import os + +import six +from mozboot.util import MINIMUM_RUST_VERSION +from mozpack import path as mozpath +from mozunit import main +from six import StringIO +from test_toolchain_helpers import CompilerResult, FakeCompiler, PrependFlags + +from common import BaseConfigureTest +from mozbuild.configure.util import Version +from mozbuild.util import ReadOnlyNamespace, memoize + +DEFAULT_C99 = {"__STDC_VERSION__": "199901L"} + +DEFAULT_C11 = {"__STDC_VERSION__": "201112L"} + +DEFAULT_C17 = {"__STDC_VERSION__": "201710L"} + +DEFAULT_CXX_97 = {"__cplusplus": "199711L"} + +DEFAULT_CXX_11 = {"__cplusplus": "201103L"} + +DRAFT_CXX_14 = {"__cplusplus": "201300L"} + +DEFAULT_CXX_14 = {"__cplusplus": "201402L"} + +DRAFT_CXX17_201500 = {"__cplusplus": "201500L"} + +DRAFT_CXX17_201406 = {"__cplusplus": "201406L"} + +DEFAULT_CXX_17 = {"__cplusplus": "201703L"} + +SUPPORTS_GNU99 = {"-std=gnu99": DEFAULT_C99} + +SUPPORTS_GNUXX11 = {"-std=gnu++11": DEFAULT_CXX_11} + +SUPPORTS_GNUXX14 = {"-std=gnu++14": DEFAULT_CXX_14} + +SUPPORTS_CXX14 = {"-std=c++14": DEFAULT_CXX_14} + +SUPPORTS_GNUXX17 = {"-std=gnu++17": DEFAULT_CXX_17} + +SUPPORTS_CXX17 = {"-std=c++17": DEFAULT_CXX_17} + + +@memoize +def GCC_BASE(version): + version = Version(version) + return FakeCompiler( + { + "__GNUC__": version.major, + "__GNUC_MINOR__": version.minor, + "__GNUC_PATCHLEVEL__": version.patch, + "__STDC__": 1, + } + ) + + +@memoize +def GCC(version): + return GCC_BASE(version) + SUPPORTS_GNU99 + + +@memoize +def GXX(version): + return GCC_BASE(version) + DEFAULT_CXX_97 + SUPPORTS_GNUXX11 + + +SUPPORTS_DRAFT_CXX14_VERSION = {"-std=gnu++14": DRAFT_CXX_14} + +SUPPORTS_GNUXX1Z = {"-std=gnu++1z": DRAFT_CXX17_201406} + +SUPPORTS_DRAFT_CXX17_201500_VERSION = {"-std=gnu++17": DRAFT_CXX17_201500} + +GCC_4_9 = GCC("4.9.3") +GXX_4_9 = GXX("4.9.3") + SUPPORTS_DRAFT_CXX14_VERSION +GCC_5 = GCC("5.2.1") + DEFAULT_C11 +GXX_5 = GXX("5.2.1") + SUPPORTS_GNUXX14 +GCC_6 = GCC("6.4.0") + DEFAULT_C11 +GXX_6 = ( + GXX("6.4.0") + + DEFAULT_CXX_14 + + SUPPORTS_GNUXX17 + + SUPPORTS_DRAFT_CXX17_201500_VERSION +) +GCC_7 = GCC("7.3.0") + DEFAULT_C11 +GXX_7 = GXX("7.3.0") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 + SUPPORTS_CXX17 +GCC_8 = GCC("8.3.0") + DEFAULT_C11 +GXX_8 = GXX("8.3.0") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 + SUPPORTS_CXX17 +GCC_10 = GCC("10.2.1") + DEFAULT_C17 +GXX_10 = GXX("10.2.1") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 + SUPPORTS_CXX17 + +DEFAULT_GCC = GCC_8 +DEFAULT_GXX = GXX_8 + +GCC_PLATFORM_LITTLE_ENDIAN = { + "__ORDER_LITTLE_ENDIAN__": 1234, + "__ORDER_BIG_ENDIAN__": 4321, + "__BYTE_ORDER__": 1234, +} + +GCC_PLATFORM_BIG_ENDIAN = { + "__ORDER_LITTLE_ENDIAN__": 1234, + "__ORDER_BIG_ENDIAN__": 4321, + "__BYTE_ORDER__": 4321, +} + +GCC_PLATFORM_X86 = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + { + None: {"__i386__": 1}, + "-m64": {"__i386__": False, "__x86_64__": 1}, +} + +GCC_PLATFORM_X86_64 = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + { + None: {"__x86_64__": 1}, + "-m32": {"__x86_64__": False, "__i386__": 1}, +} + +GCC_PLATFORM_ARM = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {"__arm__": 1} + +GCC_PLATFORM_LINUX = {"__linux__": 1} + +GCC_PLATFORM_DARWIN = {"__APPLE__": 1} + +GCC_PLATFORM_WIN = {"_WIN32": 1, "WINNT": 1} + +GCC_PLATFORM_OPENBSD = {"__OpenBSD__": 1} + +GCC_PLATFORM_X86_LINUX = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_LINUX) +GCC_PLATFORM_X86_64_LINUX = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_LINUX) +GCC_PLATFORM_ARM_LINUX = FakeCompiler(GCC_PLATFORM_ARM, GCC_PLATFORM_LINUX) +GCC_PLATFORM_X86_OSX = 
FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_DARWIN) +GCC_PLATFORM_X86_64_OSX = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_DARWIN) +GCC_PLATFORM_X86_WIN = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_WIN) +GCC_PLATFORM_X86_64_WIN = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_WIN) + + +@memoize +def CLANG_BASE(version): + version = Version(version) + return FakeCompiler( + { + "__clang__": 1, + "__clang_major__": version.major, + "__clang_minor__": version.minor, + "__clang_patchlevel__": version.patch, + } + ) + + +@memoize +def CLANG(version): + return GCC_BASE("4.2.1") + CLANG_BASE(version) + SUPPORTS_GNU99 + + +@memoize +def CLANGXX(version): + return ( + GCC_BASE("4.2.1") + + CLANG_BASE(version) + + DEFAULT_CXX_97 + + SUPPORTS_GNUXX11 + + SUPPORTS_GNUXX14 + ) + + +CLANG_3_3 = CLANG("3.3.0") + DEFAULT_C99 +CLANGXX_3_3 = CLANGXX("3.3.0") +CLANG_4_0 = CLANG("4.0.2") + DEFAULT_C11 +CLANGXX_4_0 = CLANGXX("4.0.2") + SUPPORTS_GNUXX1Z +CLANG_7_0 = CLANG("7.0.0") + DEFAULT_C11 +CLANGXX_7_0 = CLANGXX("7.0.0") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 +XCODE_CLANG_3_3 = ( + CLANG("5.0") + + DEFAULT_C99 + + { + # Real Xcode clang has a full version here, but we don't care about it. + "__apple_build_version__": "1" + } +) +XCODE_CLANGXX_3_3 = CLANGXX("5.0") + {"__apple_build_version__": "1"} +XCODE_CLANG_4_0 = CLANG("9.0.0") + DEFAULT_C11 + {"__apple_build_version__": "1"} +XCODE_CLANGXX_4_0 = ( + CLANGXX("9.0.0") + SUPPORTS_GNUXX1Z + {"__apple_build_version__": "1"} +) +XCODE_CLANG_7_0 = CLANG("10.0.1") + DEFAULT_C11 + {"__apple_build_version__": "1"} +XCODE_CLANGXX_7_0 = ( + CLANGXX("10.0.1") + SUPPORTS_GNUXX17 + {"__apple_build_version__": "1"} +) +DEFAULT_CLANG = CLANG_7_0 +DEFAULT_CLANGXX = CLANGXX_7_0 + + +def CLANG_PLATFORM(gcc_platform): + base = { + "--target=x86_64-linux-gnu": GCC_PLATFORM_X86_64_LINUX[None], + "--target=x86_64-apple-darwin11.2.0": GCC_PLATFORM_X86_64_OSX[None], + "--target=i686-linux-gnu": GCC_PLATFORM_X86_LINUX[None], + "--target=i686-apple-darwin11.2.0": GCC_PLATFORM_X86_OSX[None], + "--target=arm-linux-gnu": GCC_PLATFORM_ARM_LINUX[None], + } + undo_gcc_platform = { + k: {symbol: False for symbol in gcc_platform[None]} for k in base + } + return FakeCompiler(gcc_platform, undo_gcc_platform, base) + + +CLANG_PLATFORM_X86_LINUX = CLANG_PLATFORM(GCC_PLATFORM_X86_LINUX) +CLANG_PLATFORM_X86_64_LINUX = CLANG_PLATFORM(GCC_PLATFORM_X86_64_LINUX) +CLANG_PLATFORM_X86_OSX = CLANG_PLATFORM(GCC_PLATFORM_X86_OSX) +CLANG_PLATFORM_X86_64_OSX = CLANG_PLATFORM(GCC_PLATFORM_X86_64_OSX) +CLANG_PLATFORM_X86_WIN = CLANG_PLATFORM(GCC_PLATFORM_X86_WIN) +CLANG_PLATFORM_X86_64_WIN = CLANG_PLATFORM(GCC_PLATFORM_X86_64_WIN) + + +@memoize +def VS(version): + version = Version(version) + return FakeCompiler( + { + None: { + "_MSC_VER": "%02d%02d" % (version.major, version.minor), + "_MSC_FULL_VER": "%02d%02d%05d" + % (version.major, version.minor, version.patch), + "_MT": "1", + }, + "*.cpp": DEFAULT_CXX_97, + } + ) + + +VS_2017u8 = VS("19.15.26726") + +VS_PLATFORM_X86 = {"_M_IX86": 600, "_WIN32": 1} + +VS_PLATFORM_X86_64 = {"_M_X64": 100, "_WIN32": 1, "_WIN64": 1} + +# Despite the 32 in the name, this macro is defined for 32- and 64-bit. +MINGW32 = {"__MINGW32__": True} + +# Note: In reality, the -std=gnu* options are only supported when preceded by +# -Xclang. 
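+# (The clang-cl CompilerResults further down accordingly expect the flags
+# with an explicit -Xclang prefix, e.g. ["-Xclang", "-std=gnu99"].)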
+CLANG_CL_3_9 = ( + CLANG_BASE("3.9.0") + + VS("18.00.00000") + + DEFAULT_C11 + + SUPPORTS_GNU99 + + SUPPORTS_GNUXX11 + + SUPPORTS_CXX14 +) + {"*.cpp": {"__STDC_VERSION__": False, "__cplusplus": "201103L"}} +CLANG_CL_9_0 = ( + CLANG_BASE("9.0.0") + + VS("18.00.00000") + + DEFAULT_C11 + + SUPPORTS_GNU99 + + SUPPORTS_GNUXX11 + + SUPPORTS_CXX14 + + SUPPORTS_CXX17 +) + {"*.cpp": {"__STDC_VERSION__": False, "__cplusplus": "201103L"}} + +CLANG_CL_PLATFORM_X86 = FakeCompiler( + VS_PLATFORM_X86, GCC_PLATFORM_X86[None], GCC_PLATFORM_LITTLE_ENDIAN +) +CLANG_CL_PLATFORM_X86_64 = FakeCompiler( + VS_PLATFORM_X86_64, GCC_PLATFORM_X86_64[None], GCC_PLATFORM_LITTLE_ENDIAN +) + +LIBRARY_NAME_INFOS = { + "linux-gnu": { + "DLL_PREFIX": "lib", + "DLL_SUFFIX": ".so", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "IMPORT_LIB_SUFFIX": "", + "OBJ_SUFFIX": "o", + }, + "darwin11.2.0": { + "DLL_PREFIX": "lib", + "DLL_SUFFIX": ".dylib", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "IMPORT_LIB_SUFFIX": "", + "OBJ_SUFFIX": "o", + }, + "mingw32": { + "DLL_PREFIX": "", + "DLL_SUFFIX": ".dll", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "IMPORT_LIB_SUFFIX": "a", + "OBJ_SUFFIX": "o", + }, + "windows-msvc": { + "DLL_PREFIX": "", + "DLL_SUFFIX": ".dll", + "LIB_PREFIX": "", + "LIB_SUFFIX": "lib", + "IMPORT_LIB_SUFFIX": "lib", + "OBJ_SUFFIX": "obj", + }, + "windows-gnu": { + "DLL_PREFIX": "", + "DLL_SUFFIX": ".dll", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "IMPORT_LIB_SUFFIX": "a", + "OBJ_SUFFIX": "o", + }, + "openbsd6.1": { + "DLL_PREFIX": "lib", + "DLL_SUFFIX": ".so.1.0", + "LIB_PREFIX": "lib", + "LIB_SUFFIX": "a", + "IMPORT_LIB_SUFFIX": "", + "OBJ_SUFFIX": "o", + }, +} + + +class BaseToolchainTest(BaseConfigureTest): + def setUp(self): + super(BaseToolchainTest, self).setUp() + self.out = StringIO() + self.logger = logging.getLogger("BaseToolchainTest") + self.logger.setLevel(logging.ERROR) + self.handler = logging.StreamHandler(self.out) + self.logger.addHandler(self.handler) + + def tearDown(self): + self.logger.removeHandler(self.handler) + del self.handler + del self.out + super(BaseToolchainTest, self).tearDown() + + def do_toolchain_test(self, paths, results, args=[], environ={}): + """Helper to test the toolchain checks from toolchain.configure. + + - `paths` is a dict associating compiler paths to FakeCompiler + definitions from above. + - `results` is a dict associating result variable names from + toolchain.configure (c_compiler, cxx_compiler, host_c_compiler, + host_cxx_compiler) with a result. + The result can either be an error string, or a CompilerResult + corresponding to the object returned by toolchain.configure checks. + When the results for host_c_compiler are identical to c_compiler, + they can be omitted. Likewise for host_cxx_compiler vs. + cxx_compiler. + """ + environ = dict(environ) + if "PATH" not in environ: + environ["PATH"] = os.pathsep.join( + mozpath.abspath(p) for p in ("/bin", "/usr/bin") + ) + + args = args + ["--enable-release", "--disable-bootstrap"] + + sandbox = self.get_sandbox(paths, {}, args, environ, logger=self.logger) + + for var in ( + "c_compiler", + "cxx_compiler", + "host_c_compiler", + "host_cxx_compiler", + ): + if var in results: + result = results[var] + elif var.startswith("host_"): + result = results.get(var[5:], {}) + else: + result = {} + try: + self.out.truncate(0) + self.out.seek(0) + compiler = sandbox._value_for(sandbox[var]) + # Add var on both ends to make it clear which of the + # variables is failing the test when that happens. 
+ self.assertEqual((var, compiler), (var, result)) + except SystemExit: + self.assertEqual((var, result), (var, self.out.getvalue().strip())) + return + + # Normalize the target os to match what we have as keys in + # LIBRARY_NAME_INFOS. + target_os = getattr(self, "TARGET", self.HOST).split("-", 2)[2] + if target_os == "mingw32": + compiler_type = sandbox._value_for(sandbox["c_compiler"]).type + if compiler_type == "clang-cl": + target_os = "windows-msvc" + elif target_os == "linux-gnuabi64": + target_os = "linux-gnu" + + self.do_library_name_info_test(target_os, sandbox) + + # Try again on artifact builds. In that case, we always get library + # name info for msvc on Windows + if target_os == "mingw32": + target_os = "windows-msvc" + + sandbox = self.get_sandbox( + paths, {}, args + ["--enable-artifact-builds"], environ, logger=self.logger + ) + + self.do_library_name_info_test(target_os, sandbox) + + def do_library_name_info_test(self, target_os, sandbox): + library_name_info = LIBRARY_NAME_INFOS[target_os] + for k in ( + "DLL_PREFIX", + "DLL_SUFFIX", + "LIB_PREFIX", + "LIB_SUFFIX", + "IMPORT_LIB_SUFFIX", + "OBJ_SUFFIX", + ): + self.assertEqual( + "%s=%s" % (k, sandbox.get_config(k)), + "%s=%s" % (k, library_name_info[k]), + ) + + +def old_gcc_message(old_ver): + return "Only GCC 8.1 or newer is supported (found version {}).".format(old_ver) + + +class LinuxToolchainTest(BaseToolchainTest): + PATHS = { + "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/gcc-4.9": GCC_4_9 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++-4.9": GXX_4_9 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/gcc-6": GCC_6 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++-6": GXX_6 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/gcc-7": GCC_7 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++-7": GXX_7 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/gcc-8": GCC_8 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++-8": GXX_8 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/gcc-10": GCC_10 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/g++-10": GXX_10 + GCC_PLATFORM_X86_64_LINUX, + "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang-7.0": CLANG_7_0 + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang++-7.0": CLANGXX_7_0 + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang-4.0": CLANG_4_0 + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang++-4.0": CLANGXX_4_0 + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang-3.3": CLANG_3_3 + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang++-3.3": CLANGXX_3_3 + CLANG_PLATFORM_X86_64_LINUX, + } + + GCC_4_7_RESULT = old_gcc_message("4.7.3") + GXX_4_7_RESULT = GCC_4_7_RESULT + GCC_4_9_RESULT = old_gcc_message("4.9.3") + GXX_4_9_RESULT = GCC_4_9_RESULT + GCC_5_RESULT = old_gcc_message("5.2.1") + GXX_5_RESULT = GCC_5_RESULT + GCC_6_RESULT = old_gcc_message("6.4.0") + GXX_6_RESULT = GCC_6_RESULT + GCC_7_RESULT = old_gcc_message("7.3.0") + GXX_7_RESULT = GCC_7_RESULT + GCC_8_RESULT = CompilerResult( + flags=["-std=gnu99"], + version="8.3.0", + type="gcc", + compiler="/usr/bin/gcc-8", + language="C", + ) + GXX_8_RESULT = CompilerResult( + flags=["-std=gnu++17"], + version="8.3.0", + type="gcc", + compiler="/usr/bin/g++-8", + language="C++", + ) + DEFAULT_GCC_RESULT = GCC_8_RESULT + {"compiler": "/usr/bin/gcc"} + DEFAULT_GXX_RESULT = GXX_8_RESULT + {"compiler": "/usr/bin/g++"} + + 
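+    # `CompilerResult + dict` yields a copy with the given fields replaced,
+    # so the DEFAULT_* results above are just the gcc-8 results with the
+    # compiler path swapped for the unversioned /usr/bin names.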
CLANG_3_3_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 3.3.0)." + ) + CLANGXX_3_3_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 3.3.0)." + ) + CLANG_4_0_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 4.0.2)." + ) + CLANGXX_4_0_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 4.0.2)." + ) + CLANG_7_0_RESULT = CompilerResult( + flags=["-std=gnu99"], + version="7.0.0", + type="clang", + compiler="/usr/bin/clang-7.0", + language="C", + ) + CLANGXX_7_0_RESULT = CompilerResult( + flags=["-std=gnu++17"], + version="7.0.0", + type="clang", + compiler="/usr/bin/clang++-7.0", + language="C++", + ) + DEFAULT_CLANG_RESULT = CLANG_7_0_RESULT + {"compiler": "/usr/bin/clang"} + DEFAULT_CLANGXX_RESULT = CLANGXX_7_0_RESULT + {"compiler": "/usr/bin/clang++"} + + def test_default(self): + # We'll try clang and gcc, and find clang first. + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + def test_gcc(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT, + "cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "gcc", "CXX": "g++"}, + ) + + def test_unsupported_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": self.GCC_4_9_RESULT}, + environ={"CC": "gcc-4.9", "CXX": "g++-4.9"}, + ) + + # Maybe this should be reporting the mismatched version instead. + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT, + "cxx_compiler": self.GXX_4_9_RESULT, + }, + environ={"CC": "gcc", "CXX": "g++-4.9"}, + ) + + def test_overridden_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": self.GCC_7_RESULT, "cxx_compiler": self.GXX_7_RESULT}, + environ={"CC": "gcc-7", "CXX": "g++-7"}, + ) + + def test_guess_cxx(self): + # When CXX is not set, we guess it from CC. + self.do_toolchain_test( + self.PATHS, + {"c_compiler": self.GCC_7_RESULT, "cxx_compiler": self.GXX_7_RESULT}, + environ={"CC": "gcc-7"}, + ) + + def test_mismatched_gcc(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT, + "cxx_compiler": ( + "The target C compiler is version 8.3.0, while the target " + "C++ compiler is version 10.2.1. Need to use the same compiler " + "version." + ), + }, + environ={"CC": "gcc", "CXX": "g++-10"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT, + "cxx_compiler": self.DEFAULT_GXX_RESULT, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": ( + "The host C compiler is version 8.3.0, while the host " + "C++ compiler is version 10.2.1. Need to use the same compiler " + "version." + ), + }, + environ={"CC": "gcc", "HOST_CXX": "g++-10"}, + ) + + def test_mismatched_compiler(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": ( + "The target C compiler is clang, while the target C++ compiler " + "is gcc. Need to use the same compiler suite." + ), + }, + environ={"CXX": "g++"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": ( + "The host C compiler is clang, while the host C++ compiler " + "is gcc. Need to use the same compiler suite." 
+ ), + }, + environ={"HOST_CXX": "g++"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": "`%s` is not a C compiler." + % mozpath.abspath("/usr/bin/g++") + }, + environ={"CC": "g++"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": "`%s` is not a C++ compiler." + % mozpath.abspath("/usr/bin/clang"), + }, + environ={"CXX": "clang"}, + ) + + def test_clang(self): + # We'll try gcc and clang, but since there is no gcc (gcc-x.y doesn't + # count), find clang. + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) not in ("gcc", "g++") + } + self.do_toolchain_test( + paths, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + def test_guess_cxx_clang(self): + # When CXX is not set, we guess it from CC. + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_7_0_RESULT, + "cxx_compiler": self.CLANGXX_7_0_RESULT, + }, + environ={"CC": "clang-7.0"}, + ) + + def test_unsupported_clang(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_3_3_RESULT, + "cxx_compiler": self.CLANGXX_3_3_RESULT, + }, + environ={"CC": "clang-3.3", "CXX": "clang++-3.3"}, + ) + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_4_0_RESULT, + "cxx_compiler": self.CLANGXX_4_0_RESULT, + }, + environ={"CC": "clang-4.0", "CXX": "clang++-4.0"}, + ) + + def test_no_supported_compiler(self): + # Even if there are gcc-x.y or clang-x.y compilers available, we + # don't try them. This could be considered something to improve. + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) not in ("gcc", "g++", "clang", "clang++") + } + self.do_toolchain_test( + paths, {"c_compiler": "Cannot find the target C compiler"} + ) + + def test_absolute_path(self): + paths = dict(self.PATHS) + paths.update( + { + "/opt/clang/bin/clang": paths["/usr/bin/clang"], + "/opt/clang/bin/clang++": paths["/usr/bin/clang++"], + } + ) + result = { + "c_compiler": self.DEFAULT_CLANG_RESULT + + {"compiler": "/opt/clang/bin/clang"}, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + + {"compiler": "/opt/clang/bin/clang++"}, + } + self.do_toolchain_test( + paths, + result, + environ={"CC": "/opt/clang/bin/clang", "CXX": "/opt/clang/bin/clang++"}, + ) + # With CXX guess too. 
+ self.do_toolchain_test(paths, result, environ={"CC": "/opt/clang/bin/clang"}) + + def test_atypical_name(self): + paths = dict(self.PATHS) + paths.update( + { + "/usr/bin/afl-clang-fast": paths["/usr/bin/clang"], + "/usr/bin/afl-clang-fast++": paths["/usr/bin/clang++"], + } + ) + self.do_toolchain_test( + paths, + { + "c_compiler": self.DEFAULT_CLANG_RESULT + + {"compiler": "/usr/bin/afl-clang-fast"}, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + + {"compiler": "/usr/bin/afl-clang-fast++"}, + }, + environ={"CC": "afl-clang-fast", "CXX": "afl-clang-fast++"}, + ) + + def test_mixed_compilers(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "clang", "HOST_CC": "gcc"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "clang", "CXX": "clang++", "HOST_CC": "gcc"}, + ) + + +class LinuxSimpleCrossToolchainTest(BaseToolchainTest): + TARGET = "i686-pc-linux-gnu" + PATHS = LinuxToolchainTest.PATHS + DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT + DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT + DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT + DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT + + def test_cross_gcc(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT + {"flags": ["-m32"]}, + "cxx_compiler": self.DEFAULT_GXX_RESULT + {"flags": ["-m32"]}, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "gcc"}, + ) + + def test_cross_clang(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT + {"flags": ["-m32"]}, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + {"flags": ["-m32"]}, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + +class LinuxX86_64CrossToolchainTest(BaseToolchainTest): + HOST = "i686-pc-linux-gnu" + TARGET = "x86_64-pc-linux-gnu" + PATHS = { + "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_LINUX, + "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_LINUX, + "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_LINUX, + "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_LINUX, + } + DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT + DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT + DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT + DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT + + def test_cross_gcc(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT + {"flags": ["-m64"]}, + "cxx_compiler": self.DEFAULT_GXX_RESULT + {"flags": ["-m64"]}, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "gcc"}, + ) + + def test_cross_clang(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT + {"flags": ["-m64"]}, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + {"flags": ["-m64"]}, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + +def xcrun(stdin, args): + if args == 
("--show-sdk-path",): + return ( + 0, + mozpath.join(os.path.abspath(os.path.dirname(__file__)), "macos_fake_sdk"), + "", + ) + raise NotImplementedError() + + +class OSXToolchainTest(BaseToolchainTest): + HOST = "x86_64-apple-darwin11.2.0" + PATHS = { + "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_64_OSX, + "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_64_OSX, + "/usr/bin/gcc-8": GCC_8 + GCC_PLATFORM_X86_64_OSX, + "/usr/bin/g++-8": GXX_8 + GCC_PLATFORM_X86_64_OSX, + "/usr/bin/clang": XCODE_CLANG_7_0 + CLANG_PLATFORM_X86_64_OSX, + "/usr/bin/clang++": XCODE_CLANGXX_7_0 + CLANG_PLATFORM_X86_64_OSX, + "/usr/bin/clang-4.0": XCODE_CLANG_4_0 + CLANG_PLATFORM_X86_64_OSX, + "/usr/bin/clang++-4.0": XCODE_CLANGXX_4_0 + CLANG_PLATFORM_X86_64_OSX, + "/usr/bin/clang-3.3": XCODE_CLANG_3_3 + CLANG_PLATFORM_X86_64_OSX, + "/usr/bin/clang++-3.3": XCODE_CLANGXX_3_3 + CLANG_PLATFORM_X86_64_OSX, + "/usr/bin/xcrun": xcrun, + } + CLANG_3_3_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)." + ) + CLANGXX_3_3_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)." + ) + CLANG_4_0_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)." + ) + CLANGXX_4_0_RESULT = ( + "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)." + ) + DEFAULT_CLANG_RESULT = CompilerResult( + flags=["-std=gnu99"], + version="7.0.0", + type="clang", + compiler="/usr/bin/clang", + language="C", + ) + DEFAULT_CLANGXX_RESULT = CompilerResult( + flags=["-stdlib=libc++", "-std=gnu++17"], + version="7.0.0", + type="clang", + compiler="/usr/bin/clang++", + language="C++", + ) + GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT + GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT + GCC_8_RESULT = LinuxToolchainTest.GCC_8_RESULT + GXX_8_RESULT = LinuxToolchainTest.GXX_8_RESULT + SYSROOT_FLAGS = { + "flags": PrependFlags( + [ + "-isysroot", + xcrun("", ("--show-sdk-path",))[1], + "-mmacosx-version-min=10.12", + ] + ) + } + + def test_clang(self): + # We only try clang because gcc is known not to work. + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT + self.SYSROOT_FLAGS, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + self.SYSROOT_FLAGS, + }, + ) + + def test_not_gcc(self): + # We won't pick GCC if it's the only thing available. + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) not in ("clang", "clang++") + } + self.do_toolchain_test( + paths, {"c_compiler": "Cannot find the target C compiler"} + ) + + def test_unsupported_clang(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_3_3_RESULT, + "cxx_compiler": self.CLANGXX_3_3_RESULT, + }, + environ={"CC": "clang-3.3", "CXX": "clang++-3.3"}, + ) + # When targeting mac, we require at least version 5. + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_4_0_RESULT, + "cxx_compiler": self.CLANGXX_4_0_RESULT, + }, + environ={"CC": "clang-4.0", "CXX": "clang++-4.0"}, + ) + + def test_forced_gcc(self): + # GCC can still be forced if the user really wants it. 
+ self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.GCC_8_RESULT + self.SYSROOT_FLAGS, + "cxx_compiler": self.GXX_8_RESULT + self.SYSROOT_FLAGS, + }, + environ={"CC": "gcc-8", "CXX": "g++-8"}, + ) + + def test_forced_unsupported_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": self.GCC_5_RESULT}, + environ={"CC": "gcc-5", "CXX": "g++-5"}, + ) + + +class MingwToolchainTest(BaseToolchainTest): + HOST = "i686-pc-mingw32" + + # For the purpose of this test, it doesn't matter that the paths are not + # real Windows paths. + PATHS = { + "/usr/bin/cl": VS_2017u8 + VS_PLATFORM_X86, + "/usr/bin/clang-cl-3.9": CLANG_CL_3_9 + CLANG_CL_PLATFORM_X86, + "/usr/bin/clang-cl": CLANG_CL_9_0 + CLANG_CL_PLATFORM_X86, + "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/gcc-4.9": GCC_4_9 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/g++-4.9": GXX_4_9 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/gcc-6": GCC_6 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/g++-6": GXX_6 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/gcc-7": GCC_7 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/g++-7": GXX_7 + GCC_PLATFORM_X86_WIN + MINGW32, + "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang-7.0": CLANG_7_0 + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang++-7.0": CLANGXX_7_0 + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang-4.0": CLANG_4_0 + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang++-4.0": CLANGXX_4_0 + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang-3.3": CLANG_3_3 + CLANG_PLATFORM_X86_WIN, + "/usr/bin/clang++-3.3": CLANGXX_3_3 + CLANG_PLATFORM_X86_WIN, + } + + CLANG_CL_3_9_RESULT = ( + "Only clang-cl 9.0 or newer is supported (found version 3.9.0)" + ) + CLANG_CL_9_0_RESULT = CompilerResult( + version="9.0.0", + flags=["-Xclang", "-std=gnu99"], + type="clang-cl", + compiler="/usr/bin/clang-cl", + language="C", + ) + CLANGXX_CL_3_9_RESULT = ( + "Only clang-cl 9.0 or newer is supported (found version 3.9.0)" + ) + CLANGXX_CL_9_0_RESULT = CompilerResult( + version="9.0.0", + flags=["-Xclang", "-std=c++17"], + type="clang-cl", + compiler="/usr/bin/clang-cl", + language="C++", + ) + CLANG_3_3_RESULT = LinuxToolchainTest.CLANG_3_3_RESULT + CLANGXX_3_3_RESULT = LinuxToolchainTest.CLANGXX_3_3_RESULT + CLANG_4_0_RESULT = LinuxToolchainTest.CLANG_4_0_RESULT + CLANGXX_4_0_RESULT = LinuxToolchainTest.CLANGXX_4_0_RESULT + DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT + DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT + + def test_unsupported_msvc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "/usr/bin/cl"}, + ) + + def test_unsupported_clang_cl(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": self.CLANG_CL_3_9_RESULT}, + environ={"CC": "/usr/bin/clang-cl-3.9"}, + ) + + def test_clang_cl(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_CL_9_0_RESULT, + "cxx_compiler": self.CLANGXX_CL_9_0_RESULT, + }, + ) + + def test_gcc(self): + # GCC is unsupported, if you try it should find clang. 
+ paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) != "clang-cl" + } + self.do_toolchain_test( + paths, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + # This test is not perfect, as the GCC version needs to be updated when we + # bump the minimum GCC version, but the idea is that even supported GCC + # on other platforms should not be supported on Windows. + def test_overridden_supported_elsewhere_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "gcc-7", "CXX": "g++-7"}, + ) + + def test_overridden_unsupported_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "gcc-5", "CXX": "g++-5"}, + ) + + def test_clang(self): + # We'll pick clang if nothing else is found. + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) not in ("clang-cl", "gcc") + } + self.do_toolchain_test( + paths, + { + "c_compiler": self.DEFAULT_CLANG_RESULT, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + def test_overridden_unsupported_clang(self): + # clang 3.3 C compiler is perfectly fine, but we need more for C++. + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.CLANG_3_3_RESULT, + "cxx_compiler": self.CLANGXX_3_3_RESULT, + }, + environ={"CC": "clang-3.3", "CXX": "clang++-3.3"}, + ) + + +class Mingw64ToolchainTest(MingwToolchainTest): + HOST = "x86_64-pc-mingw32" + + # For the purpose of this test, it doesn't matter that the paths are not + # real Windows paths. + PATHS = { + "/usr/bin/cl": VS_2017u8 + VS_PLATFORM_X86_64, + "/usr/bin/clang-cl": CLANG_CL_9_0 + CLANG_CL_PLATFORM_X86_64, + "/usr/bin/clang-cl-3.9": CLANG_CL_3_9 + CLANG_CL_PLATFORM_X86_64, + "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/gcc-4.9": GCC_4_9 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/g++-4.9": GXX_4_9 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/gcc-6": GCC_6 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/g++-6": GXX_6 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/gcc-7": GCC_7 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/g++-7": GXX_7 + GCC_PLATFORM_X86_64_WIN + MINGW32, + "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang-7.0": CLANG_7_0 + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang++-7.0": CLANGXX_7_0 + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang-4.0": CLANG_4_0 + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang++-4.0": CLANGXX_4_0 + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang-3.3": CLANG_3_3 + CLANG_PLATFORM_X86_64_WIN, + "/usr/bin/clang++-3.3": CLANGXX_3_3 + CLANG_PLATFORM_X86_64_WIN, + } + + +class WindowsToolchainTest(BaseToolchainTest): + HOST = "i686-pc-windows-msvc" + + PATHS = MingwToolchainTest.PATHS + + def test_unsupported_msvc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "/usr/bin/cl"}, + ) + + def test_unsupported_clang_cl(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": MingwToolchainTest.CLANG_CL_3_9_RESULT}, + environ={"CC": "/usr/bin/clang-cl-3.9"}, + ) + + def test_clang_cl(self): + 
self.do_toolchain_test( + self.PATHS, + { + "c_compiler": MingwToolchainTest.CLANG_CL_9_0_RESULT, + "cxx_compiler": MingwToolchainTest.CLANGXX_CL_9_0_RESULT, + }, + ) + + def test_unsupported_gcc(self): + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) != "clang-cl" + } + self.do_toolchain_test( + paths, + {"c_compiler": "Cannot find the target C compiler"}, + ) + + def test_overridden_unsupported_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "gcc-5", "CXX": "g++-5"}, + ) + + def test_unsupported_clang(self): + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) not in ("clang-cl", "gcc") + } + self.do_toolchain_test( + paths, + {"c_compiler": "Cannot find the target C compiler"}, + ) + + def test_overridden_unsupported_clang(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "clang-3.3", "CXX": "clang++-3.3"}, + ) + + +class Windows64ToolchainTest(WindowsToolchainTest): + HOST = "x86_64-pc-windows-msvc" + + PATHS = Mingw64ToolchainTest.PATHS + + +class WindowsGnuToolchainTest(BaseToolchainTest): + HOST = "i686-pc-windows-gnu" + + PATHS = MingwToolchainTest.PATHS + + def test_unsupported_msvc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "/usr/bin/cl"}, + ) + + def test_unsupported_clang_cl(self): + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) == "clang-cl" + } + self.do_toolchain_test( + paths, + {"c_compiler": "Cannot find the target C compiler"}, + ) + + def test_overridden_unsupported_clang_cl(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "clang-cl", "CXX": "clang-cl"}, + ) + + def test_unsupported_gcc(self): + paths = { + k: v for k, v in six.iteritems(self.PATHS) if os.path.basename(k) == "gcc" + } + self.do_toolchain_test( + paths, + {"c_compiler": "Cannot find the target C compiler"}, + ) + + def test_overridden_unsupported_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": "Unknown compiler or compiler not supported."}, + environ={"CC": "gcc-5", "CXX": "g++-5"}, + ) + + def test_clang(self): + paths = { + k: v + for k, v in six.iteritems(self.PATHS) + if os.path.basename(k) not in ("clang-cl", "gcc") + } + self.do_toolchain_test( + paths, + { + "c_compiler": MingwToolchainTest.DEFAULT_CLANG_RESULT, + "cxx_compiler": MingwToolchainTest.DEFAULT_CLANGXX_RESULT, + }, + ) + + def test_overridden_unsupported_clang(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": MingwToolchainTest.CLANG_3_3_RESULT, + "cxx_compiler": MingwToolchainTest.CLANGXX_3_3_RESULT, + }, + environ={"CC": "clang-3.3", "CXX": "clang++-3.3"}, + ) + + +class WindowsGnu64ToolchainTest(WindowsGnuToolchainTest): + HOST = "x86_64-pc-windows-gnu" + + PATHS = Mingw64ToolchainTest.PATHS + + +class LinuxCrossCompileToolchainTest(BaseToolchainTest): + TARGET = "arm-unknown-linux-gnu" + PATHS = { + "/usr/bin/arm-linux-gnu-gcc-4.9": GCC_4_9 + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-g++-4.9": GXX_4_9 + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-gcc-5": GCC_5 + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-g++-5": GXX_5 + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-gcc": DEFAULT_GCC + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-g++": 
DEFAULT_GXX + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-gcc-7": GCC_7 + GCC_PLATFORM_ARM_LINUX, + "/usr/bin/arm-linux-gnu-g++-7": GXX_7 + GCC_PLATFORM_ARM_LINUX, + } + PATHS.update(LinuxToolchainTest.PATHS) + ARM_GCC_4_9_RESULT = LinuxToolchainTest.GCC_4_9_RESULT + ARM_GXX_4_9_RESULT = LinuxToolchainTest.GXX_4_9_RESULT + ARM_GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT + ARM_GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT + ARM_DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT + { + "compiler": "/usr/bin/arm-linux-gnu-gcc" + } + ARM_DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT + { + "compiler": "/usr/bin/arm-linux-gnu-g++" + } + ARM_GCC_7_RESULT = LinuxToolchainTest.GCC_7_RESULT + ARM_GXX_7_RESULT = LinuxToolchainTest.GXX_7_RESULT + DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT + DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT + DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT + DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT + + little_endian = FakeCompiler(GCC_PLATFORM_LINUX, GCC_PLATFORM_LITTLE_ENDIAN) + big_endian = FakeCompiler(GCC_PLATFORM_LINUX, GCC_PLATFORM_BIG_ENDIAN) + + PLATFORMS = { + "i686-pc-linux-gnu": GCC_PLATFORM_X86_LINUX, + "x86_64-pc-linux-gnu": GCC_PLATFORM_X86_64_LINUX, + "arm-unknown-linux-gnu": GCC_PLATFORM_ARM_LINUX, + "aarch64-unknown-linux-gnu": little_endian + {"__aarch64__": 1}, + "ia64-unknown-linux-gnu": little_endian + {"__ia64__": 1}, + "s390x-unknown-linux-gnu": big_endian + {"__s390x__": 1, "__s390__": 1}, + "s390-unknown-linux-gnu": big_endian + {"__s390__": 1}, + "powerpc64-unknown-linux-gnu": big_endian + + { + None: {"__powerpc64__": 1, "__powerpc__": 1}, + "-m32": {"__powerpc64__": False}, + }, + "powerpc-unknown-linux-gnu": big_endian + + {None: {"__powerpc__": 1}, "-m64": {"__powerpc64__": 1}}, + "alpha-unknown-linux-gnu": little_endian + {"__alpha__": 1}, + "hppa-unknown-linux-gnu": big_endian + {"__hppa__": 1}, + "sparc64-unknown-linux-gnu": big_endian + + {None: {"__arch64__": 1, "__sparc__": 1}, "-m32": {"__arch64__": False}}, + "sparc-unknown-linux-gnu": big_endian + + {None: {"__sparc__": 1}, "-m64": {"__arch64__": 1}}, + "m68k-unknown-linux-gnu": big_endian + {"__m68k__": 1}, + "mips64-unknown-linux-gnuabi64": big_endian + {"__mips64": 1, "__mips__": 1}, + "mips-unknown-linux-gnu": big_endian + {"__mips__": 1}, + "riscv64-unknown-linux-gnu": little_endian + {"__riscv": 1, "__riscv_xlen": 64}, + "sh4-unknown-linux-gnu": little_endian + {"__sh__": 1}, + } + + PLATFORMS["powerpc64le-unknown-linux-gnu"] = ( + PLATFORMS["powerpc64-unknown-linux-gnu"] + GCC_PLATFORM_LITTLE_ENDIAN + ) + PLATFORMS["mips64el-unknown-linux-gnuabi64"] = ( + PLATFORMS["mips64-unknown-linux-gnuabi64"] + GCC_PLATFORM_LITTLE_ENDIAN + ) + PLATFORMS["mipsel-unknown-linux-gnu"] = ( + PLATFORMS["mips-unknown-linux-gnu"] + GCC_PLATFORM_LITTLE_ENDIAN + ) + + def do_test_cross_gcc_32_64(self, host, target): + self.HOST = host + self.TARGET = target + paths = { + "/usr/bin/gcc": DEFAULT_GCC + self.PLATFORMS[host], + "/usr/bin/g++": DEFAULT_GXX + self.PLATFORMS[host], + } + cross_flags = {"flags": ["-m64" if "64" in target else "-m32"]} + self.do_toolchain_test( + paths, + { + "c_compiler": self.DEFAULT_GCC_RESULT + cross_flags, + "cxx_compiler": self.DEFAULT_GXX_RESULT + cross_flags, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + ) + self.HOST = LinuxCrossCompileToolchainTest.HOST + self.TARGET = LinuxCrossCompileToolchainTest.TARGET + + 
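do_test_cross_gcc_32_64 derives the expected cross flag from the target string
alone. A restatement of that heuristic as a standalone helper (`cross_flag` is
hypothetical, mirroring the inline conditional in the method above):

    def cross_flag(target):
        # Same rule as the inline expression in do_test_cross_gcc_32_64: a
        # target triplet containing "64" gets -m64, anything else gets -m32.
        return "-m64" if "64" in target else "-m32"

    assert cross_flag("x86_64-pc-linux-gnu") == "-m64"
    assert cross_flag("i686-pc-linux-gnu") == "-m32"
    assert cross_flag("sparc64-unknown-linux-gnu") == "-m64"
    assert cross_flag("powerpc-unknown-linux-gnu") == "-m32"

This is why the 32/64 tests below only exercise pairs whose wide variant has
"64" in its triplet (x86, sparc, ppc).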
def test_cross_x86_x64(self): + self.do_test_cross_gcc_32_64("i686-pc-linux-gnu", "x86_64-pc-linux-gnu") + self.do_test_cross_gcc_32_64("x86_64-pc-linux-gnu", "i686-pc-linux-gnu") + + def test_cross_sparc_sparc64(self): + self.do_test_cross_gcc_32_64( + "sparc-unknown-linux-gnu", "sparc64-unknown-linux-gnu" + ) + self.do_test_cross_gcc_32_64( + "sparc64-unknown-linux-gnu", "sparc-unknown-linux-gnu" + ) + + def test_cross_ppc_ppc64(self): + self.do_test_cross_gcc_32_64( + "powerpc-unknown-linux-gnu", "powerpc64-unknown-linux-gnu" + ) + self.do_test_cross_gcc_32_64( + "powerpc64-unknown-linux-gnu", "powerpc-unknown-linux-gnu" + ) + + def do_test_cross_gcc(self, host, target): + self.HOST = host + self.TARGET = target + host_cpu = host.split("-")[0] + cpu, manufacturer, os = target.split("-", 2) + toolchain_prefix = "/usr/bin/%s-%s" % (cpu, os) + paths = { + "/usr/bin/gcc": DEFAULT_GCC + self.PLATFORMS[host], + "/usr/bin/g++": DEFAULT_GXX + self.PLATFORMS[host], + } + self.do_toolchain_test( + paths, + { + "c_compiler": ( + "Target C compiler target CPU (%s) " + "does not match --target CPU (%s)" % (host_cpu, cpu) + ) + }, + ) + + paths.update( + { + "%s-gcc" % toolchain_prefix: DEFAULT_GCC + self.PLATFORMS[target], + "%s-g++" % toolchain_prefix: DEFAULT_GXX + self.PLATFORMS[target], + } + ) + self.do_toolchain_test( + paths, + { + "c_compiler": self.DEFAULT_GCC_RESULT + + {"compiler": "%s-gcc" % toolchain_prefix}, + "cxx_compiler": self.DEFAULT_GXX_RESULT + + {"compiler": "%s-g++" % toolchain_prefix}, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + ) + self.HOST = LinuxCrossCompileToolchainTest.HOST + self.TARGET = LinuxCrossCompileToolchainTest.TARGET + + def test_cross_gcc_misc(self): + for target in self.PLATFORMS: + if not target.endswith("-pc-linux-gnu"): + self.do_test_cross_gcc("x86_64-pc-linux-gnu", target) + + def test_cannot_cross(self): + self.TARGET = "mipsel-unknown-linux-gnu" + + paths = { + "/usr/bin/gcc": DEFAULT_GCC + self.PLATFORMS["mips-unknown-linux-gnu"], + "/usr/bin/g++": DEFAULT_GXX + self.PLATFORMS["mips-unknown-linux-gnu"], + } + self.do_toolchain_test( + paths, + { + "c_compiler": ( + "Target C compiler target endianness (big) " + "does not match --target endianness (little)" + ) + }, + ) + self.TARGET = LinuxCrossCompileToolchainTest.TARGET + + def test_overridden_cross_gcc(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.ARM_GCC_7_RESULT, + "cxx_compiler": self.ARM_GXX_7_RESULT, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "arm-linux-gnu-gcc-7", "CXX": "arm-linux-gnu-g++-7"}, + ) + + def test_overridden_unsupported_cross_gcc(self): + self.do_toolchain_test( + self.PATHS, + {"c_compiler": self.ARM_GCC_4_9_RESULT}, + environ={"CC": "arm-linux-gnu-gcc-4.9", "CXX": "arm-linux-gnu-g++-4.9"}, + ) + + def test_guess_cross_cxx(self): + # When CXX is not set, we guess it from CC. 
+ self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.ARM_GCC_7_RESULT, + "cxx_compiler": self.ARM_GXX_7_RESULT, + "host_c_compiler": self.DEFAULT_GCC_RESULT, + "host_cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + environ={"CC": "arm-linux-gnu-gcc-7"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.ARM_DEFAULT_GCC_RESULT, + "cxx_compiler": self.ARM_DEFAULT_GXX_RESULT, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + environ={"CC": "arm-linux-gnu-gcc", "HOST_CC": "clang"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.ARM_DEFAULT_GCC_RESULT, + "cxx_compiler": self.ARM_DEFAULT_GXX_RESULT, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + environ={ + "CC": "arm-linux-gnu-gcc", + "CXX": "arm-linux-gnu-g++", + "HOST_CC": "clang", + }, + ) + + def test_cross_clang(self): + cross_clang_result = self.DEFAULT_CLANG_RESULT + { + "flags": ["--target=arm-linux-gnu"] + } + cross_clangxx_result = self.DEFAULT_CLANGXX_RESULT + { + "flags": ["--target=arm-linux-gnu"] + } + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": cross_clang_result, + "cxx_compiler": cross_clangxx_result, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + environ={"CC": "clang", "HOST_CC": "clang"}, + ) + + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": cross_clang_result, + "cxx_compiler": cross_clangxx_result, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + environ={"CC": "clang"}, + ) + + def test_cross_atypical_clang(self): + paths = dict(self.PATHS) + paths.update( + { + "/usr/bin/afl-clang-fast": paths["/usr/bin/clang"], + "/usr/bin/afl-clang-fast++": paths["/usr/bin/clang++"], + } + ) + afl_clang_result = self.DEFAULT_CLANG_RESULT + { + "compiler": "/usr/bin/afl-clang-fast" + } + afl_clangxx_result = self.DEFAULT_CLANGXX_RESULT + { + "compiler": "/usr/bin/afl-clang-fast++" + } + self.do_toolchain_test( + paths, + { + "c_compiler": afl_clang_result + {"flags": ["--target=arm-linux-gnu"]}, + "cxx_compiler": afl_clangxx_result + + {"flags": ["--target=arm-linux-gnu"]}, + "host_c_compiler": afl_clang_result, + "host_cxx_compiler": afl_clangxx_result, + }, + environ={"CC": "afl-clang-fast", "CXX": "afl-clang-fast++"}, + ) + + +class OSXCrossToolchainTest(BaseToolchainTest): + TARGET = "i686-apple-darwin11.2.0" + PATHS = dict(LinuxToolchainTest.PATHS) + PATHS.update( + { + "/usr/bin/clang": CLANG_7_0 + CLANG_PLATFORM_X86_64_LINUX, + "/usr/bin/clang++": CLANGXX_7_0 + CLANG_PLATFORM_X86_64_LINUX, + } + ) + DEFAULT_CLANG_RESULT = CompilerResult( + flags=["-std=gnu99"], + version="7.0.0", + type="clang", + compiler="/usr/bin/clang", + language="C", + ) + DEFAULT_CLANGXX_RESULT = CompilerResult( + flags=["-std=gnu++17"], + version="7.0.0", + type="clang", + compiler="/usr/bin/clang++", + language="C++", + ) + + def test_osx_cross(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_CLANG_RESULT + + OSXToolchainTest.SYSROOT_FLAGS + + {"flags": ["--target=i686-apple-darwin11.2.0"]}, + "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + + {"flags": PrependFlags(["-stdlib=libc++"])} + + OSXToolchainTest.SYSROOT_FLAGS + + {"flags": ["--target=i686-apple-darwin11.2.0"]}, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + environ={"CC": "clang"}, + 
args=["--with-macos-sdk=%s" % OSXToolchainTest.SYSROOT_FLAGS["flags"][1]], + ) + + def test_cannot_osx_cross(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": "Target C compiler target kernel (Linux) does not " + "match --target kernel (Darwin)" + }, + environ={"CC": "gcc"}, + args=["--with-macos-sdk=%s" % OSXToolchainTest.SYSROOT_FLAGS["flags"][1]], + ) + + +class WindowsCrossToolchainTest(BaseToolchainTest): + TARGET = "x86_64-pc-windows-msvc" + DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT + DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT + + def test_clang_cl_cross(self): + paths = {"/usr/bin/clang-cl": CLANG_CL_9_0 + CLANG_CL_PLATFORM_X86_64} + paths.update(LinuxToolchainTest.PATHS) + self.do_toolchain_test( + paths, + { + "c_compiler": MingwToolchainTest.CLANG_CL_9_0_RESULT, + "cxx_compiler": MingwToolchainTest.CLANGXX_CL_9_0_RESULT, + "host_c_compiler": self.DEFAULT_CLANG_RESULT, + "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT, + }, + ) + + +class OpenBSDToolchainTest(BaseToolchainTest): + HOST = "x86_64-unknown-openbsd6.1" + TARGET = "x86_64-unknown-openbsd6.1" + PATHS = { + "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_64 + GCC_PLATFORM_OPENBSD, + "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_64 + GCC_PLATFORM_OPENBSD, + } + DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT + DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT + + def test_gcc(self): + self.do_toolchain_test( + self.PATHS, + { + "c_compiler": self.DEFAULT_GCC_RESULT, + "cxx_compiler": self.DEFAULT_GXX_RESULT, + }, + ) + + +@memoize +def gen_invoke_cargo(version, rustup_wrapper=False): + def invoke_cargo(stdin, args): + args = tuple(args) + if not rustup_wrapper and args == ("+stable",): + return (101, "", "we are the real thing") + if args == ("--version", "--verbose"): + return 0, "cargo %s\nrelease: %s" % (version, version), "" + raise NotImplementedError("unsupported arguments") + + return invoke_cargo + + +@memoize +def gen_invoke_rustc(version, rustup_wrapper=False): + def invoke_rustc(stdin, args): + args = tuple(args) + # TODO: we don't have enough machinery set up to test the `rustup which` + # fallback yet. + if not rustup_wrapper and args == ("+stable",): + return (1, "", "error: couldn't read +stable: No such file or directory") + if args == ("--version", "--verbose"): + return ( + 0, + "rustc %s\nrelease: %s\nhost: x86_64-unknown-linux-gnu" + % (version, version), + "", + ) + if args == ("--print", "target-list"): + # Raw list returned by rustc version 1.32, + ios, which somehow + # don't appear in the default list. 
+ # https://github.com/rust-lang/rust/issues/36156 + rust_targets = [ + "aarch64-apple-ios", + "aarch64-fuchsia", + "aarch64-linux-android", + "aarch64-pc-windows-msvc", + "aarch64-unknown-cloudabi", + "aarch64-unknown-freebsd", + "aarch64-unknown-hermit", + "aarch64-unknown-linux-gnu", + "aarch64-unknown-linux-musl", + "aarch64-unknown-netbsd", + "aarch64-unknown-none", + "aarch64-unknown-openbsd", + "arm-linux-androideabi", + "arm-unknown-linux-gnueabi", + "arm-unknown-linux-gnueabihf", + "arm-unknown-linux-musleabi", + "arm-unknown-linux-musleabihf", + "armebv7r-none-eabi", + "armebv7r-none-eabihf", + "armv4t-unknown-linux-gnueabi", + "armv5te-unknown-linux-gnueabi", + "armv5te-unknown-linux-musleabi", + "armv6-unknown-netbsd-eabihf", + "armv7-linux-androideabi", + "armv7-unknown-cloudabi-eabihf", + "armv7-unknown-linux-gnueabihf", + "armv7-unknown-linux-musleabihf", + "armv7-unknown-netbsd-eabihf", + "armv7r-none-eabi", + "armv7r-none-eabihf", + "armv7s-apple-ios", + "asmjs-unknown-emscripten", + "i386-apple-ios", + "i586-pc-windows-msvc", + "i586-unknown-linux-gnu", + "i586-unknown-linux-musl", + "i686-apple-darwin", + "i686-linux-android", + "i686-pc-windows-gnu", + "i686-pc-windows-msvc", + "i686-unknown-cloudabi", + "i686-unknown-dragonfly", + "i686-unknown-freebsd", + "i686-unknown-haiku", + "i686-unknown-linux-gnu", + "i686-unknown-linux-musl", + "i686-unknown-netbsd", + "i686-unknown-openbsd", + "mips-unknown-linux-gnu", + "mips-unknown-linux-musl", + "mips-unknown-linux-uclibc", + "mips64-unknown-linux-gnuabi64", + "mips64el-unknown-linux-gnuabi64", + "mipsel-unknown-linux-gnu", + "mipsel-unknown-linux-musl", + "mipsel-unknown-linux-uclibc", + "msp430-none-elf", + "powerpc-unknown-linux-gnu", + "powerpc-unknown-linux-gnuspe", + "powerpc-unknown-linux-musl", + "powerpc-unknown-netbsd", + "powerpc64-unknown-linux-gnu", + "powerpc64-unknown-linux-musl", + "powerpc64le-unknown-linux-gnu", + "powerpc64le-unknown-linux-musl", + "riscv32imac-unknown-none-elf", + "riscv32imc-unknown-none-elf", + "s390x-unknown-linux-gnu", + "sparc-unknown-linux-gnu", + "sparc64-unknown-linux-gnu", + "sparc64-unknown-netbsd", + "sparcv9-sun-solaris", + "thumbv6m-none-eabi", + "thumbv7a-pc-windows-msvc", + "thumbv7em-none-eabi", + "thumbv7em-none-eabihf", + "thumbv7m-none-eabi", + "thumbv8m.base-none-eabi", + "wasm32-experimental-emscripten", + "wasm32-unknown-emscripten", + "wasm32-unknown-unknown", + "x86_64-apple-darwin", + "x86_64-apple-ios", + "x86_64-fortanix-unknown-sgx", + "x86_64-fuchsia", + "x86_64-linux-android", + "x86_64-pc-windows-gnu", + "x86_64-pc-windows-msvc", + "x86_64-rumprun-netbsd", + "x86_64-sun-solaris", + "x86_64-unknown-bitrig", + "x86_64-unknown-cloudabi", + "x86_64-unknown-dragonfly", + "x86_64-unknown-freebsd", + "x86_64-unknown-haiku", + "x86_64-unknown-hermit", + "x86_64-unknown-l4re-uclibc", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-gnux32", + "x86_64-unknown-linux-musl", + "x86_64-unknown-netbsd", + "x86_64-unknown-openbsd", + "x86_64-unknown-redox", + ] + # Additional targets from 1.33 + if Version(version) >= "1.33.0": + rust_targets += [ + "thumbv7neon-linux-androideabi", + "thumbv7neon-unknown-linux-gnueabihf", + "x86_64-unknown-uefi", + "thumbv8m.main-none-eabi", + "thumbv8m.main-none-eabihf", + ] + # Additional targets from 1.34 + if Version(version) >= "1.34.0": + rust_targets += [ + "nvptx64-nvidia-cuda", + "powerpc64-unknown-freebsd", + "riscv64gc-unknown-none-elf", + "riscv64imac-unknown-none-elf", + ] + # Additional targets from 1.35 + if 
Version(version) >= "1.35.0": + rust_targets += [ + "armv6-unknown-freebsd", + "armv7-unknown-freebsd", + "mipsisa32r6-unknown-linux-gnu", + "mipsisa32r6el-unknown-linux-gnu", + "mipsisa64r6-unknown-linux-gnuabi64", + "mipsisa64r6el-unknown-linux-gnuabi64", + "wasm32-unknown-wasi", + ] + # Additional targets from 1.36 + if Version(version) >= "1.36.0": + rust_targets += ["wasm32-wasi"] + rust_targets.remove("wasm32-unknown-wasi") + rust_targets.remove("x86_64-unknown-bitrig") + # Additional targets from 1.37 + if Version(version) >= "1.37.0": + rust_targets += ["x86_64-pc-solaris"] + # Additional targets from 1.38 + if Version(version) >= "1.38.0": + rust_targets += [ + "aarch64-unknown-redox", + "aarch64-wrs-vxworks", + "armv7-unknown-linux-gnueabi", + "armv7-unknown-linux-musleabi", + "armv7-wrs-vxworks", + "hexagon-unknown-linux-musl", + "i586-wrs-vxworks", + "i686-uwp-windows-gnu", + "i686-wrs-vxworks", + "powerpc-wrs-vxworks", + "powerpc-wrs-vxworks-spe", + "powerpc64-wrs-vxworks", + "riscv32i-unknown-none-elf", + "x86_64-uwp-windows-gnu", + "x86_64-wrs-vxworks", + ] + # Additional targets from 1.38 + if Version(version) >= "1.39.0": + rust_targets += [ + "aarch64-uwp-windows-msvc", + "armv7-wrs-vxworks-eabihf", + "i686-unknown-uefi", + "i686-uwp-windows-msvc", + "mips64-unknown-linux-muslabi64", + "mips64el-unknown-linux-muslabi64", + "sparc64-unknown-openbsd", + "x86_64-linux-kernel", + "x86_64-uwp-windows-msvc", + ] + rust_targets.remove("armv7-wrs-vxworks") + rust_targets.remove("i586-wrs-vxworks") + + return 0, "\n".join(sorted(rust_targets)), "" + if ( + len(args) == 6 + and args[:2] == ("--crate-type", "staticlib") + and args[2].startswith("--target=") + and args[3] == "-o" + ): + with open(args[4], "w") as fh: + fh.write("foo") + return 0, "", "" + raise NotImplementedError("unsupported arguments") + + return invoke_rustc + + +class RustTest(BaseConfigureTest): + def get_rust_target( + self, target, compiler_type="gcc", version=MINIMUM_RUST_VERSION, arm_target=None + ): + environ = { + "PATH": os.pathsep.join(mozpath.abspath(p) for p in ("/bin", "/usr/bin")) + } + + paths = { + mozpath.abspath("/usr/bin/cargo"): gen_invoke_cargo(version), + mozpath.abspath("/usr/bin/rustc"): gen_invoke_rustc(version), + } + + self.TARGET = target + sandbox = self.get_sandbox(paths, {}, [], environ) + + # Trick the sandbox into not running the target compiler check + dep = sandbox._depends[sandbox["c_compiler"]] + getattr(sandbox, "__value_for_depends")[(dep,)] = CompilerResult( + type=compiler_type + ) + # Same for the arm_target checks. 
+ dep = sandbox._depends[sandbox["arm_target"]] + getattr(sandbox, "__value_for_depends")[ + (dep,) + ] = arm_target or ReadOnlyNamespace( + arm_arch=7, thumb2=False, fpu="vfpv2", float_abi="softfp" + ) + return sandbox._value_for(sandbox["rust_target_triple"]) + + def test_rust_target(self): + # Cases where the output of config.sub matches a rust target + for straightforward in ( + "x86_64-unknown-dragonfly", + "aarch64-unknown-freebsd", + "i686-unknown-freebsd", + "x86_64-unknown-freebsd", + "sparc64-unknown-netbsd", + "i686-unknown-netbsd", + "x86_64-unknown-netbsd", + "i686-unknown-openbsd", + "x86_64-unknown-openbsd", + "aarch64-unknown-linux-gnu", + "sparc64-unknown-linux-gnu", + "i686-unknown-linux-gnu", + "i686-apple-darwin", + "x86_64-apple-darwin", + "mips-unknown-linux-gnu", + "mipsel-unknown-linux-gnu", + "mips64-unknown-linux-gnuabi64", + "mips64el-unknown-linux-gnuabi64", + "powerpc64-unknown-linux-gnu", + "powerpc64le-unknown-linux-gnu", + "i686-pc-windows-msvc", + "x86_64-pc-windows-msvc", + "aarch64-pc-windows-msvc", + "i686-pc-windows-gnu", + "x86_64-pc-windows-gnu", + ): + self.assertEqual(self.get_rust_target(straightforward), straightforward) + + # Cases where the output of config.sub is different + for autoconf, rust in ( + ("aarch64-unknown-linux-android", "aarch64-linux-android"), + ("arm-unknown-linux-androideabi", "armv7-linux-androideabi"), + ("armv7-unknown-linux-androideabi", "armv7-linux-androideabi"), + ("i386-unknown-linux-android", "i686-linux-android"), + ("i686-unknown-linux-android", "i686-linux-android"), + ("i686-pc-linux-gnu", "i686-unknown-linux-gnu"), + ("x86_64-unknown-linux-android", "x86_64-linux-android"), + ("x86_64-pc-linux-gnu", "x86_64-unknown-linux-gnu"), + ("sparcv9-sun-solaris2", "sparcv9-sun-solaris"), + ("x86_64-sun-solaris2", "x86_64-sun-solaris"), + ): + self.assertEqual(self.get_rust_target(autoconf), rust) + + # Windows + for autoconf, building_with_gcc, rust in ( + ("i686-pc-mingw32", "clang-cl", "i686-pc-windows-msvc"), + ("x86_64-pc-mingw32", "clang-cl", "x86_64-pc-windows-msvc"), + ("i686-pc-mingw32", "clang", "i686-pc-windows-gnu"), + ("x86_64-pc-mingw32", "clang", "x86_64-pc-windows-gnu"), + ("i686-w64-mingw32", "clang", "i686-pc-windows-gnu"), + ("x86_64-w64-mingw32", "clang", "x86_64-pc-windows-gnu"), + ("aarch64-windows-mingw32", "clang-cl", "aarch64-pc-windows-msvc"), + ): + self.assertEqual(self.get_rust_target(autoconf, building_with_gcc), rust) + + # Arm special cases + self.assertEqual( + self.get_rust_target( + "arm-unknown-linux-androideabi", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="neon", thumb2=True, float_abi="softfp" + ), + ), + "thumbv7neon-linux-androideabi", + ) + + self.assertEqual( + self.get_rust_target( + "arm-unknown-linux-androideabi", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="neon", thumb2=False, float_abi="softfp" + ), + ), + "armv7-linux-androideabi", + ) + + self.assertEqual( + self.get_rust_target( + "arm-unknown-linux-androideabi", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="vfpv2", thumb2=True, float_abi="softfp" + ), + ), + "armv7-linux-androideabi", + ) + + self.assertEqual( + self.get_rust_target( + "armv7-unknown-linux-gnueabihf", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="neon", thumb2=True, float_abi="hard" + ), + ), + "thumbv7neon-unknown-linux-gnueabihf", + ) + + self.assertEqual( + self.get_rust_target( + "armv7-unknown-linux-gnueabihf", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="neon", thumb2=False, float_abi="hard" + ), + ), + 
"armv7-unknown-linux-gnueabihf", + ) + + self.assertEqual( + self.get_rust_target( + "armv7-unknown-linux-gnueabihf", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="vfpv2", thumb2=True, float_abi="hard" + ), + ), + "armv7-unknown-linux-gnueabihf", + ) + + self.assertEqual( + self.get_rust_target( + "arm-unknown-freebsd13.0-gnueabihf", + arm_target=ReadOnlyNamespace( + arm_arch=7, fpu="vfpv2", thumb2=True, float_abi="hard" + ), + ), + "armv7-unknown-freebsd", + ) + + self.assertEqual( + self.get_rust_target( + "arm-unknown-freebsd13.0-gnueabihf", + arm_target=ReadOnlyNamespace( + arm_arch=6, fpu=None, thumb2=False, float_abi="hard" + ), + ), + "armv6-unknown-freebsd", + ) + + self.assertEqual( + self.get_rust_target( + "arm-unknown-linux-gnueabi", + arm_target=ReadOnlyNamespace( + arm_arch=4, fpu=None, thumb2=False, float_abi="softfp" + ), + ), + "armv4t-unknown-linux-gnueabi", + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py new file mode 100644 index 0000000000..f42778215b --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py @@ -0,0 +1,433 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import copy +import re +import unittest +from fnmatch import fnmatch +from textwrap import dedent + +import six +from mozpack import path as mozpath +from mozunit import MockedOpen, main +from six import StringIO + +from mozbuild.preprocessor import Preprocessor +from mozbuild.util import ReadOnlyNamespace + + +class CompilerPreprocessor(Preprocessor): + # The C preprocessor only expands macros when they are not in C strings. + # For now, we don't look very hard for C strings because they don't matter + # that much for our unit tests, but we at least avoid expanding in the + # simple "FOO" case. + VARSUBST = re.compile('(?\w+)(?!")', re.U) + NON_WHITESPACE = re.compile("\S") + HAS_FEATURE_OR_BUILTIN = re.compile( + '(__has_(?:feature|builtin|attribute|warning))\("?([^"\)]*)"?\)' + ) + + def __init__(self, *args, **kwargs): + Preprocessor.__init__(self, *args, **kwargs) + self.do_filter("c_substitution") + self.setMarker("#\s*") + + def do_if(self, expression, **kwargs): + # The C preprocessor handles numbers following C rules, which is a + # different handling than what our Preprocessor does out of the box. + # Hack around it enough that the configure tests work properly. + context = self.context + + def normalize_numbers(value): + if isinstance(value, six.string_types): + if value[-1:] == "L" and value[:-1].isdigit(): + value = int(value[:-1]) + return value + + # Our Preprocessor doesn't handle macros with parameters, so we hack + # around that for __has_feature()-like things. 
+ + def normalize_has_feature_or_builtin(expr): + return ( + self.HAS_FEATURE_OR_BUILTIN.sub(r"\1\2", expr) + .replace("-", "_") + .replace("+", "_") + ) + + self.context = self.Context( + (normalize_has_feature_or_builtin(k), normalize_numbers(v)) + for k, v in six.iteritems(context) + ) + try: + return Preprocessor.do_if( + self, normalize_has_feature_or_builtin(expression), **kwargs + ) + finally: + self.context = context + + class Context(dict): + def __missing__(self, key): + return None + + def filter_c_substitution(self, line): + def repl(matchobj): + varname = matchobj.group("VAR") + if varname in self.context: + result = six.text_type(self.context[varname]) + # The C preprocessor inserts whitespaces around expanded + # symbols. + start, end = matchobj.span("VAR") + if self.NON_WHITESPACE.match(line[start - 1 : start]): + result = " " + result + if self.NON_WHITESPACE.match(line[end : end + 1]): + result = result + " " + return result + return matchobj.group(0) + + return self.VARSUBST.sub(repl, line) + + +class TestCompilerPreprocessor(unittest.TestCase): + def test_expansion(self): + pp = CompilerPreprocessor({"A": 1, "B": "2", "C": "c", "D": "d"}) + pp.out = StringIO() + input = StringIO('A.B.C "D"') + input.name = "foo" + pp.do_include(input) + + self.assertEqual(pp.out.getvalue(), '1 . 2 . c "D"') + + def test_normalization(self): + pp = CompilerPreprocessor( + {"__has_attribute(bar)": 1, '__has_warning("-Wc++98-foo")': 1} + ) + pp.out = StringIO() + input = StringIO( + dedent( + """\ + #if __has_warning("-Wbar") + WBAR + #endif + #if __has_warning("-Wc++98-foo") + WFOO + #endif + #if !__has_warning("-Wc++98-foo") + NO_WFOO + #endif + #if __has_attribute(bar) + BAR + #else + NO_BAR + #endif + #if !__has_attribute(foo) + NO_FOO + #endif + """ + ) + ) + + input.name = "foo" + pp.do_include(input) + + self.assertEqual(pp.out.getvalue(), "WFOO\nBAR\nNO_FOO\n") + + def test_condition(self): + pp = CompilerPreprocessor({"A": 1, "B": "2", "C": "0L"}) + pp.out = StringIO() + input = StringIO( + dedent( + """\ + #ifdef A + IFDEF_A + #endif + #if A + IF_A + #endif + # if B + IF_B + # else + IF_NOT_B + # endif + #if !C + IF_NOT_C + #else + IF_C + #endif + """ + ) + ) + input.name = "foo" + pp.do_include(input) + + self.assertEqual("IFDEF_A\nIF_A\nIF_NOT_B\nIF_NOT_C\n", pp.out.getvalue()) + + +class FakeCompiler(dict): + """Defines a fake compiler for use in toolchain tests below. + + The definitions given when creating an instance can have one of two + forms: + - a dict giving preprocessor symbols and their respective value, e.g. + { '__GNUC__': 4, '__STDC__': 1 } + - a dict associating flags to preprocessor symbols. An entry for `None` + is required in this case. Those are the baseline preprocessor symbols. + Additional entries describe additional flags to set or existing flags + to unset (with a value of `False`). + { + None: { '__GNUC__': 4, '__STDC__': 1, '__STRICT_ANSI__': 1 }, + '-std=gnu99': { '__STDC_VERSION__': '199901L', + '__STRICT_ANSI__': False }, + } + With the dict above, invoking the preprocessor with no additional flags + would define __GNUC__, __STDC__ and __STRICT_ANSI__, and with -std=gnu99, + __GNUC__, __STDC__, and __STDC_VERSION__ (__STRICT_ANSI__ would be + unset). + It is also possible to have different symbols depending on the source + file extension. In this case, the key is '*.ext'. e.g. + { + '*.c': { '__STDC__': 1 }, + '*.cpp': { '__cplusplus': '199711L' }, + } + + All the given definitions are merged together. 
+ + A FakeCompiler instance itself can be used as a definition to create + another FakeCompiler. + + For convenience, FakeCompiler instances can be added (+) to one another. + """ + + def __init__(self, *definitions): + for definition in definitions: + if all(not isinstance(d, dict) for d in six.itervalues(definition)): + definition = {None: definition} + for key, value in six.iteritems(definition): + self.setdefault(key, {}).update(value) + + def __call__(self, stdin, args): + files = [] + flags = [] + args = iter(args) + while True: + arg = next(args, None) + if arg is None: + break + if arg.startswith("-"): + # Ignore -isysroot/--sysroot and the argument that follows it. + if arg in ("-isysroot", "--sysroot"): + next(args, None) + else: + flags.append(arg) + else: + files.append(arg) + + if "-E" in flags: + assert len(files) == 1 + file = files[0] + pp = CompilerPreprocessor(self[None]) + + def apply_defn(defn): + for k, v in six.iteritems(defn): + if v is False: + if k in pp.context: + del pp.context[k] + else: + pp.context[k] = v + + for glob, defn in six.iteritems(self): + if glob and not glob.startswith("-") and fnmatch(file, glob): + apply_defn(defn) + + for flag in flags: + apply_defn(self.get(flag, {})) + + pp.out = StringIO() + pp.do_include(file) + return 0, pp.out.getvalue(), "" + elif "-c" in flags: + if "-funknown-flag" in flags: + return 1, "", "" + return 0, "", "" + + return 1, "", "" + + def __add__(self, other): + return FakeCompiler(self, other) + + +class TestFakeCompiler(unittest.TestCase): + def test_fake_compiler(self): + with MockedOpen({"file": "A B C", "file.c": "A B C"}): + compiler = FakeCompiler({"A": "1", "B": "2"}) + self.assertEqual(compiler(None, ["-E", "file"]), (0, "1 2 C", "")) + + compiler = FakeCompiler( + { + None: {"A": "1", "B": "2"}, + "-foo": {"C": "foo"}, + "-bar": {"B": "bar", "C": "bar"}, + "-qux": {"B": False}, + "*.c": {"B": "42"}, + } + ) + self.assertEqual(compiler(None, ["-E", "file"]), (0, "1 2 C", "")) + self.assertEqual(compiler(None, ["-E", "-foo", "file"]), (0, "1 2 foo", "")) + self.assertEqual( + compiler(None, ["-E", "-bar", "file"]), (0, "1 bar bar", "") + ) + self.assertEqual(compiler(None, ["-E", "-qux", "file"]), (0, "1 B C", "")) + self.assertEqual( + compiler(None, ["-E", "-foo", "-bar", "file"]), (0, "1 bar bar", "") + ) + self.assertEqual( + compiler(None, ["-E", "-bar", "-foo", "file"]), (0, "1 bar foo", "") + ) + self.assertEqual( + compiler(None, ["-E", "-bar", "-qux", "file"]), (0, "1 B bar", "") + ) + self.assertEqual( + compiler(None, ["-E", "-qux", "-bar", "file"]), (0, "1 bar bar", "") + ) + self.assertEqual(compiler(None, ["-E", "file.c"]), (0, "1 42 C", "")) + self.assertEqual( + compiler(None, ["-E", "-bar", "file.c"]), (0, "1 bar bar", "") + ) + + def test_multiple_definitions(self): + compiler = FakeCompiler({"A": 1, "B": 2}, {"C": 3}) + + self.assertEqual(compiler, {None: {"A": 1, "B": 2, "C": 3}}) + compiler = FakeCompiler({"A": 1, "B": 2}, {"B": 4, "C": 3}) + + self.assertEqual(compiler, {None: {"A": 1, "B": 4, "C": 3}}) + compiler = FakeCompiler( + {"A": 1, "B": 2}, {None: {"B": 4, "C": 3}, "-foo": {"D": 5}} + ) + + self.assertEqual(compiler, {None: {"A": 1, "B": 4, "C": 3}, "-foo": {"D": 5}}) + + compiler = FakeCompiler( + {None: {"A": 1, "B": 2}, "-foo": {"D": 5}}, + {"-foo": {"D": 5}, "-bar": {"E": 6}}, + ) + + self.assertEqual( + compiler, {None: {"A": 1, "B": 2}, "-foo": {"D": 5}, "-bar": {"E": 6}} + ) + + +class PrependFlags(list): + """Wrapper to allow to Prepend to flags instead of appending, in + 
CompilerResult. + """ + + +class CompilerResult(ReadOnlyNamespace): + """Helper of convenience to manipulate toolchain results in unit tests + + When adding a dict, the result is a new CompilerResult with the values + from the dict replacing those from the CompilerResult, except for `flags`, + where the value from the dict extends the `flags` in `self`. + """ + + def __init__( + self, wrapper=None, compiler="", version="", type="", language="", flags=None + ): + if flags is None: + flags = [] + if wrapper is None: + wrapper = [] + super(CompilerResult, self).__init__( + flags=flags, + version=version, + type=type, + compiler=mozpath.abspath(compiler), + wrapper=wrapper, + language=language, + ) + + def __add__(self, other): + assert isinstance(other, dict) + result = copy.deepcopy(self.__dict__) + for k, v in six.iteritems(other): + if k == "flags": + flags = result.setdefault(k, []) + if isinstance(v, PrependFlags): + flags[:0] = v + else: + flags.extend(v) + else: + result[k] = v + return CompilerResult(**result) + + +class TestCompilerResult(unittest.TestCase): + def test_compiler_result(self): + result = CompilerResult() + self.assertEqual( + result.__dict__, + { + "wrapper": [], + "compiler": mozpath.abspath(""), + "version": "", + "type": "", + "language": "", + "flags": [], + }, + ) + + result = CompilerResult( + compiler="/usr/bin/gcc", + version="4.2.1", + type="gcc", + language="C", + flags=["-std=gnu99"], + ) + self.assertEqual( + result.__dict__, + { + "wrapper": [], + "compiler": mozpath.abspath("/usr/bin/gcc"), + "version": "4.2.1", + "type": "gcc", + "language": "C", + "flags": ["-std=gnu99"], + }, + ) + + result2 = result + {"flags": ["-m32"]} + self.assertEqual( + result2.__dict__, + { + "wrapper": [], + "compiler": mozpath.abspath("/usr/bin/gcc"), + "version": "4.2.1", + "type": "gcc", + "language": "C", + "flags": ["-std=gnu99", "-m32"], + }, + ) + # Original flags are untouched. + self.assertEqual(result.flags, ["-std=gnu99"]) + + result3 = result + { + "compiler": "/usr/bin/gcc-4.7", + "version": "4.7.3", + "flags": ["-m32"], + } + self.assertEqual( + result3.__dict__, + { + "wrapper": [], + "compiler": mozpath.abspath("/usr/bin/gcc-4.7"), + "version": "4.7.3", + "type": "gcc", + "language": "C", + "flags": ["-std=gnu99", "-m32"], + }, + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py new file mode 100644 index 0000000000..e6b96b3627 --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py @@ -0,0 +1,102 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
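+
+# (Orientation note: these tests drive the configure sandbox from
+# common.BaseConfigureTest and inspect the computed values of
+# all_configure_options and developer_options.)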
+ +import os + +from buildconfig import topsrcdir +from mozpack import path as mozpath +from mozunit import MockedOpen, main + +from common import BaseConfigureTest +from mozbuild.configure.options import InvalidOptionError + + +class TestToolkitMozConfigure(BaseConfigureTest): + def test_moz_configure_options(self): + def get_value_for(args=[], environ={}, mozconfig=""): + sandbox = self.get_sandbox({}, {}, args, environ, mozconfig) + + # Add a fake old-configure option + sandbox.option_impl( + "--with-foo", nargs="*", help="Help missing for old configure options" + ) + + # Remove all implied options, otherwise, getting + # all_configure_options below triggers them, and that triggers + # configure parts that aren't expected to run during this test. + del sandbox._implied_options[:] + result = sandbox._value_for(sandbox["all_configure_options"]) + shell = mozpath.abspath("/bin/sh") + return result.replace("CONFIG_SHELL=%s " % shell, "") + + self.assertEqual( + "--enable-application=browser", + get_value_for(["--enable-application=browser"]), + ) + + self.assertEqual( + "--enable-application=browser " "MOZ_VTUNE=1", + get_value_for(["--enable-application=browser", "MOZ_VTUNE=1"]), + ) + + value = get_value_for( + environ={"MOZ_VTUNE": "1"}, + mozconfig="ac_add_options --enable-application=browser", + ) + + self.assertEqual("--enable-application=browser MOZ_VTUNE=1", value) + + # --disable-js-shell is the default, so it's filtered out. + self.assertEqual( + "--enable-application=browser", + get_value_for(["--enable-application=browser", "--disable-js-shell"]), + ) + + # Normally, --without-foo would be filtered out because that's the + # default, but since it is a (fake) old-configure option, it always + # appears. + self.assertEqual( + "--enable-application=browser --without-foo", + get_value_for(["--enable-application=browser", "--without-foo"]), + ) + self.assertEqual( + "--enable-application=browser --with-foo", + get_value_for(["--enable-application=browser", "--with-foo"]), + ) + + self.assertEqual( + "--enable-application=browser '--with-foo=foo bar'", + get_value_for(["--enable-application=browser", "--with-foo=foo bar"]), + ) + + def test_developer_options(self, milestone="42.0a1"): + def get_value(args=[], environ={}): + sandbox = self.get_sandbox({}, {}, args, environ) + return sandbox._value_for(sandbox["developer_options"]) + + milestone_path = os.path.join(topsrcdir, "config", "milestone.txt") + with MockedOpen({milestone_path: milestone}): + # developer options are enabled by default on "nightly" milestone + # only + self.assertEqual(get_value(), "a" in milestone or None) + + self.assertEqual(get_value(["--enable-release"]), None) + + self.assertEqual(get_value(environ={"MOZILLA_OFFICIAL": 1}), None) + + self.assertEqual( + get_value(["--enable-release"], environ={"MOZILLA_OFFICIAL": 1}), None + ) + + with self.assertRaises(InvalidOptionError): + get_value(["--disable-release"], environ={"MOZILLA_OFFICIAL": 1}) + + self.assertEqual(get_value(environ={"MOZ_AUTOMATION": 1}), None) + + def test_developer_options_release(self): + self.test_developer_options("42.0") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/configure/test_util.py b/python/mozbuild/mozbuild/test/configure/test_util.py new file mode 100644 index 0000000000..81c2e2a8bf --- /dev/null +++ b/python/mozbuild/mozbuild/test/configure/test_util.py @@ -0,0 +1,539 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import logging +import os +import sys +import tempfile +import textwrap +import unittest + +import six +from buildconfig import topsrcdir +from mozpack import path as mozpath +from mozunit import main +from six import StringIO + +from common import ConfigureTestSandbox +from mozbuild.configure import ConfigureSandbox +from mozbuild.configure.util import ( + ConfigureOutputHandler, + LineIO, + Version, + getpreferredencoding, +) +from mozbuild.util import exec_ + + +class TestConfigureOutputHandler(unittest.TestCase): + def test_separation(self): + out = StringIO() + err = StringIO() + name = "%s.test_separation" % self.__class__.__name__ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + logger.addHandler(ConfigureOutputHandler(out, err)) + + logger.error("foo") + logger.warning("bar") + logger.info("baz") + # DEBUG level is not printed out by this handler + logger.debug("qux") + + self.assertEqual(out.getvalue(), "baz\n") + self.assertEqual(err.getvalue(), "foo\nbar\n") + + def test_format(self): + out = StringIO() + err = StringIO() + name = "%s.test_format" % self.__class__.__name__ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + handler = ConfigureOutputHandler(out, err) + handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s")) + logger.addHandler(handler) + + logger.error("foo") + logger.warning("bar") + logger.info("baz") + # DEBUG level is not printed out by this handler + logger.debug("qux") + + self.assertEqual(out.getvalue(), "baz\n") + self.assertEqual(err.getvalue(), "ERROR:foo\n" "WARNING:bar\n") + + def test_continuation(self): + out = StringIO() + name = "%s.test_continuation" % self.__class__.__name__ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + handler = ConfigureOutputHandler(out, out) + handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s")) + logger.addHandler(handler) + + logger.info("foo") + logger.info("checking bar... ") + logger.info("yes") + logger.info("qux") + + self.assertEqual(out.getvalue(), "foo\n" "checking bar... yes\n" "qux\n") + + out.seek(0) + out.truncate() + + logger.info("foo") + logger.info("checking bar... ") + logger.warning("hoge") + logger.info("no") + logger.info("qux") + + self.assertEqual( + out.getvalue(), + "foo\n" "checking bar... \n" "WARNING:hoge\n" " ... no\n" "qux\n", + ) + + out.seek(0) + out.truncate() + + logger.info("foo") + logger.info("checking bar... ") + logger.warning("hoge") + logger.warning("fuga") + logger.info("no") + logger.info("qux") + + self.assertEqual( + out.getvalue(), + "foo\n" + "checking bar... \n" + "WARNING:hoge\n" + "WARNING:fuga\n" + " ... no\n" + "qux\n", + ) + + out.seek(0) + out.truncate() + err = StringIO() + + logger.removeHandler(handler) + handler = ConfigureOutputHandler(out, err) + handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s")) + logger.addHandler(handler) + + logger.info("foo") + logger.info("checking bar... ") + logger.warning("hoge") + logger.warning("fuga") + logger.info("no") + logger.info("qux") + + self.assertEqual(out.getvalue(), "foo\n" "checking bar... 
no\n" "qux\n") + + self.assertEqual(err.getvalue(), "WARNING:hoge\n" "WARNING:fuga\n") + + def test_queue_debug(self): + out = StringIO() + name = "%s.test_queue_debug" % self.__class__.__name__ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + handler = ConfigureOutputHandler(out, out, maxlen=3) + handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s")) + logger.addHandler(handler) + + with handler.queue_debug(): + logger.info("checking bar... ") + logger.debug("do foo") + logger.info("yes") + logger.info("qux") + + self.assertEqual(out.getvalue(), "checking bar... yes\n" "qux\n") + + out.seek(0) + out.truncate() + + with handler.queue_debug(): + logger.info("checking bar... ") + logger.debug("do foo") + logger.info("no") + logger.error("fail") + + self.assertEqual( + out.getvalue(), "checking bar... no\n" "DEBUG:do foo\n" "ERROR:fail\n" + ) + + out.seek(0) + out.truncate() + + with handler.queue_debug(): + logger.info("checking bar... ") + logger.debug("do foo") + logger.debug("do bar") + logger.debug("do baz") + logger.info("no") + logger.error("fail") + + self.assertEqual( + out.getvalue(), + "checking bar... no\n" + "DEBUG:do foo\n" + "DEBUG:do bar\n" + "DEBUG:do baz\n" + "ERROR:fail\n", + ) + + out.seek(0) + out.truncate() + + with handler.queue_debug(): + logger.info("checking bar... ") + logger.debug("do foo") + logger.debug("do bar") + logger.debug("do baz") + logger.debug("do qux") + logger.debug("do hoge") + logger.info("no") + logger.error("fail") + + self.assertEqual( + out.getvalue(), + "checking bar... no\n" + "DEBUG:\n" + "DEBUG:do baz\n" + "DEBUG:do qux\n" + "DEBUG:do hoge\n" + "ERROR:fail\n", + ) + + out.seek(0) + out.truncate() + + try: + with handler.queue_debug(): + logger.info("checking bar... ") + logger.debug("do foo") + logger.debug("do bar") + logger.info("no") + e = Exception("fail") + raise e + except Exception as caught: + self.assertIs(caught, e) + + self.assertEqual( + out.getvalue(), "checking bar... 
no\n" "DEBUG:do foo\n" "DEBUG:do bar\n" + ) + + def test_queue_debug_reentrant(self): + out = StringIO() + name = "%s.test_queue_debug_reentrant" % self.__class__.__name__ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + handler = ConfigureOutputHandler(out, out, maxlen=10) + handler.setFormatter(logging.Formatter("%(levelname)s| %(message)s")) + logger.addHandler(handler) + + try: + with handler.queue_debug(): + logger.info("outer info") + logger.debug("outer debug") + with handler.queue_debug(): + logger.info("inner info") + logger.debug("inner debug") + e = Exception("inner exception") + raise e + except Exception as caught: + self.assertIs(caught, e) + + self.assertEqual( + out.getvalue(), + "outer info\n" "inner info\n" "DEBUG| outer debug\n" "DEBUG| inner debug\n", + ) + + out.seek(0) + out.truncate() + + try: + with handler.queue_debug(): + logger.info("outer info") + logger.debug("outer debug") + with handler.queue_debug(): + logger.info("inner info") + logger.debug("inner debug") + e = Exception("outer exception") + raise e + except Exception as caught: + self.assertIs(caught, e) + + self.assertEqual( + out.getvalue(), + "outer info\n" "inner info\n" "DEBUG| outer debug\n" "DEBUG| inner debug\n", + ) + + out.seek(0) + out.truncate() + + with handler.queue_debug(): + logger.info("outer info") + logger.debug("outer debug") + with handler.queue_debug(): + logger.info("inner info") + logger.debug("inner debug") + logger.error("inner error") + self.assertEqual( + out.getvalue(), + "outer info\n" + "inner info\n" + "DEBUG| outer debug\n" + "DEBUG| inner debug\n" + "ERROR| inner error\n", + ) + + out.seek(0) + out.truncate() + + with handler.queue_debug(): + logger.info("outer info") + logger.debug("outer debug") + with handler.queue_debug(): + logger.info("inner info") + logger.debug("inner debug") + logger.error("outer error") + self.assertEqual( + out.getvalue(), + "outer info\n" + "inner info\n" + "DEBUG| outer debug\n" + "DEBUG| inner debug\n" + "ERROR| outer error\n", + ) + + def test_is_same_output(self): + fd1 = sys.stderr.fileno() + fd2 = os.dup(fd1) + try: + self.assertTrue(ConfigureOutputHandler._is_same_output(fd1, fd2)) + finally: + os.close(fd2) + + fd2, path = tempfile.mkstemp() + try: + self.assertFalse(ConfigureOutputHandler._is_same_output(fd1, fd2)) + + fd3 = os.dup(fd2) + try: + self.assertTrue(ConfigureOutputHandler._is_same_output(fd2, fd3)) + finally: + os.close(fd3) + + with open(path, "a") as fh: + fd3 = fh.fileno() + self.assertTrue(ConfigureOutputHandler._is_same_output(fd2, fd3)) + + finally: + os.close(fd2) + os.remove(path) + + +class TestLineIO(unittest.TestCase): + def test_lineio(self): + lines = [] + l = LineIO(lambda l: lines.append(l)) + + l.write("a") + self.assertEqual(lines, []) + + l.write("b") + self.assertEqual(lines, []) + + l.write("\n") + self.assertEqual(lines, ["ab"]) + + l.write("cdef") + self.assertEqual(lines, ["ab"]) + + l.write("\n") + self.assertEqual(lines, ["ab", "cdef"]) + + l.write("ghi\njklm") + self.assertEqual(lines, ["ab", "cdef", "ghi"]) + + l.write("nop\nqrst\nuv\n") + self.assertEqual(lines, ["ab", "cdef", "ghi", "jklmnop", "qrst", "uv"]) + + l.write("wx\nyz") + self.assertEqual(lines, ["ab", "cdef", "ghi", "jklmnop", "qrst", "uv", "wx"]) + + l.close() + self.assertEqual( + lines, ["ab", "cdef", "ghi", "jklmnop", "qrst", "uv", "wx", "yz"] + ) + + def test_lineio_contextmanager(self): + lines = [] + with LineIO(lambda l: lines.append(l)) as l: + l.write("a\nb\nc") + + self.assertEqual(lines, ["a", "b"]) + 
+ self.assertEqual(lines, ["a", "b", "c"]) + + +class TestLogSubprocessOutput(unittest.TestCase): + def test_non_ascii_subprocess_output(self): + out = StringIO() + sandbox = ConfigureSandbox({}, {}, ["configure"], out, out) + + sandbox.include_file( + mozpath.join(topsrcdir, "build", "moz.configure", "util.configure") + ) + sandbox.include_file( + mozpath.join( + topsrcdir, + "python", + "mozbuild", + "mozbuild", + "test", + "configure", + "data", + "subprocess.configure", + ) + ) + status = 0 + try: + sandbox.run() + except SystemExit as e: + status = e.code + + self.assertEqual(status, 0) + quote_char = "'" + if getpreferredencoding().lower() == "utf-8": + quote_char = "\u00B4" + self.assertEqual(six.ensure_text(out.getvalue().strip()), quote_char) + + +class TestVersion(unittest.TestCase): + def test_version_simple(self): + v = Version("1") + self.assertEqual(v, "1") + self.assertLess(v, "2") + self.assertGreater(v, "0.5") + self.assertEqual(v.major, 1) + self.assertEqual(v.minor, 0) + self.assertEqual(v.patch, 0) + + def test_version_more(self): + v = Version("1.2.3b") + self.assertLess(v, "2") + self.assertEqual(v.major, 1) + self.assertEqual(v.minor, 2) + self.assertEqual(v.patch, 3) + + def test_version_bad(self): + # A version with a letter in the middle doesn't really make sense, + # so everything after it should be ignored. + v = Version("1.2b.3") + self.assertLess(v, "2") + self.assertEqual(v.major, 1) + self.assertEqual(v.minor, 2) + self.assertEqual(v.patch, 0) + + def test_version_badder(self): + v = Version("1b.2.3") + self.assertLess(v, "2") + self.assertEqual(v.major, 1) + self.assertEqual(v.minor, 0) + self.assertEqual(v.patch, 0) + + +class TestCheckCmdOutput(unittest.TestCase): + def get_result(self, command="", paths=None): + paths = paths or {} + config = {} + out = StringIO() + sandbox = ConfigureTestSandbox(paths, config, {}, ["/bin/configure"], out, out) + sandbox.include_file( + mozpath.join(topsrcdir, "build", "moz.configure", "util.configure") + ) + status = 0 + try: + exec_(command, sandbox) + sandbox.run() + except SystemExit as e: + status = e.code + return config, out.getvalue(), status + + def test_simple_program(self): + def mock_simple_prog(_, args): + if len(args) == 1 and args[0] == "--help": + return 0, "simple program help...", "" + self.fail("Unexpected arguments to mock_simple_program: %s" % args) + + prog_path = mozpath.abspath("/simple/prog") + cmd = "log.info(check_cmd_output('%s', '--help'))" % prog_path + config, out, status = self.get_result(cmd, paths={prog_path: mock_simple_prog}) + self.assertEqual(config, {}) + self.assertEqual(status, 0) + self.assertEqual(out, "simple program help...\n") + + def test_failing_program(self): + def mock_error_prog(_, args): + if len(args) == 1 and args[0] == "--error": + return (127, "simple program output", "simple program error output") + self.fail("Unexpected arguments to mock_error_program: %s" % args) + + prog_path = mozpath.abspath("/simple/prog") + cmd = "log.info(check_cmd_output('%s', '--error'))" % prog_path + config, out, status = self.get_result(cmd, paths={prog_path: mock_error_prog}) + self.assertEqual(config, {}) + self.assertEqual(status, 1) + self.assertEqual( + out, + textwrap.dedent( + """\ + DEBUG: Executing: `%s --error` + DEBUG: The command returned non-zero exit status 127. + DEBUG: Its output was: + DEBUG: | simple program output + DEBUG: Its error output was: + DEBUG: | simple program error output + ERROR: Command `%s --error` failed with exit status 127. 
+ """ + % (prog_path, prog_path) + ), + ) + + def test_error_callback(self): + def mock_error_prog(_, args): + if len(args) == 1 and args[0] == "--error": + return 127, "simple program error...", "" + self.fail("Unexpected arguments to mock_error_program: %s" % args) + + prog_path = mozpath.abspath("/simple/prog") + cmd = textwrap.dedent( + """\ + check_cmd_output('%s', '--error', + onerror=lambda: die('`prog` produced an error')) + """ + % prog_path + ) + config, out, status = self.get_result(cmd, paths={prog_path: mock_error_prog}) + self.assertEqual(config, {}) + self.assertEqual(status, 1) + self.assertEqual( + out, + textwrap.dedent( + """\ + DEBUG: Executing: `%s --error` + DEBUG: The command returned non-zero exit status 127. + DEBUG: Its output was: + DEBUG: | simple program error... + ERROR: `prog` produced an error + """ + % prog_path + ), + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/controller/__init__.py b/python/mozbuild/mozbuild/test/controller/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/controller/test_ccachestats.py b/python/mozbuild/mozbuild/test/controller/test_ccachestats.py new file mode 100644 index 0000000000..f1efa78c3a --- /dev/null +++ b/python/mozbuild/mozbuild/test/controller/test_ccachestats.py @@ -0,0 +1,866 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import time +import unittest + +from mozunit import main + +from mozbuild.controller.building import CCacheStats + +TIMESTAMP = time.time() +TIMESTAMP2 = time.time() + 10 +TIMESTAMP_STR = time.strftime("%c", time.localtime(TIMESTAMP)) +TIMESTAMP2_STR = time.strftime("%c", time.localtime(TIMESTAMP2)) + + +class TestCcacheStats(unittest.TestCase): + STAT_GARBAGE = """A garbage line which should be failed to parse""" + + STAT0 = """ + cache directory /home/tlin/.ccache + cache hit (direct) 0 + cache hit (preprocessed) 0 + cache miss 0 + files in cache 0 + cache size 0 Kbytes + max cache size 16.0 Gbytes""" + + STAT1 = """ + cache directory /home/tlin/.ccache + cache hit (direct) 100 + cache hit (preprocessed) 200 + cache miss 2500 + called for link 180 + called for preprocessing 6 + compile failed 11 + preprocessor error 3 + bad compiler arguments 6 + unsupported source language 9 + autoconf compile/link 60 + unsupported compiler option 2 + no input file 21 + files in cache 7344 + cache size 1.9 Gbytes + max cache size 16.0 Gbytes""" + + STAT2 = """ + cache directory /home/tlin/.ccache + cache hit (direct) 1900 + cache hit (preprocessed) 300 + cache miss 2600 + called for link 361 + called for preprocessing 12 + compile failed 22 + preprocessor error 6 + bad compiler arguments 12 + unsupported source language 18 + autoconf compile/link 120 + unsupported compiler option 4 + no input file 48 + files in cache 7392 + cache size 2.0 Gbytes + max cache size 16.0 Gbytes""" + + STAT3 = """ + cache directory /Users/tlin/.ccache + primary config /Users/tlin/.ccache/ccache.conf + secondary config (readonly) /usr/local/Cellar/ccache/3.2/etc/ccache.conf + cache hit (direct) 12004 + cache hit (preprocessed) 1786 + cache miss 26348 + called for link 2338 + called for preprocessing 6313 + compile failed 399 + preprocessor error 390 + bad compiler arguments 86 + unsupported source language 66 + autoconf compile/link 2439 + unsupported compiler option 187 + no input file 
1068 + files in cache 18044 + cache size 7.5 GB + max cache size 8.6 GB + """ + + STAT4 = """ + cache directory /Users/tlin/.ccache + primary config /Users/tlin/.ccache/ccache.conf + secondary config (readonly) /usr/local/Cellar/ccache/3.2.1/etc/ccache.conf + cache hit (direct) 21039 + cache hit (preprocessed) 2315 + cache miss 39370 + called for link 3651 + called for preprocessing 6693 + compile failed 723 + ccache internal error 1 + preprocessor error 588 + bad compiler arguments 128 + unsupported source language 99 + autoconf compile/link 3669 + unsupported compiler option 187 + no input file 1711 + files in cache 18313 + cache size 6.3 GB + max cache size 6.0 GB + """ + + STAT5 = """ + cache directory /Users/tlin/.ccache + primary config /Users/tlin/.ccache/ccache.conf + secondary config (readonly) /usr/local/Cellar/ccache/3.2.1/etc/ccache.conf + cache hit (direct) 21039 + cache hit (preprocessed) 2315 + cache miss 39372 + called for link 3653 + called for preprocessing 6693 + compile failed 723 + ccache internal error 1 + preprocessor error 588 + bad compiler arguments 128 + unsupported source language 99 + autoconf compile/link 3669 + unsupported compiler option 187 + no input file 1711 + files in cache 17411 + cache size 6.0 GB + max cache size 6.0 GB + """ + + STAT6 = """ + cache directory /Users/tlin/.ccache + primary config /Users/tlin/.ccache/ccache.conf + secondary config (readonly) /usr/local/Cellar/ccache/3.3.2/etc/ccache.conf + cache hit (direct) 319287 + cache hit (preprocessed) 125987 + cache miss 749959 + cache hit rate 37.25 % + called for link 87978 + called for preprocessing 418591 + multiple source files 1861 + compiler produced no output 122 + compiler produced empty output 174 + compile failed 14330 + ccache internal error 1 + preprocessor error 9459 + can't use precompiled header 4 + bad compiler arguments 2077 + unsupported source language 18195 + autoconf compile/link 51485 + unsupported compiler option 322 + no input file 309538 + cleanups performed 1 + files in cache 17358 + cache size 15.4 GB + max cache size 17.2 GB + """ + + STAT7 = """ + cache directory /Users/tlin/.ccache + primary config /Users/tlin/.ccache/ccache.conf + secondary config (readonly) /usr/local/Cellar/ccache/3.3.3/etc/ccache.conf + cache hit (direct) 27035 + cache hit (preprocessed) 13939 + cache miss 62630 + cache hit rate 39.55 % + called for link 1280 + called for preprocessing 736 + compile failed 550 + preprocessor error 638 + bad compiler arguments 20 + autoconf compile/link 1751 + unsupported code directive 2 + no input file 2378 + cleanups performed 1792 + files in cache 3479 + cache size 4.4 GB + max cache size 5.0 GB + """ + + # Substitute a locally-generated timestamp because the timestamp format is + # locale-dependent. 
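+    # (TIMESTAMP_STR and TIMESTAMP2_STR come from time.strftime("%c") at
+    # module load, so the fixture matches the locale the test runs under.)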
+ STAT8 = f""" + cache directory /home/psimonyi/.ccache + primary config /home/psimonyi/.ccache/ccache.conf + secondary config (readonly) /etc/ccache.conf + stats zero time {TIMESTAMP_STR} + cache hit (direct) 571 + cache hit (preprocessed) 1203 + cache miss 11747 + cache hit rate 13.12 % + called for link 623 + called for preprocessing 7194 + compile failed 32 + preprocessor error 137 + bad compiler arguments 4 + autoconf compile/link 348 + no input file 162 + cleanups performed 77 + files in cache 13464 + cache size 6.2 GB + max cache size 7.0 GB + """ + + STAT9 = f""" + cache directory /Users/tlin/.ccache + primary config /Users/tlin/.ccache/ccache.conf + secondary config (readonly) /usr/local/Cellar/ccache/3.5/etc/ccache.conf + stats updated {TIMESTAMP2_STR} + stats zeroed {TIMESTAMP_STR} + cache hit (direct) 80147 + cache hit (preprocessed) 21413 + cache miss 191128 + cache hit rate 34.70 % + called for link 5194 + called for preprocessing 1721 + compile failed 825 + preprocessor error 3838 + cache file missing 4863 + bad compiler arguments 32 + autoconf compile/link 3554 + unsupported code directive 4 + no input file 5545 + cleanups performed 3154 + files in cache 18525 + cache size 13.4 GB + max cache size 15.0 GB + """ + + VERSION_3_5_GIT = """ + ccache version 3.5.1+2_gf5309092_dirty + + Copyright (C) 2002-2007 Andrew Tridgell + Copyright (C) 2009-2019 Joel Rosdahl + + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 3 of the License, or (at your option) any later + version. + """ + + VERSION_4_2 = """ + ccache version 4.2.1 + + Copyright (C) 2002-2007 Andrew Tridgell + Copyright (C) 2009-2021 Joel Rosdahl and other contributors + + See for a complete list of contributors. + + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 3 of the License, or (at your option) any later + version. + """ + + VERSION_4_4 = """ + ccache version 4.4 + Features: file-storage http-storage + + Copyright (C) 2002-2007 Andrew Tridgell + Copyright (C) 2009-2021 Joel Rosdahl and other contributors + + See for a complete list of contributors. + + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 3 of the License, or (at your option) any later + version. + """ + + VERSION_4_4_2 = """ + ccache version 4.4.2 + Features: file-storage http-storage + + Copyright (C) 2002-2007 Andrew Tridgell + Copyright (C) 2009-2021 Joel Rosdahl and other contributors + + See for a complete list of contributors. + + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 3 of the License, or (at your option) any later + version. + """ + + VERSION_4_5 = """ + ccache version 4.5.1 + Features: file-storage http-storage redis-storage + + Copyright (C) 2002-2007 Andrew Tridgell + Copyright (C) 2009-2021 Joel Rosdahl and other contributors + + See for a complete list of contributors. 
+ + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 3 of the License, or (at your option) any later + version. + """ + + STAT10 = f"""\ +stats_updated_timestamp\t{int(TIMESTAMP)} +stats_zeroed_timestamp\t0 +direct_cache_hit\t197 +preprocessed_cache_hit\t719 +cache_miss\t8427 +called_for_link\t569 +called_for_preprocessing\t110 +multiple_source_files\t0 +compiler_produced_stdout\t0 +compiler_produced_no_output\t0 +compiler_produced_empty_output\t0 +compile_failed\t49 +internal_error\t1 +preprocessor_error\t90 +could_not_use_precompiled_header\t0 +could_not_use_modules\t0 +could_not_find_compiler\t0 +missing_cache_file\t1 +bad_compiler_arguments\t6 +unsupported_source_language\t0 +compiler_check_failed\t0 +autoconf_test\t418 +unsupported_compiler_option\t0 +unsupported_code_directive\t1 +output_to_stdout\t0 +bad_output_file\t0 +no_input_file\t9 +error_hashing_extra_file\t0 +cleanups_performed\t161 +files_in_cache\t4425 +cache_size_kibibyte\t4624220 +""" + + STAT11 = f"""\ +stats_updated_timestamp\t{int(TIMESTAMP)} +stats_zeroed_timestamp\t{int(TIMESTAMP2)} +direct_cache_hit\t0 +preprocessed_cache_hit\t0 +cache_miss\t0 +called_for_link\t0 +called_for_preprocessing\t0 +multiple_source_files\t0 +compiler_produced_stdout\t0 +compiler_produced_no_output\t0 +compiler_produced_empty_output\t0 +compile_failed\t0 +internal_error\t0 +preprocessor_error\t0 +could_not_use_precompiled_header\t0 +could_not_use_modules\t0 +could_not_find_compiler\t0 +missing_cache_file\t0 +bad_compiler_arguments\t0 +unsupported_source_language\t0 +compiler_check_failed\t0 +autoconf_test\t0 +unsupported_compiler_option\t0 +unsupported_code_directive\t0 +output_to_stdout\t0 +bad_output_file\t0 +no_input_file\t0 +error_hashing_extra_file\t0 +cleanups_performed\t16 +files_in_cache\t0 +cache_size_kibibyte\t0 +""" + + STAT12 = """\ +stats_updated_timestamp\t0 +stats_zeroed_timestamp\t0 +direct_cache_hit\t0 +preprocessed_cache_hit\t0 +cache_miss\t0 +called_for_link\t0 +called_for_preprocessing\t0 +multiple_source_files\t0 +compiler_produced_stdout\t0 +compiler_produced_no_output\t0 +compiler_produced_empty_output\t0 +compile_failed\t0 +internal_error\t0 +preprocessor_error\t0 +could_not_use_precompiled_header\t0 +could_not_use_modules\t0 +could_not_find_compiler\t0 +missing_cache_file\t0 +bad_compiler_arguments\t0 +unsupported_source_language\t0 +compiler_check_failed\t0 +autoconf_test\t0 +unsupported_compiler_option\t0 +unsupported_code_directive\t0 +output_to_stdout\t0 +bad_output_file\t0 +no_input_file\t0 +error_hashing_extra_file\t0 +cleanups_performed\t16 +files_in_cache\t0 +cache_size_kibibyte\t0 +""" + + STAT13 = f"""\ +stats_updated_timestamp\t{int(TIMESTAMP)} +stats_zeroed_timestamp\t{int(TIMESTAMP2)} +direct_cache_hit\t280542 +preprocessed_cache_hit\t0 +cache_miss\t387653 +called_for_link\t0 +called_for_preprocessing\t0 +multiple_source_files\t0 +compiler_produced_stdout\t0 +compiler_produced_no_output\t0 +compiler_produced_empty_output\t0 +compile_failed\t1665 +internal_error\t1 +preprocessor_error\t0 +could_not_use_precompiled_header\t0 +could_not_use_modules\t0 +could_not_find_compiler\t0 +missing_cache_file\t0 +bad_compiler_arguments\t0 +unsupported_source_language\t0 +compiler_check_failed\t0 +autoconf_test\t0 +unsupported_compiler_option\t0 +unsupported_code_directive\t0 +output_to_stdout\t0 +bad_output_file\t0 +no_input_file\t2 +error_hashing_extra_file\t0 
+cleanups_performed\t364 +files_in_cache\t335104 +cache_size_kibibyte\t18224250 +""" + + maxDiff = None + + def test_parse_garbage_stats_message(self): + self.assertRaises(ValueError, CCacheStats, self.STAT_GARBAGE) + + def test_parse_zero_stats_message(self): + stats = CCacheStats(self.STAT0) + self.assertEqual(stats.hit_rates(), (0, 0, 0)) + + def test_hit_rate_of_diff_stats(self): + stats1 = CCacheStats(self.STAT1) + stats2 = CCacheStats(self.STAT2) + stats_diff = stats2 - stats1 + self.assertEqual(stats_diff.hit_rates(), (0.9, 0.05, 0.05)) + + def test_stats_contains_data(self): + stats0 = CCacheStats(self.STAT0) + stats1 = CCacheStats(self.STAT1) + stats2 = CCacheStats(self.STAT2) + stats_diff_zero = stats1 - stats1 + stats_diff_negative1 = stats0 - stats1 + stats_diff_negative2 = stats1 - stats2 + + self.assertFalse(stats0) + self.assertTrue(stats1) + self.assertTrue(stats2) + self.assertFalse(stats_diff_zero) + self.assertFalse(stats_diff_negative1) + self.assertFalse(stats_diff_negative2) + + def test_stats_version32(self): + stat2 = CCacheStats(self.STAT2) + stat3 = CCacheStats(self.STAT3) + stats_diff = stat3 - stat2 + self.assertEqual( + str(stat3), + "cache hit (direct) 12004\n" + "cache hit (preprocessed) 1786\n" + "cache miss 26348\n" + "called for link 2338\n" + "called for preprocessing 6313\n" + "compile failed 399\n" + "preprocessor error 390\n" + "bad compiler arguments 86\n" + "unsupported source language 66\n" + "autoconf compile/link 2439\n" + "unsupported compiler option 187\n" + "no input file 1068\n" + "files in cache 18044\n" + "cache size 7.5 Gbytes\n" + "max cache size 8.6 Gbytes", + ) + self.assertEqual( + str(stats_diff), + "cache hit (direct) 10104\n" + "cache hit (preprocessed) 1486\n" + "cache miss 23748\n" + "called for link 1977\n" + "called for preprocessing 6301\n" + "compile failed 377\n" + "preprocessor error 384\n" + "bad compiler arguments 74\n" + "unsupported source language 48\n" + "autoconf compile/link 2319\n" + "unsupported compiler option 183\n" + "no input file 1020\n" + "files in cache 18044\n" + "cache size 7.5 Gbytes\n" + "max cache size 8.6 Gbytes", + ) + + def test_cache_size_shrinking(self): + stat4 = CCacheStats(self.STAT4) + stat5 = CCacheStats(self.STAT5) + stats_diff = stat5 - stat4 + self.assertEqual( + str(stat4), + "cache hit (direct) 21039\n" + "cache hit (preprocessed) 2315\n" + "cache miss 39370\n" + "called for link 3651\n" + "called for preprocessing 6693\n" + "compile failed 723\n" + "ccache internal error 1\n" + "preprocessor error 588\n" + "bad compiler arguments 128\n" + "unsupported source language 99\n" + "autoconf compile/link 3669\n" + "unsupported compiler option 187\n" + "no input file 1711\n" + "files in cache 18313\n" + "cache size 6.3 Gbytes\n" + "max cache size 6.0 Gbytes", + ) + self.assertEqual( + str(stat5), + "cache hit (direct) 21039\n" + "cache hit (preprocessed) 2315\n" + "cache miss 39372\n" + "called for link 3653\n" + "called for preprocessing 6693\n" + "compile failed 723\n" + "ccache internal error 1\n" + "preprocessor error 588\n" + "bad compiler arguments 128\n" + "unsupported source language 99\n" + "autoconf compile/link 3669\n" + "unsupported compiler option 187\n" + "no input file 1711\n" + "files in cache 17411\n" + "cache size 6.0 Gbytes\n" + "max cache size 6.0 Gbytes", + ) + self.assertEqual( + str(stats_diff), + "cache hit (direct) 0\n" + "cache hit (preprocessed) 0\n" + "cache miss 2\n" + "called for link 2\n" + "called for preprocessing 0\n" + "compile failed 0\n" + "ccache internal 
error 0\n" + "preprocessor error 0\n" + "bad compiler arguments 0\n" + "unsupported source language 0\n" + "autoconf compile/link 0\n" + "unsupported compiler option 0\n" + "no input file 0\n" + "files in cache 17411\n" + "cache size 6.0 Gbytes\n" + "max cache size 6.0 Gbytes", + ) + + def test_stats_version33(self): + # Test stats for 3.3.2. + stat3 = CCacheStats(self.STAT3) + stat6 = CCacheStats(self.STAT6) + stats_diff = stat6 - stat3 + self.assertEqual( + str(stat6), + "cache hit (direct) 319287\n" + "cache hit (preprocessed) 125987\n" + "cache hit rate 37\n" + "cache miss 749959\n" + "called for link 87978\n" + "called for preprocessing 418591\n" + "multiple source files 1861\n" + "compiler produced no output 122\n" + "compiler produced empty output 174\n" + "compile failed 14330\n" + "ccache internal error 1\n" + "preprocessor error 9459\n" + "can't use precompiled header 4\n" + "bad compiler arguments 2077\n" + "unsupported source language 18195\n" + "autoconf compile/link 51485\n" + "unsupported compiler option 322\n" + "no input file 309538\n" + "cleanups performed 1\n" + "files in cache 17358\n" + "cache size 15.4 Gbytes\n" + "max cache size 17.2 Gbytes", + ) + self.assertEqual( + str(stat3), + "cache hit (direct) 12004\n" + "cache hit (preprocessed) 1786\n" + "cache miss 26348\n" + "called for link 2338\n" + "called for preprocessing 6313\n" + "compile failed 399\n" + "preprocessor error 390\n" + "bad compiler arguments 86\n" + "unsupported source language 66\n" + "autoconf compile/link 2439\n" + "unsupported compiler option 187\n" + "no input file 1068\n" + "files in cache 18044\n" + "cache size 7.5 Gbytes\n" + "max cache size 8.6 Gbytes", + ) + self.assertEqual( + str(stats_diff), + "cache hit (direct) 307283\n" + "cache hit (preprocessed) 124201\n" + "cache hit rate 37\n" + "cache miss 723611\n" + "called for link 85640\n" + "called for preprocessing 412278\n" + "multiple source files 1861\n" + "compiler produced no output 122\n" + "compiler produced empty output 174\n" + "compile failed 13931\n" + "ccache internal error 1\n" + "preprocessor error 9069\n" + "can't use precompiled header 4\n" + "bad compiler arguments 1991\n" + "unsupported source language 18129\n" + "autoconf compile/link 49046\n" + "unsupported compiler option 135\n" + "no input file 308470\n" + "cleanups performed 1\n" + "files in cache 17358\n" + "cache size 15.4 Gbytes\n" + "max cache size 17.2 Gbytes", + ) + + # Test stats for 3.3.3. + stat7 = CCacheStats(self.STAT7) + self.assertEqual( + str(stat7), + "cache hit (direct) 27035\n" + "cache hit (preprocessed) 13939\n" + "cache hit rate 39\n" + "cache miss 62630\n" + "called for link 1280\n" + "called for preprocessing 736\n" + "compile failed 550\n" + "preprocessor error 638\n" + "bad compiler arguments 20\n" + "autoconf compile/link 1751\n" + "unsupported code directive 2\n" + "no input file 2378\n" + "cleanups performed 1792\n" + "files in cache 3479\n" + "cache size 4.4 Gbytes\n" + "max cache size 5.0 Gbytes", + ) + + def test_stats_version34(self): + # Test parsing 3.4 output. 
+ stat8 = CCacheStats(self.STAT8) + self.assertEqual( + str(stat8), + f"stats zeroed {int(TIMESTAMP)}\n" + "cache hit (direct) 571\n" + "cache hit (preprocessed) 1203\n" + "cache hit rate 13\n" + "cache miss 11747\n" + "called for link 623\n" + "called for preprocessing 7194\n" + "compile failed 32\n" + "preprocessor error 137\n" + "bad compiler arguments 4\n" + "autoconf compile/link 348\n" + "no input file 162\n" + "cleanups performed 77\n" + "files in cache 13464\n" + "cache size 6.2 Gbytes\n" + "max cache size 7.0 Gbytes", + ) + + def test_stats_version35(self): + # Test parsing 3.5 output. + stat9 = CCacheStats(self.STAT9) + self.assertEqual( + str(stat9), + f"stats zeroed {int(TIMESTAMP)}\n" + f"stats updated {int(TIMESTAMP2)}\n" + "cache hit (direct) 80147\n" + "cache hit (preprocessed) 21413\n" + "cache hit rate 34\n" + "cache miss 191128\n" + "called for link 5194\n" + "called for preprocessing 1721\n" + "compile failed 825\n" + "preprocessor error 3838\n" + "cache file missing 4863\n" + "bad compiler arguments 32\n" + "autoconf compile/link 3554\n" + "unsupported code directive 4\n" + "no input file 5545\n" + "cleanups performed 3154\n" + "files in cache 18525\n" + "cache size 13.4 Gbytes\n" + "max cache size 15.0 Gbytes", + ) + + def test_stats_version37(self): + # verify version checks + self.assertFalse(CCacheStats._is_version_3_7_or_newer(self.VERSION_3_5_GIT)) + self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_2)) + self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_4)) + self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_4_2)) + self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_5)) + + # Test parsing 3.7+ output. + stat10 = CCacheStats(self.STAT10, True) + self.assertEqual( + str(stat10), + "stats zeroed 0\n" + f"stats updated {int(TIMESTAMP)}\n" + "cache hit (direct) 197\n" + "cache hit (preprocessed) 719\n" + "cache hit rate 9\n" + "cache miss 8427\n" + "called for link 569\n" + "called for preprocessing 110\n" + "multiple source files 0\n" + "compiler produced stdout 0\n" + "compiler produced no output 0\n" + "compiler produced empty output 0\n" + "compile failed 49\n" + "ccache internal error 1\n" + "preprocessor error 90\n" + "can't use precompiled header 0\n" + "couldn't find the compiler 0\n" + "cache file missing 1\n" + "bad compiler arguments 6\n" + "unsupported source language 0\n" + "compiler check failed 0\n" + "autoconf compile/link 418\n" + "unsupported code directive 1\n" + "unsupported compiler option 0\n" + "output to stdout 0\n" + "no input file 9\n" + "error hashing extra file 0\n" + "cleanups performed 161\n" + "files in cache 4425\n" + "cache size 4.4 Gbytes", + ) + + stat11 = CCacheStats(self.STAT11, True) + self.assertEqual( + str(stat11), + f"stats zeroed {int(TIMESTAMP2)}\n" + f"stats updated {int(TIMESTAMP)}\n" + "cache hit (direct) 0\n" + "cache hit (preprocessed) 0\n" + "cache hit rate 0\n" + "cache miss 0\n" + "called for link 0\n" + "called for preprocessing 0\n" + "multiple source files 0\n" + "compiler produced stdout 0\n" + "compiler produced no output 0\n" + "compiler produced empty output 0\n" + "compile failed 0\n" + "ccache internal error 0\n" + "preprocessor error 0\n" + "can't use precompiled header 0\n" + "couldn't find the compiler 0\n" + "cache file missing 0\n" + "bad compiler arguments 0\n" + "unsupported source language 0\n" + "compiler check failed 0\n" + "autoconf compile/link 0\n" + "unsupported code directive 0\n" + "unsupported compiler option 0\n" + 
"output to stdout 0\n" + "no input file 0\n" + "error hashing extra file 0\n" + "cleanups performed 16\n" + "files in cache 0\n" + "cache size 0.0 Kbytes", + ) + + stat12 = CCacheStats(self.STAT12, True) + self.assertEqual( + str(stat12), + "stats zeroed 0\n" + "stats updated 0\n" + "cache hit (direct) 0\n" + "cache hit (preprocessed) 0\n" + "cache hit rate 0\n" + "cache miss 0\n" + "called for link 0\n" + "called for preprocessing 0\n" + "multiple source files 0\n" + "compiler produced stdout 0\n" + "compiler produced no output 0\n" + "compiler produced empty output 0\n" + "compile failed 0\n" + "ccache internal error 0\n" + "preprocessor error 0\n" + "can't use precompiled header 0\n" + "couldn't find the compiler 0\n" + "cache file missing 0\n" + "bad compiler arguments 0\n" + "unsupported source language 0\n" + "compiler check failed 0\n" + "autoconf compile/link 0\n" + "unsupported code directive 0\n" + "unsupported compiler option 0\n" + "output to stdout 0\n" + "no input file 0\n" + "error hashing extra file 0\n" + "cleanups performed 16\n" + "files in cache 0\n" + "cache size 0.0 Kbytes", + ) + + stat13 = CCacheStats(self.STAT13, True) + self.assertEqual( + str(stat13), + f"stats zeroed {int(TIMESTAMP2)}\n" + f"stats updated {int(TIMESTAMP)}\n" + "cache hit (direct) 280542\n" + "cache hit (preprocessed) 0\n" + "cache hit rate 41\n" + "cache miss 387653\n" + "called for link 0\n" + "called for preprocessing 0\n" + "multiple source files 0\n" + "compiler produced stdout 0\n" + "compiler produced no output 0\n" + "compiler produced empty output 0\n" + "compile failed 1665\n" + "ccache internal error 1\n" + "preprocessor error 0\n" + "can't use precompiled header 0\n" + "couldn't find the compiler 0\n" + "cache file missing 0\n" + "bad compiler arguments 0\n" + "unsupported source language 0\n" + "compiler check failed 0\n" + "autoconf compile/link 0\n" + "unsupported code directive 0\n" + "unsupported compiler option 0\n" + "output to stdout 0\n" + "no input file 2\n" + "error hashing extra file 0\n" + "cleanups performed 364\n" + "files in cache 335104\n" + "cache size 17.4 Gbytes", + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/controller/test_clobber.py b/python/mozbuild/mozbuild/test/controller/test_clobber.py new file mode 100644 index 0000000000..fff3c5a438 --- /dev/null +++ b/python/mozbuild/mozbuild/test/controller/test_clobber.py @@ -0,0 +1,214 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import shutil +import tempfile +import unittest + +from mozunit import main + +from mozbuild.base import MozbuildObject +from mozbuild.controller.building import BuildDriver +from mozbuild.controller.clobber import Clobberer +from mozbuild.test.common import prepare_tmp_topsrcdir + + +class TestClobberer(unittest.TestCase): + def setUp(self): + self._temp_dirs = [] + self._old_env = dict(os.environ) + os.environ.pop("MOZCONFIG", None) + os.environ.pop("MOZ_OBJDIR", None) + + return unittest.TestCase.setUp(self) + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + for d in self._temp_dirs: + shutil.rmtree(d, ignore_errors=True) + + return unittest.TestCase.tearDown(self) + + def get_tempdir(self): + t = tempfile.mkdtemp() + self._temp_dirs.append(t) + return t + + def get_topsrcdir(self): + t = self.get_tempdir() + prepare_tmp_topsrcdir(t) + p = os.path.join(t, "CLOBBER") + with open(p, "a"): + pass + + return t + + def test_no_objdir(self): + """If topobjdir does not exist, no clobber is needed.""" + + tmp = os.path.join(self.get_tempdir(), "topobjdir") + self.assertFalse(os.path.exists(tmp)) + + c = Clobberer(self.get_topsrcdir(), tmp) + self.assertFalse(c.clobber_needed()) + + required, performed, reason = c.maybe_do_clobber(os.getcwd(), True) + self.assertFalse(required) + self.assertFalse(performed) + self.assertIsNone(reason) + + self.assertFalse(os.path.isdir(tmp)) + self.assertFalse(os.path.exists(os.path.join(tmp, "CLOBBER"))) + + def test_objdir_no_clobber_file(self): + """If CLOBBER does not exist in topobjdir, treat as empty.""" + + c = Clobberer(self.get_topsrcdir(), self.get_tempdir()) + self.assertFalse(c.clobber_needed()) + + required, performed, reason = c.maybe_do_clobber(os.getcwd(), True) + self.assertFalse(required) + self.assertFalse(performed) + self.assertIsNone(reason) + + self.assertFalse(os.path.exists(os.path.join(c.topobjdir, "CLOBBER"))) + + def test_objdir_clobber_newer(self): + """If CLOBBER in topobjdir is newer, do nothing.""" + + c = Clobberer(self.get_topsrcdir(), self.get_tempdir()) + with open(c.obj_clobber, "a"): + pass + + required, performed, reason = c.maybe_do_clobber(os.getcwd(), True) + self.assertFalse(required) + self.assertFalse(performed) + self.assertIsNone(reason) + + def test_objdir_clobber_older(self): + """If CLOBBER in topobjdir is older, we clobber.""" + + c = Clobberer(self.get_topsrcdir(), self.get_tempdir()) + with open(c.obj_clobber, "a"): + pass + + dummy_path = os.path.join(c.topobjdir, "foo") + with open(dummy_path, "a"): + pass + + self.assertTrue(os.path.exists(dummy_path)) + + old_time = os.path.getmtime(c.src_clobber) - 60 + os.utime(c.obj_clobber, (old_time, old_time)) + + self.assertTrue(c.clobber_needed()) + + required, performed, reason = c.maybe_do_clobber(os.getcwd(), True) + self.assertTrue(required) + self.assertTrue(performed) + + self.assertFalse(os.path.exists(dummy_path)) + self.assertFalse(os.path.exists(c.obj_clobber)) + + def test_objdir_is_srcdir(self): + """If topobjdir is the topsrcdir, refuse to clobber.""" + + tmp = self.get_topsrcdir() + c = Clobberer(tmp, tmp) + + self.assertFalse(c.clobber_needed()) + + def test_cwd_is_topobjdir(self): + """If cwd is topobjdir, we can still clobber.""" + c = Clobberer(self.get_topsrcdir(), self.get_tempdir()) + + with open(c.obj_clobber, "a"): + pass + + dummy_file = os.path.join(c.topobjdir, "dummy_file") + with open(dummy_file, "a"): + pass + + dummy_dir = os.path.join(c.topobjdir, "dummy_dir") + os.mkdir(dummy_dir) + + 
self.assertTrue(os.path.exists(dummy_file)) + self.assertTrue(os.path.isdir(dummy_dir)) + + old_time = os.path.getmtime(c.src_clobber) - 60 + os.utime(c.obj_clobber, (old_time, old_time)) + + self.assertTrue(c.clobber_needed()) + + required, performed, reason = c.maybe_do_clobber(c.topobjdir, True) + self.assertTrue(required) + self.assertTrue(performed) + + self.assertFalse(os.path.exists(dummy_file)) + self.assertFalse(os.path.exists(dummy_dir)) + + def test_cwd_under_topobjdir(self): + """If cwd is under topobjdir, we can't clobber.""" + + c = Clobberer(self.get_topsrcdir(), self.get_tempdir()) + + with open(c.obj_clobber, "a"): + pass + + old_time = os.path.getmtime(c.src_clobber) - 60 + os.utime(c.obj_clobber, (old_time, old_time)) + + d = os.path.join(c.topobjdir, "dummy_dir") + os.mkdir(d) + + required, performed, reason = c.maybe_do_clobber(d, True) + self.assertTrue(required) + self.assertFalse(performed) + self.assertIn("Cannot clobber while the shell is inside", reason) + + def test_mozconfig_opt_in(self): + """Auto clobber iff AUTOCLOBBER is in the environment.""" + + topsrcdir = self.get_topsrcdir() + topobjdir = self.get_tempdir() + + obj_clobber = os.path.join(topobjdir, "CLOBBER") + with open(obj_clobber, "a"): + pass + + dummy_file = os.path.join(topobjdir, "dummy_file") + with open(dummy_file, "a"): + pass + + self.assertTrue(os.path.exists(dummy_file)) + + old_time = os.path.getmtime(os.path.join(topsrcdir, "CLOBBER")) - 60 + os.utime(obj_clobber, (old_time, old_time)) + + # Check auto clobber is off by default + env = dict(os.environ) + if env.get("AUTOCLOBBER", False): + del env["AUTOCLOBBER"] + + mbo = MozbuildObject(topsrcdir, None, None, topobjdir) + build = mbo._spawn(BuildDriver) + + status = build._check_clobber(build.mozconfig, env) + + self.assertEqual(status, True) + self.assertTrue(os.path.exists(dummy_file)) + + # Check auto clobber opt-in works + env["AUTOCLOBBER"] = "1" + + status = build._check_clobber(build.mozconfig, env) + self.assertFalse(status) + self.assertFalse(os.path.exists(dummy_file)) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/data/Makefile b/python/mozbuild/mozbuild/test/data/Makefile new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/data/bad.properties b/python/mozbuild/mozbuild/test/data/bad.properties new file mode 100644 index 0000000000..d4d8109b69 --- /dev/null +++ b/python/mozbuild/mozbuild/test/data/bad.properties @@ -0,0 +1,12 @@ +# A region.properties file with invalid unicode byte sequences. 
The +# sequences were cribbed from Markus Kuhn's "UTF-8 decoder capability +# and stress test", available at +# http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt + +# 3.5 Impossible bytes | +# | +# The following two bytes cannot appear in a correct UTF-8 string | +# | +# 3.5.1 fe = "þ" | +# 3.5.2 ff = "ÿ" | +# 3.5.3 fe fe ff ff = "þþÿÿ" | diff --git a/python/mozbuild/mozbuild/test/data/test-dir/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/Makefile new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/data/valid.properties b/python/mozbuild/mozbuild/test/data/valid.properties new file mode 100644 index 0000000000..db64bf2eed --- /dev/null +++ b/python/mozbuild/mozbuild/test/data/valid.properties @@ -0,0 +1,11 @@ +# A region.properties file with unicode characters. + +# Danish. +# #### ~~ Søren Munk Skrøder, sskroeder - 2009-05-30 @ #mozmae + +# Korean. +A.title=한메일 + +# Russian. +list.0 = test +list.1 = Яндекс diff --git a/python/mozbuild/mozbuild/test/frontend/__init__.py b/python/mozbuild/mozbuild/test/frontend/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build new file mode 100644 index 0000000000..0bf5b55ecb --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build @@ -0,0 +1,20 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def AllowCompilerWarnings(): + COMPILE_FLAGS["WARNINGS_AS_ERRORS"] = [] + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +UNIFIED_SOURCES += ["test1.c"] + +AllowCompilerWarnings() diff --git a/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build new file mode 100644 index 0000000000..80f48a7d81 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build @@ -0,0 +1,15 @@ +# Any copyright is dedicated to the Public Domain.
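
The bad.properties and valid.properties fixtures above are two sides of one check: a .properties reader must reject byte sequences that can never occur in well-formed UTF-8 (0xfe/0xff) while passing genuine non-ASCII text through intact. A sketch of that gate, assuming the reader decodes the raw bytes up front (illustrative only, not the actual dotproperties implementation):

    def decode_properties(raw: bytes) -> str:
        # 0xfe and 0xff never appear in UTF-8, so bad.properties must
        # fail here; valid.properties decodes cleanly.
        try:
            return raw.decode("utf-8")
        except UnicodeDecodeError as e:
            raise ValueError(f"invalid UTF-8 at byte offset {e.start}") from e
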
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +SOURCES += ["test1.c", "test2.S"] + +ASFLAGS += ["-no-integrated-as"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S b/python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico b/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png b/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm b/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build new file mode 100644 index 0000000000..65f22d578b --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build @@ -0,0 +1,12 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +BRANDING_FILES += [ + "bar.ico", + "baz.png", + "foo.xpm", +] + +BRANDING_FILES.icons += [ + "quux.icns", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns b/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build new file mode 100644 index 0000000000..65d71dae2b --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build @@ -0,0 +1,16 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +UNIFIED_SOURCES += ["test1.c"] + +DEFINES["MOZ_TEST_DEFINE"] = True +LIBRARY_DEFINES["MOZ_LIBRARY_DEFINE"] = "MOZ_TEST" diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build new file mode 100644 index 0000000000..70622bc4e1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build @@ -0,0 +1,15 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +COMPILE_FLAGS["STL_FLAGS"] = [] + +UNIFIED_SOURCES += ["test1.c"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build new file mode 100644 index 0000000000..6e611fc598 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build @@ -0,0 +1,27 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + + +@template +def DisableStlWrapping(): + COMPILE_FLAGS["STL"] = [] + + +@template +def NoVisibilityFlags(): + COMPILE_FLAGS["VISIBILITY"] = [] + + +UNIFIED_SOURCES += ["test1.c"] + +DisableStlWrapping() +NoVisibilityFlags() diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build new file mode 100644 index 0000000000..31094736a7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build @@ -0,0 +1,15 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +COMPILE_FLAGS["STL"] = [None, 123] + +UNIFIED_SOURCES += ["test1.c"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build new file mode 100644 index 0000000000..0e6f75cfa1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build @@ -0,0 +1,22 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + + +@template +def DisableStlWrapping(): + COMPILE_FLAGS["STL"] = [] + + +UNIFIED_SOURCES += ["test1.c"] + +CXXFLAGS += ["-funroll-loops", "-Wall"] +CFLAGS += ["-Wall", "-funroll-loops"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build new file mode 100644 index 0000000000..10c28e2833 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build @@ -0,0 +1,15 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +UNIFIED_SOURCES += ["test1.c"] + +LOCAL_INCLUDES += ["subdir"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build b/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build new file mode 100644 index 0000000000..f42dc0a517 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build @@ -0,0 +1,6 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +CONFIGURE_SUBST_FILES += ["foo"] +CONFIGURE_SUBST_FILES += ["bar"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml new file mode 100644 index 0000000000..b080d53b5a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "random-crate" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] + +[lib] +crate-type = ["staticlib"] + +[dependencies] +deep-crate = { version = "0.1.0", path = "the/depths" } + +[profile.dev] +panic = "abort" + +[profile.release] +panic = "abort" diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build new file mode 100644 index 0000000000..de1967c519 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build @@ -0,0 +1,19 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +@template +def RustLibrary(name): + """Template for Rust libraries.""" + Library(name) + + IS_RUST_LIBRARY = True + + +RustLibrary("random-crate") diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml new file mode 100644 index 0000000000..e918f9228d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml @@ -0,0 +1,6 @@ +[package] +name = "shallow-crate" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml new file mode 100644 index 0000000000..cebcb38ab7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "deep-crate" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] + +[dependencies] +shallow-crate = { path = "../../shallow" } diff --git a/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build new file mode 100644 index 0000000000..6085619c58 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build @@ -0,0 +1,9 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +value = "xyz" +DEFINES["FOO"] = True +DEFINES["BAZ"] = '"abcd"' +DEFINES["BAR"] = 7 +DEFINES["VALUE"] = value +DEFINES["QUX"] = False diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build new file mode 100644 index 0000000000..064fa09893 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build @@ -0,0 +1,20 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def DisableCompilerWarnings(): + COMPILE_FLAGS["WARNINGS_CFLAGS"] = [] + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +UNIFIED_SOURCES += ["test1.c"] + +DisableCompilerWarnings() diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build new file mode 100644 index 0000000000..40cb3e7781 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build @@ -0,0 +1,21 @@ +# Any copyright is dedicated to the Public Domain. 
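
The crate-dependency-path-resolution fixture above encodes a chain of relative path dependencies (random-crate -> the/depths -> ../../shallow), so each path must be resolved against the Cargo.toml that declares it, not against the top-level crate. A sketch of that resolution using the stdlib TOML parser (tomllib, Python 3.11+; illustrative, not the mozbuild emitter's actual code):

    import os
    import tomllib

    def path_dependencies(cargo_toml):
        # Relative "path" entries are anchored at the declaring manifest.
        crate_dir = os.path.dirname(os.path.abspath(cargo_toml))
        with open(cargo_toml, "rb") as f:
            manifest = tomllib.load(f)
        for name, spec in manifest.get("dependencies", {}).items():
            if isinstance(spec, dict) and "path" in spec:
                yield name, os.path.normpath(
                    os.path.join(crate_dir, spec["path"])
                )
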
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + + +@template +def DisableStlWrapping(): + COMPILE_FLAGS["STL"] = [] + + +UNIFIED_SOURCES += ["test1.c"] + +DisableStlWrapping() diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build new file mode 100644 index 0000000000..25961f149f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET_PP_FILES += [ + "install.rdf", + "main.js", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf b/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js b/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build new file mode 100644 index 0000000000..25961f149f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET_PP_FILES += [ + "install.rdf", + "main.js", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build new file mode 100644 index 0000000000..bd3507c97b --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build @@ -0,0 +1,8 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +EXPORTS += ["foo.h"] +EXPORTS.mozilla += ["mozilla1.h"] +EXPORTS.mozilla += ["!mozilla2.h"] + +GENERATED_FILES += ["mozilla2.h"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build new file mode 100644 index 0000000000..d81109d37d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +EXPORTS += ["foo.h"] +EXPORTS += ["!bar.h"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build new file mode 100644 index 0000000000..3f94fbdccd --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build @@ -0,0 +1,6 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +EXPORTS += ["foo.h"] +EXPORTS.mozilla += ["mozilla1.h"] +EXPORTS.mozilla += ["mozilla2.h"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h b/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h b/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h b/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h 
b/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build new file mode 100644 index 0000000000..64253b1cf0 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build @@ -0,0 +1,13 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +EXPORTS += ["foo.h"] +EXPORTS += ["bar.h", "baz.h"] +EXPORTS.mozilla += ["mozilla1.h"] +EXPORTS.mozilla += ["mozilla2.h"] +EXPORTS.mozilla.dom += ["dom1.h"] +EXPORTS.mozilla.dom += ["dom2.h", "dom3.h"] +EXPORTS.mozilla.gfx += ["gfx.h"] +EXPORTS.vpx = ["mem.h"] +EXPORTS.vpx += ["mem2.h"] +EXPORTS.nspr.private = ["pprio.h", "pprthred.h"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h b/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h b/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build new file mode 100644 index 0000000000..693b6cc962 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build @@ -0,0 +1,2 @@ +with Files("*"): + BUG_COMPONENT = "bad value" diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build new file mode 100644 index 0000000000..ca5c74fd6a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build @@ -0,0 +1,4 @@ +with Files("*.jsm"): + BUG_COMPONENT = ("Firefox", "JS") +with Files("*.cpp"): + BUG_COMPONENT = ("Firefox", "C++") diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build new file mode 100644 index 0000000000..9b1d05a9b0 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build @@ -0,0 +1,3 @@ +with Files("**/Makefile.in"): + BUG_COMPONENT = ("Firefox Build System", "General") + FINAL = True diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build new file mode 100644 index 0000000000..9b21529812 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build @@ -0,0 +1,2 @@ +with Files("**"): + BUG_COMPONENT = ("Another", "Component") diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build 
b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build new file mode 100644 index 0000000000..4bbca3dc09 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build @@ -0,0 +1,2 @@ +with Files("**"): + BUG_COMPONENT = ("default_product", "default_component") diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build new file mode 100644 index 0000000000..e8b99df68d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build @@ -0,0 +1,2 @@ +with Files("*"): + BUG_COMPONENT = ("Firefox Build System", "General") diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build new file mode 100644 index 0000000000..49acf29196 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build @@ -0,0 +1,5 @@ +with Files("foo"): + BUG_COMPONENT = ("FooProduct", "FooComponent") + +with Files("bar"): + BUG_COMPONENT = ("BarProduct", "BarComponent") diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build new file mode 100644 index 0000000000..67e5fb5dce --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build @@ -0,0 +1,7 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET_PP_FILES += [ + "!foo.js", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build new file mode 100644 index 0000000000..860f025eac --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c"] + +bar = GENERATED_FILES["bar.c"] +bar.script = "/script.py:make_bar" +bar.inputs = [] diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build new file mode 100644 index 0000000000..33f54a17e8 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build @@ -0,0 +1,11 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += [ + "bar.c", + "foo.c", + ("xpidllex.py", "xpidlyacc.py"), +] +GENERATED_FILES["bar.c"].force = True +GENERATED_FILES["foo.c"].force = False diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build new file mode 100644 index 0000000000..298513383b --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build @@ -0,0 +1,13 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c", "foo.c"] + +bar = GENERATED_FILES["bar.c"] +bar.script = "script.py:make_bar" +bar.inputs = [] + +foo = GENERATED_FILES["foo.c"] +foo.script = "script.py" +foo.inputs = [] diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build new file mode 100644 index 0000000000..50f703c696 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c", "foo.c"] + +foo = GENERATED_FILES["foo.c"] +foo.script = "script.py" +foo.inputs = ["datafile"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build new file mode 100644 index 0000000000..ebdb7bfaf5 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c", "foo.c"] + +bar = GENERATED_FILES["bar.c"] +bar.script = "script.rb" diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build new file mode 100644 index 0000000000..258a0f2325 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
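
The generated-files fixtures above exercise the script-naming convention: "script.py:make_bar" selects a function inside the script, a bare "script.py" falls back to a default entry point, and a non-Python script (script.rb) must be rejected. A sketch of the split, assuming the default entry point is named main (illustrative; the real validation lives in the moz.build emitter):

    def split_generated_file_script(entry):
        # "script.py:make_bar" -> ("script.py", "make_bar");
        # "script.py" -> ("script.py", "main") under our assumption.
        script, _, method = entry.partition(":")
        if not script.endswith(".py"):
            raise ValueError(
                "GENERATED_FILES script must be a Python file: %s" % script
            )
        return script, method or "main"
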
+# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += ["bar.c", "foo.c"] + +bar = GENERATED_FILES["bar.c"] +bar.script = "nonexistent-script.py" diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build new file mode 100644 index 0000000000..97267c5d26 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +GENERATED_FILES += [ + "bar.c", + "foo.c", + ("xpidllex.py", "xpidlyacc.py"), +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build new file mode 100644 index 0000000000..e305d9d32f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build @@ -0,0 +1,39 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +SOURCES += [ + "!a.cpp", + "!b.cc", + "!c.cxx", +] + +SOURCES += [ + "!d.c", +] + +SOURCES += [ + "!e.m", +] + +SOURCES += [ + "!f.mm", +] + +SOURCES += [ + "!g.S", +] + +SOURCES += [ + "!h.s", + "!i.asm", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build new file mode 100644 index 0000000000..31f9042c0a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +LOCAL_INCLUDES += ["!/bar/baz", "!foo"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build new file mode 100644 index 0000000000..4225234c65 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build @@ -0,0 +1,22 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def HostLibrary(name): + """Template for libraries.""" + HOST_LIBRARY_NAME = name + + +HostLibrary("dummy") + +HOST_SOURCES += ["test1.c"] + +value = "xyz" +HOST_DEFINES["FOO"] = True +HOST_DEFINES["BAZ"] = '"abcd"' +HOST_DEFINES["BAR"] = 7 +HOST_DEFINES["VALUE"] = value +HOST_DEFINES["QUX"] = False + +HOST_CFLAGS += ["-funroll-loops", "-host-arg"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build new file mode 100644 index 0000000000..a2136749dc --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +FINAL_TARGET = "final/target" +HostProgram("final-target") diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build new file mode 100644 index 0000000000..0d10d35508 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +HostProgram("dist-host-bin") diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build new file mode 100644 index 0000000000..ef9175fa54 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build @@ -0,0 +1,14 @@ +# Any copyright is dedicated to the Public Domain. 
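
The generated-sources and generated_includes fixtures above rely on the moz.build path prefixes: a leading "!" marks a generated (objdir-relative) file, "!/" is topobjdir-relative, "/" is topsrcdir-relative, and anything else is relative to the current srcdir. A sketch of that dispatch (an illustration; the real logic is the frontend's Path classes):

    import os

    def resolve_path(value, srcdir, objdir, topsrcdir, topobjdir):
        # Prefix dispatch for moz.build path values, per the fixtures above.
        if value.startswith("!/"):
            return os.path.join(topobjdir, value[2:])
        if value.startswith("!"):
            return os.path.join(objdir, value[1:])
        if value.startswith("/"):
            return os.path.join(topsrcdir, value[1:])
        return os.path.join(srcdir, value)
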
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def HostProgram(name): + HOST_PROGRAM = name + + +DIRS += [ + "final-target", + "installed", + "not-installed", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build new file mode 100644 index 0000000000..4a8451bc8f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIST_INSTALL = False +HostProgram("not-installed") diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml new file mode 100644 index 0000000000..aefcab3ddb --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "host-lib" +version = "0.1.0" +authors = [ + "The Mozilla Project Developers", +] + +[lib] +crate-type = ["staticlib"] + +[profile.dev] +panic = "abort" + +[profile.release] +panic = "abort" diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build new file mode 100644 index 0000000000..37b6728ae3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build @@ -0,0 +1,22 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def HostLibrary(name): + """Template for libraries.""" + HOST_LIBRARY_NAME = name + + +@template +def HostRustLibrary(name, features=None): + """Template for Rust libraries.""" + HostLibrary(name) + + IS_RUST_LIBRARY = True + + if features: + RUST_LIBRARY_FEATURES = features + + +HostRustLibrary("host-lib") diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build new file mode 100644 index 0000000000..c60e731d99 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build @@ -0,0 +1 @@ +HOST_RUST_PROGRAMS += ["none"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml new file mode 100644 index 0000000000..dee335937f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml @@ -0,0 +1,7 @@ +[package] +authors = ["The Mozilla Project Developers"] +name = "testing" +version = "0.0.1" + +[[bin]] +name = "some" diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build new file mode 100644 index 0000000000..c60e731d99 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build @@ -0,0 +1 @@ +HOST_RUST_PROGRAMS += ["none"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml new file mode 100644 index 0000000000..dee335937f --- /dev/null +++ 
@@ -0,0 +1,7 @@
+[package]
+authors = ["The Mozilla Project Developers"]
+name = "testing"
+version = "0.0.1"
+
+[[bin]]
+name = "some"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build
new file mode 100644
index 0000000000..2d75958b07
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build
@@ -0,0 +1 @@
+HOST_RUST_PROGRAMS += ["some"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm b/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build
new file mode 100644
index 0000000000..b1f5b98039
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build
@@ -0,0 +1,27 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostLibrary(name):
+    """Template for libraries."""
+    HOST_LIBRARY_NAME = name
+
+
+HostLibrary("dummy")
+
+HOST_SOURCES += [
+    "a.cpp",
+    "b.cc",
+    "c.cxx",
+]
+
+HOST_SOURCES += [
+    "d.c",
+]
+
+HOST_SOURCES += [
+    "e.mm",
+    "f.mm",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build b/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build
new file mode 100644
index 0000000000..3532347e27
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build
new file mode 100644
index 0000000000..b8e37c69ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
+
+include("included.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build
new file mode 100644
index 0000000000..b5dc2728c6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("included-2.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build
new file mode 100644
index 0000000000..9bfc65481d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build
new file mode 100644
index 0000000000..def43513c7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("included-1.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build
new file mode 100644
index 0000000000..34129f7c93
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("missing.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build b/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build
new file mode 100644
index 0000000000..714a044436
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../moz.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build
new file mode 100644
index 0000000000..ecae03ca7d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../parent.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build
new file mode 100644
index 0000000000..36210ba96b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("grandchild/grandchild.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build
new file mode 100644
index 0000000000..76dcdb899f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../../parent.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build
new file mode 100644
index 0000000000..eb1477d0df
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build
new file mode 100644
index 0000000000..879b832ed8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("/sibling.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build
new file mode 100644
index 0000000000..eb1477d0df
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build
new file mode 100644
index 0000000000..568f361a54
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build
new file mode 100644
index 0000000000..9c392681c7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPIDL_MODULE = "baz"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build
new file mode 100644
index 0000000000..f3368867ad
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += ["baz"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build
new file mode 100644
index 0000000000..169e9d1554
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPIDL_MODULE = "foobar"
+export("XPIDL_MODULE")
+
+DIRS += ["foo", "bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build
new file mode 100644
index 0000000000..b49ec1216b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PREPROCESSED_IPDL_SOURCES += [
+    "bar1.ipdl",
+]
+
+IPDL_SOURCES += [
+    "bar.ipdl",
+    "bar2.ipdlh",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build
new file mode 100644
index 0000000000..c2e891572b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PREPROCESSED_IPDL_SOURCES += [
+    "foo1.ipdl",
+]
+
+IPDL_SOURCES += [
+    "foo.ipdl",
+    "foo2.ipdlh",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build
new file mode 100644
index 0000000000..9fe7699519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += [
+    "bar",
+    "foo",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build
new file mode 100644
index 0000000000..fa61c94006
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ["jar.mn", "other.jar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build
new file mode 100644
index 0000000000..d988c0ff9b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ["jar.mn"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build
new file mode 100644
index 0000000000..65fcc6d08e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("liba")
+LIBRARY_DEFINES["IN_LIBA"] = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build
new file mode 100644
index 0000000000..f4cf7b31a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("libb")
+FINAL_LIBRARY = "liba"
+LIBRARY_DEFINES["IN_LIBB"] = True
+USE_LIBS += ["libd"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build
new file mode 100644
index 0000000000..022a67559d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("libc")
+FINAL_LIBRARY = "libb"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build
new file mode 100644
index 0000000000..0bd94be069
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("libd")
+FORCE_STATIC_LIB = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build
new file mode 100644
index 0000000000..dcc955cf28
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+DIRS = ["liba", "libb", "libc", "libd"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build
new file mode 100644
index 0000000000..9e25efdcbf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build
@@ -0,0 +1,16 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+LDFLAGS += ["-Wl,-U_foo"]
+LDFLAGS += ["-framework Foo", "-x"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build
new file mode 100644
index 0000000000..70259db75b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["foo.h"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build
new file mode 100644
index 0000000000..6dcbab537d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["!/"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build
new file mode 100644
index 0000000000..6d8f6cd2af
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build
new file mode 100644
index 0000000000..1c29ac2ea2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/bar/baz", "foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build
new file mode 100644
index 0000000000..491a026419
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini"]
+LOCALIZED_FILES += ["!abc.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build
new file mode 100644
index 0000000000..5c3efc8117
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_FILES.foo += [
+    "en-US/bar.ini",
+    "foo.js",
+    "inner/locales/en-US/bar.ini",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build
new file mode 100644
index 0000000000..678f503174
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["abc.ini"]
+LOCALIZED_FILES += ["!abc.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js b/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build
new file mode 100644
index 0000000000..25a9030881
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_FILES.foo += [
+    "en-US/bar.ini",
+    "en-US/code/*.js",
+    "en-US/foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build
new file mode 100644
index 0000000000..48acff1447
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini"]
+FINAL_TARGET_FILES += ["!abc.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build
new file mode 100644
index 0000000000..73685545de
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini", ("bar", "baz")]
+LOCALIZED_GENERATED_FILES["abc.ini"].force = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build
new file mode 100644
index 0000000000..cc306d5991
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini", ("bar", "baz")]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build
new file mode 100644
index 0000000000..b2916a1226
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_PP_FILES.foo += [
+    "en-US/bar.ini",
+    "en-US/foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build
new file mode 100644
index 0000000000..1c29ac2ea2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/bar/baz", "foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build b/python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build
new file mode 100644
index 0000000000..e3a2a69d07
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPIDL_MODULE = "my_module"
+XPIDL_SOURCES = ["nonexistant.idl"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build
new file mode 100644
index 0000000000..7956580d14
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build
@@ -0,0 +1,29 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+
+
+Library("test")
+
+DIRS += [
+    "rust1",
+    "rust2",
+]
+
+USE_LIBS += [
+    "rust1",
+    "rust2",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml
new file mode 100644
index 0000000000..56273d5cf7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rust1"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build
new file mode 100644
index 0000000000..0cc01e1e24
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RustLibrary("rust1")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml
new file mode 100644
index 0000000000..9c557f6c08
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rust2"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build
new file mode 100644
index 0000000000..4ec4ea9c79
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RustLibrary("rust2")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build
new file mode 100644
index 0000000000..44610a781c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build
@@ -0,0 +1,4 @@
+SOURCES += [
+    "Test.c",
+    "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build
new file mode 100644
index 0000000000..b1064ae0c0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build
@@ -0,0 +1,4 @@
+SOURCES += [
+    "subdir/Test.cpp",
+    "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build
new file mode 100644
index 0000000000..a225907cae
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build
@@ -0,0 +1,7 @@
+SOURCES += [
+    "Test.c",
+]
+
+UNIFIED_SOURCES += [
+    "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build
new file mode 100644
index 0000000000..ea5da28d88
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build
@@ -0,0 +1,4 @@
+UNIFIED_SOURCES += [
+    "Test.c",
+    "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build
new file mode 100644
index 0000000000..d8b952c014
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Program("dist-bin")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build
new file mode 100644
index 0000000000..fc2f664c01
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_SUBDIR = "foo"
+Program("dist-subdir")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build
new file mode 100644
index 0000000000..a0d5805262
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = "final/target"
+Program("final-target")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build
new file mode 100644
index 0000000000..d1d087fd45
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Program(name):
+    PROGRAM = name
+
+
+DIRS += [
+    "dist-bin",
+    "dist-subdir",
+    "final-target",
+    "not-installed",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build
new file mode 100644
index 0000000000..c725ab7326
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_INSTALL = False
+Program("not-installed")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program/moz.build
new file mode 100644
index 0000000000..b3f7062732
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Program(name):
+    PROGRAM = name
+
+
+@template
+def SimplePrograms(names, ext=".cpp"):
+    SIMPLE_PROGRAMS += names
+    SOURCES += ["%s%s" % (name, ext) for name in names]
+
+
+Program("test_program")
+
+SimplePrograms(["test_program1", "test_program2"])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp b/python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp b/python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build
new file mode 100644
index 0000000000..68581574b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build
new file mode 100644
index 0000000000..0a91c4692b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build
new file mode 100644
index 0000000000..4dfba1c60f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build
new file mode 100644
index 0000000000..d0f35c4c1d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+error("Some error.")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build
new file mode 100644
index 0000000000..9bfc65481d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build
new file mode 100644
index 0000000000..603f3a7204
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("child.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build
new file mode 100644
index 0000000000..34129f7c93
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("missing.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build
new file mode 100644
index 0000000000..040c1f5df1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../include-basic/moz.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build
new file mode 100644
index 0000000000..6fc10f766a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+l = FOO
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build
new file mode 100644
index 0000000000..91845b337f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
+
+DIRS += ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build
new file mode 100644
index 0000000000..a91d38b415
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+foo = True + None
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build
new file mode 100644
index 0000000000..70a0d2c066
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+foo =
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build
new file mode 100644
index 0000000000..2e8194b223
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = "dir"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build
new file mode 100644
index 0000000000..5675031753
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["dir1", "dir2"]
+
+FOO = "bar"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1 b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2 b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build
new file mode 100644
index 0000000000..d4b9a3075d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build
@@ -0,0 +1,17 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+DEFINES["MOZ_TEST_DEFINE"] = True
+LIBRARY_DEFINES["MOZ_LIBRARY_DEFINE"] = "MOZ_TEST"
+COMPILE_FLAGS["DEFINES"] = ["-DFOO"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml
new file mode 100644
index 0000000000..fbb4ae087d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml
new file mode 100644
index 0000000000..fbb4ae087d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build
new file mode 100644
index 0000000000..ccd8ede3c0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name, features):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+    RUST_LIBRARY_FEATURES = features
+
+
+RustLibrary("random-crate", ["musthave", "cantlivewithout", "musthave"])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml
new file mode 100644
index 0000000000..fbb4ae087d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build
new file mode 100644
index 0000000000..9d88bdea08
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name, features):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+    RUST_LIBRARY_FEATURES = features
+
+
+RustLibrary("random-crate", ["musthave", "cantlivewithout"])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml
new file mode 100644
index 0000000000..3572550b76
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["dylib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml
new file mode 100644
index 0000000000..9e05fe5cb1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "deterministic-crate"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+    """Template for libraries."""
+    LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+    """Template for Rust libraries."""
+    Library(name)
+
+    IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml
new file mode 100644
index 0000000000..0934afcc4f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+    "The Mozilla Project Developers",
+]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +@template +def RustLibrary(name): + """Template for Rust libraries.""" + Library(name) + + IS_RUST_LIBRARY = True + + +RustLibrary("random-crate") diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build new file mode 100644 index 0000000000..56601854f9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build @@ -0,0 +1 @@ +RUST_PROGRAMS += ["none"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml new file mode 100644 index 0000000000..dee335937f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml @@ -0,0 +1,7 @@ +[package] +authors = ["The Mozilla Project Developers"] +name = "testing" +version = "0.0.1" + +[[bin]] +name = "some" diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build new file mode 100644 index 0000000000..56601854f9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build @@ -0,0 +1 @@ +RUST_PROGRAMS += ["none"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml new file mode 100644 index 0000000000..dee335937f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml @@ -0,0 +1,7 @@ +[package] +authors = ["The Mozilla Project Developers"] +name = "testing" +version = "0.0.1" + +[[bin]] +name = "some" diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build new file mode 100644 index 0000000000..80dc15120a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build @@ -0,0 +1 @@ +RUST_PROGRAMS += ["some"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build b/python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build new file mode 100644 index 0000000000..3f4f450d37 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build @@ -0,0 +1,19 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +with Files("*.win"): + SCHEDULES.exclusive = ["windows"] + +with Files("*.osx"): + SCHEDULES.exclusive = ["macosx"] + +with Files("win.and.osx"): + # this conflicts with the previous clause and will cause an error + # when read + SCHEDULES.exclusive = ["macosx", "windows"] + +with Files("subd/**.py"): + SCHEDULES.inclusive += ["py-lint"] + +with Files("**/*.js"): + SCHEDULES.inclusive += ["js-lint"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build b/python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build new file mode 100644 index 0000000000..b9c3bf6c74 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build @@ -0,0 +1,5 @@ +with Files("yaml.py"): + SCHEDULES.inclusive += ["yaml-lint"] + +with Files("win.js"): + SCHEDULES.exclusive = ["windows"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build new file mode 100644 index 0000000000..29abd6de5d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build @@ -0,0 +1,29 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +SOURCES += [ + "d.c", +] + +SOURCES += [ + "e.m", +] + +SOURCES += [ + "g.S", +] + +SOURCES += [ + "h.s", + "i.asm", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/sources/d.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/e.m b/python/mozbuild/mozbuild/test/frontend/data/sources/e.m new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/g.S b/python/mozbuild/mozbuild/test/frontend/data/sources/g.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/h.s b/python/mozbuild/mozbuild/test/frontend/data/sources/h.s new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm b/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build new file mode 100644 index 0000000000..e25f865f72 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build @@ -0,0 +1,39 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +SOURCES += [ + "a.cpp", + "b.cc", + "c.cxx", +] + +SOURCES += [ + "d.c", +] + +SOURCES += [ + "e.m", +] + +SOURCES += [ + "f.mm", +] + +SOURCES += [ + "g.S", +] + +SOURCES += [ + "h.s", + "i.asm", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild b/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild new file mode 100644 index 0000000000..290104bc72 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild @@ -0,0 +1,21 @@ +@template +def Template(foo, bar=[]): + SOURCES += foo + DIRS += bar + +@template +def TemplateError(foo): + ILLEGAL = foo + +@template +def TemplateGlobalVariable(): + SOURCES += illegal + +@template +def TemplateGlobalUPPERVariable(): + SOURCES += DIRS + +@template +def TemplateInherit(foo): + USE_LIBS += ['foo'] + Template(foo) diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build new file mode 100644 index 0000000000..d7f6377d0d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +TEST_HARNESS_FILES += ["foo.py"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini new file mode 100644 index 0000000000..d87114ac7d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini @@ -0,0 +1 @@ +# dummy file so the existence checks for TEST_HARNESS_FILES succeed diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py new file mode 100644 index 0000000000..d87114ac7d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py @@ -0,0 +1 @@ +# dummy file so the existence checks for TEST_HARNESS_FILES succeed diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build new file mode 100644 index 0000000000..ff3fed0ee0 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build @@ -0,0 +1,7 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +TEST_HARNESS_FILES.mochitest += ["runtests.py"] +TEST_HARNESS_FILES.mochitest += ["utils.py"] +TEST_HARNESS_FILES.testing.mochitest += ["mochitest.py"] +TEST_HARNESS_FILES.testing.mochitest += ["mochitest.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py new file mode 100644 index 0000000000..d87114ac7d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py @@ -0,0 +1 @@ +# dummy file so the existence checks for TEST_HARNESS_FILES succeed diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py new file mode 100644 index 0000000000..d87114ac7d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py @@ -0,0 +1 @@ +# dummy file so the existence checks for TEST_HARNESS_FILES succeed diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build new file mode 100644 index 0000000000..fa592c72a3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build @@ -0,0 +1,16 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def SharedLibrary(name): + LIBRARY_NAME = name + FORCE_SHARED_LIB = True + + +DIST_INSTALL = False +SharedLibrary("foo") + +TEST_HARNESS_FILES.foo.bar += [ + "!%sfoo%s" % (CONFIG["DLL_PREFIX"], CONFIG["DLL_SUFFIX"]) +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build new file mode 100644 index 0000000000..0f84eb5554 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build @@ -0,0 +1,14 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["one", "two", "three"] + + +@template +def SharedLibrary(name): + LIBRARY_NAME = name + FORCE_SHARED_LIB = True + + +SharedLibrary("cxx_shared") +USE_LIBS += ["cxx_static"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build new file mode 100644 index 0000000000..f03a34c33f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build @@ -0,0 +1,11 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + LIBRARY_NAME = name + + +Library("cxx_static") +SOURCES += ["foo.cpp"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build new file mode 100644 index 0000000000..08e26c4eb3 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +SharedLibrary("just_c_shared") +USE_LIBS += ["just_c_static"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build new file mode 100644 index 0000000000..d3bb738ba4 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build @@ -0,0 +1,11 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + LIBRARY_NAME = name + + +Library("just_c_static") +SOURCES += ["foo.c"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini new file mode 100644 index 0000000000..900f421584 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = /.well-known/foo.txt + +[test_file.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt new file mode 100644 index 0000000000..ce01362503 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt @@ -0,0 +1 @@ +hello diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build new file mode 100644 index 0000000000..5ccb97c1bb --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["absolute-support.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini new file mode 100644 index 0000000000..2f1fc406a0 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini @@ -0,0 +1,7 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +[DEFAULT] +support-files = bar.js foo.js bar.js + +[test_baz.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build new file mode 100644 index 0000000000..4cc0c3d4cf --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list new file mode 100644 index 0000000000..1caf9cc391 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list @@ -0,0 +1 @@ +!= reftest2.html reftest2-ref.html \ No newline at end of file diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build new file mode 100644 index 0000000000..8f321387af --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build @@ -0,0 +1 @@ +REFTEST_MANIFESTS += ["reftest.list"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list new file mode 100644 index 0000000000..80caf8ffa4 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list @@ -0,0 +1,2 @@ +== reftest1.html reftest1-ref.html +include included-reftest.list diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini new file mode 100644 index 0000000000..83a0cec0c6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini @@ -0,0 +1,2 @@ +[DEFAULT] +foo = bar diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build new file mode 100644 index 0000000000..486e879241 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["empty.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini new file mode 100644 index 0000000000..753cd0ec0d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini @@ -0,0 +1 @@ +[test_foo.html] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini new file mode 100644 index 0000000000..fe0af1cd86 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini @@ -0,0 +1,3 @@ +[DEFAULT] + +[include:common.ini] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build new file mode 100644 index 0000000000..4cc0c3d4cf --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html new file mode 100644 index 0000000000..18ecdcb795 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html @@ -0,0 +1 @@ + diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt new file mode 100644 index 0000000000..ce01362503 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt @@ -0,0 +1 @@ +hello diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini new file mode 100644 index 0000000000..efa2d4bc05 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini @@ -0,0 +1,2 @@ +[DEFAULT] +support-files = foo.txt diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build new file mode 100644 index 0000000000..adf2a0d91c --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["just-support.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini new file mode 100644 index 0000000000..9cf7989185 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = a11y-support/** + +[test_a11y.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini new file mode 100644 index 0000000000..a81ee3acbb --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = support1 support2 + +[test_browser.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini new file mode 100644 index 0000000000..1db07cfac9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini @@ -0,0 +1,3 @@ +[DEFAULT] + +[test_chrome.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list new file mode 100644 index 0000000000..b9d7f2685a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list @@ -0,0 +1 @@ +== crashtest1.html crashtest1-ref.html diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini new file mode 100644 index 0000000000..a7eb6def41 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini @@ -0,0 +1,3 @@ +[DEFAULT] + +[test_metro.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini new file mode 100644 index 0000000000..69fd71de0b --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini @@ -0,0 +1,5 @@ +[DEFAULT] +support-files = external1 external2 +generated-files = external1 external2 + +[test_mochitest.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build new file mode 100644 index 0000000000..9de10add3c --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build @@ -0,0 +1,12 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +A11Y_MANIFESTS += ["a11y.ini"] +BROWSER_CHROME_MANIFESTS += ["browser.ini"] +METRO_CHROME_MANIFESTS += ["metro.ini"] +MOCHITEST_MANIFESTS += ["mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["xpcshell.ini"] +REFTEST_MANIFESTS += ["reftest.list"] +CRASHTEST_MANIFESTS += ["crashtest.list"] +PYTHON_UNITTEST_MANIFESTS += ["python.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini new file mode 100644 index 0000000000..97a9db6920 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini @@ -0,0 +1 @@ +[test_foo.py] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list new file mode 100644 index 0000000000..3fc25b2966 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list @@ -0,0 +1 @@ +== reftest1.html reftest1-ref.html diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini new file mode 100644 index 0000000000..c228c24ac1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini @@ -0,0 +1,5 @@ +[DEFAULT] +head = head1 head2 +dupe-manifest = + +[test_xpcshell.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build new file mode 100644 index 0000000000..ec33a37d3d --- 
/dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +XPCSHELL_TESTS_MANIFESTS += ["does_not_exist.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build new file mode 100644 index 0000000000..d3878746bd --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +XPCSHELL_TESTS_MANIFESTS += ["xpcshell.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini new file mode 100644 index 0000000000..9ab85c0cef --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = support/** + +[missing.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini new file mode 100644 index 0000000000..e3ef6216b7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini @@ -0,0 +1 @@ +[test_missing.html] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build new file mode 100644 index 0000000000..4cc0c3d4cf --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini new file mode 100644 index 0000000000..c788224291 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini @@ -0,0 +1,4 @@ +[DEFAULT] +support-files = ../support-file.txt + +[test_foo.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build new file mode 100644 index 0000000000..275a810a5e --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["child/mochitest.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini new file mode 100644 index 0000000000..4f1335d6b1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini @@ -0,0 +1,6 @@ +[DEFAULT] +support-files = + another-file.sjs + data/** + +[test_sub.js] \ No newline at end of file diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini new file mode 100644 index 0000000000..ada59d387d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini @@ -0,0 +1,9 @@ +[DEFAULT] +support-files = + support-file.txt + !/child/test_sub.js + !/child/another-file.sjs + !/child/data/** + !/does/not/exist.sjs + +[test_foo.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build new file mode 100644 index 0000000000..9df54dbc99 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["child/browser.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini new file mode 100644 index 0000000000..4f1335d6b1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini @@ -0,0 +1,6 @@ +[DEFAULT] +support-files = + another-file.sjs + data/** + +[test_sub.js] \ No newline at end of file diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini new file mode 100644 index 0000000000..a9860f3de8 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini @@ -0,0 +1,8 @@ +[DEFAULT] +support-files = + support-file.txt + !/child/test_sub.js + !/child/another-file.sjs + !/child/data/** + +[test_foo.js] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build new file mode 100644 index 0000000000..9df54dbc99 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build @@ -0,0 +1,5 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["child/browser.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build new file mode 100644 index 0000000000..9d098e0eab --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build @@ -0,0 +1,4 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +MOCHITEST_MANIFESTS += ["test.ini"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini new file mode 100644 index 0000000000..caf3911864 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini @@ -0,0 +1,4 @@ +[DEFAULT] +generated-files = does_not_exist + +[test_foo] diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build new file mode 100644 index 0000000000..450af01d9a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build @@ -0,0 +1,12 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def SharedLibrary(name): + LIBRARY_NAME = name + FORCE_SHARED_LIB = True + + +SharedLibrary("foo") +SYMBOLS_FILE = "!foo.symbols" diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build new file mode 100644 index 0000000000..7ea07b4ee9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build @@ -0,0 +1,15 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def SharedLibrary(name): + LIBRARY_NAME = name + FORCE_SHARED_LIB = True + + +SharedLibrary("foo") +SYMBOLS_FILE = "!foo.symbols" + +GENERATED_FILES += ["foo.symbols"] +GENERATED_FILES["foo.symbols"].script = "foo.py" diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols new file mode 100644 index 0000000000..257cc5642c --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols @@ -0,0 +1 @@ +foo diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build new file mode 100644 index 0000000000..47e435dbf5 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build @@ -0,0 +1,12 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def SharedLibrary(name): + LIBRARY_NAME = name + FORCE_SHARED_LIB = True + + +SharedLibrary("foo") +SYMBOLS_FILE = "foo.symbols" diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build new file mode 100644 index 0000000000..480808eb8a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build @@ -0,0 +1,6 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS += ["regular"] +TEST_DIRS += ["test"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build new file mode 100644 index 0000000000..dbdc694a6a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["../../foo"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build new file mode 100644 index 0000000000..4b42bbc5ab --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["../bar"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build new file mode 100644 index 0000000000..68581574b1 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["foo"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build new file mode 100644 index 0000000000..f204e245b4 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["../foo"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build new file mode 100644 index 0000000000..4b42bbc5ab --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["../bar"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build new file mode 100644 index 0000000000..5a9445a6e6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["foo", "bar"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build new file mode 100644 index 0000000000..3ad8a1501d --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build @@ -0,0 +1,2 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +DIRS = ["biz"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build new file mode 100644 index 0000000000..5a9445a6e6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +DIRS = ["foo", "bar"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build new file mode 100644 index 0000000000..217e43831f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build @@ -0,0 +1,30 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +UNIFIED_SOURCES += [ + "bar.cxx", + "foo.cpp", + "quux.cc", +] + +UNIFIED_SOURCES += [ + "objc1.mm", + "objc2.mm", +] + +UNIFIED_SOURCES += [ + "c1.c", + "c2.c", +] + +FILES_PER_UNIFIED_FILE = 1 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build new file mode 100644 index 0000000000..8a86e055da --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build @@ -0,0 +1,30 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + +UNIFIED_SOURCES += [ + "bar.cxx", + "foo.cpp", + "quux.cc", +] + +UNIFIED_SOURCES += [ + "objc1.mm", + "objc2.mm", +] + +UNIFIED_SOURCES += [ + "c1.c", + "c2.c", +] + +FILES_PER_UNIFIED_FILE = 32 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build new file mode 100644 index 0000000000..63ac5283f6 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build @@ -0,0 +1,15 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + LIBRARY_NAME = name + + +Library("dummy") + +USE_NASM = True + +SOURCES += ["test1.S"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build new file mode 100644 index 0000000000..d080b00c92 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build @@ -0,0 +1,13 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +DIST_INSTALL = False + +DELAYLOAD_DLLS = ["foo.dll", "bar.dll"] + +RCFILE = "foo.rc" +RCINCLUDE = "bar.rc" +DEFFILE = "baz.def" + +WIN32_EXE_LDFLAGS += ["-subsystem:console"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build new file mode 100644 index 0000000000..630a3afd80 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build @@ -0,0 +1,21 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + + +@template +def Library(name): + """Template for libraries.""" + LIBRARY_NAME = name + + +Library("dummy") + + +@template +def NoVisibilityFlags(): + COMPILE_FLAGS["VISIBILITY"] = [] + + +UNIFIED_SOURCES += ["test1.c"] + +NoVisibilityFlags() diff --git a/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build new file mode 100644 index 0000000000..e7cf13088f --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build @@ -0,0 +1,14 @@ +# Any copyright is dedicated to the Public Domain. 
+# http://creativecommons.org/publicdomain/zero/1.0/ + +SANDBOXED_WASM_LIBRARY_NAME = "dummy" + +WASM_SOURCES += ["test1.c"] + +value = "xyz" +WASM_DEFINES["FOO"] = True +WASM_DEFINES["BAZ"] = '"abcd"' +WASM_DEFINES["BAR"] = 7 +WASM_DEFINES["VALUE"] = value +WASM_DEFINES["QUX"] = False +WASM_CFLAGS += ["-funroll-loops", "-wasm-arg"] diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build new file mode 100644 index 0000000000..e266bcb0dd --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build @@ -0,0 +1,15 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +SANDBOXED_WASM_LIBRARY_NAME = "wasmSources" + +WASM_SOURCES += [ + "a.cpp", + "b.cc", + "c.cxx", +] + +WASM_SOURCES += [ + "d.c", +] diff --git a/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build new file mode 100644 index 0000000000..f0abd45382 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build @@ -0,0 +1,5 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +XPIDL_MODULE = "xpidl_module" diff --git a/python/mozbuild/mozbuild/test/frontend/test_context.py b/python/mozbuild/mozbuild/test/frontend/test_context.py new file mode 100644 index 0000000000..fbf35e1c8c --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/test_context.py @@ -0,0 +1,736 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import os
+import unittest
+
+import six
+from mozpack import path as mozpath
+from mozunit import main
+
+from mozbuild.frontend.context import (
+    FUNCTIONS,
+    SPECIAL_VARIABLES,
+    SUBCONTEXTS,
+    VARIABLES,
+    AbsolutePath,
+    Context,
+    ContextDerivedTypedHierarchicalStringList,
+    ContextDerivedTypedList,
+    ContextDerivedTypedListWithItems,
+    ContextDerivedTypedRecord,
+    Files,
+    ObjDirPath,
+    Path,
+    SourcePath,
+)
+from mozbuild.util import StrictOrderingOnAppendListWithFlagsFactory
+
+
+class TestContext(unittest.TestCase):
+    def test_defaults(self):
+        test = Context(
+            {
+                "foo": (int, int, ""),
+                "bar": (bool, bool, ""),
+                "baz": (dict, dict, ""),
+            }
+        )
+
+        self.assertEqual(list(test), [])
+
+        self.assertEqual(test["foo"], 0)
+
+        self.assertEqual(set(test.keys()), {"foo"})
+
+        self.assertEqual(test["bar"], False)
+
+        self.assertEqual(set(test.keys()), {"foo", "bar"})
+
+        self.assertEqual(test["baz"], {})
+
+        self.assertEqual(set(test.keys()), {"foo", "bar", "baz"})
+
+        with self.assertRaises(KeyError):
+            test["qux"]
+
+        self.assertEqual(set(test.keys()), {"foo", "bar", "baz"})
+
+    def test_type_check(self):
+        test = Context(
+            {
+                "foo": (int, int, ""),
+                "baz": (dict, list, ""),
+            }
+        )
+
+        test["foo"] = 5
+
+        self.assertEqual(test["foo"], 5)
+
+        with self.assertRaises(ValueError):
+            test["foo"] = {}
+
+        self.assertEqual(test["foo"], 5)
+
+        with self.assertRaises(KeyError):
+            test["bar"] = True
+
+        test["baz"] = [("a", 1), ("b", 2)]
+
+        self.assertEqual(test["baz"], {"a": 1, "b": 2})
+
+    def test_update(self):
+        test = Context(
+            {
+                "foo": (int, int, ""),
+                "bar": (bool, bool, ""),
+                "baz": (dict, list, ""),
+            }
+        )
+
+        self.assertEqual(list(test), [])
+
+        with self.assertRaises(ValueError):
+            test.update(bar=True, foo={})
+
+        self.assertEqual(list(test), [])
+
+        test.update(bar=True, foo=1)
+
+        self.assertEqual(set(test.keys()), {"foo", "bar"})
+        self.assertEqual(test["foo"], 1)
+        self.assertEqual(test["bar"], True)
+
+        test.update([("bar", False), ("foo", 2)])
+        self.assertEqual(test["foo"], 2)
+        self.assertEqual(test["bar"], False)
+
+        test.update([("foo", 0), ("baz", {"a": 1, "b": 2})])
+        self.assertEqual(test["foo"], 0)
+        self.assertEqual(test["baz"], {"a": 1, "b": 2})
+
+        test.update([("foo", 42), ("baz", [("c", 3), ("d", 4)])])
+        self.assertEqual(test["foo"], 42)
+        self.assertEqual(test["baz"], {"c": 3, "d": 4})
+
+    def test_context_paths(self):
+        test = Context()
+
+        # Newly created context has no paths.
+        self.assertIsNone(test.main_path)
+        self.assertIsNone(test.current_path)
+        self.assertEqual(test.all_paths, set())
+        self.assertEqual(test.source_stack, [])
+
+        foo = os.path.abspath("foo")
+        test.add_source(foo)
+
+        # Adding the first source makes it the main and current path.
+        self.assertEqual(test.main_path, foo)
+        self.assertEqual(test.current_path, foo)
+        self.assertEqual(test.all_paths, set([foo]))
+        self.assertEqual(test.source_stack, [foo])
+
+        bar = os.path.abspath("bar")
+        test.add_source(bar)
+
+        # Adding the second source leaves the main and current paths alone.
+ self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, foo) + self.assertEqual(test.all_paths, set([bar, foo])) + self.assertEqual(test.source_stack, [foo]) + + qux = os.path.abspath("qux") + test.push_source(qux) + + # Pushing a source makes it the current path + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, qux) + self.assertEqual(test.all_paths, set([bar, foo, qux])) + self.assertEqual(test.source_stack, [foo, qux]) + + hoge = os.path.abspath("hoge") + test.push_source(hoge) + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, hoge) + self.assertEqual(test.all_paths, set([bar, foo, hoge, qux])) + self.assertEqual(test.source_stack, [foo, qux, hoge]) + + fuga = os.path.abspath("fuga") + + # Adding a source after pushing doesn't change the source stack + test.add_source(fuga) + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, hoge) + self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux])) + self.assertEqual(test.source_stack, [foo, qux, hoge]) + + # Adding a source twice doesn't change anything + test.add_source(qux) + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, hoge) + self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux])) + self.assertEqual(test.source_stack, [foo, qux, hoge]) + + last = test.pop_source() + + # Popping a source returns the last pushed one, not the last added one. + self.assertEqual(last, hoge) + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, qux) + self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux])) + self.assertEqual(test.source_stack, [foo, qux]) + + last = test.pop_source() + self.assertEqual(last, qux) + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, foo) + self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux])) + self.assertEqual(test.source_stack, [foo]) + + # Popping the main path is allowed. + last = test.pop_source() + self.assertEqual(last, foo) + self.assertEqual(test.main_path, foo) + self.assertIsNone(test.current_path) + self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux])) + self.assertEqual(test.source_stack, []) + + # Popping past the main path asserts. + with self.assertRaises(AssertionError): + test.pop_source() + + # Pushing after the main path was popped asserts. + with self.assertRaises(AssertionError): + test.push_source(foo) + + test = Context() + test.push_source(foo) + test.push_source(bar) + + # Pushing the same file twice is allowed. 
+ test.push_source(bar) + test.push_source(foo) + self.assertEqual(last, foo) + self.assertEqual(test.main_path, foo) + self.assertEqual(test.current_path, foo) + self.assertEqual(test.all_paths, set([bar, foo])) + self.assertEqual(test.source_stack, [foo, bar, bar, foo]) + + def test_context_dirs(self): + class Config(object): + pass + + config = Config() + config.topsrcdir = mozpath.abspath(os.curdir) + config.topobjdir = mozpath.abspath("obj") + test = Context(config=config) + foo = mozpath.abspath("foo") + test.push_source(foo) + + self.assertEqual(test.srcdir, config.topsrcdir) + self.assertEqual(test.relsrcdir, "") + self.assertEqual(test.objdir, config.topobjdir) + self.assertEqual(test.relobjdir, "") + + foobar = os.path.abspath("foo/bar") + test.push_source(foobar) + self.assertEqual(test.srcdir, mozpath.join(config.topsrcdir, "foo")) + self.assertEqual(test.relsrcdir, "foo") + self.assertEqual(test.objdir, config.topobjdir) + self.assertEqual(test.relobjdir, "") + + +class TestSymbols(unittest.TestCase): + def _verify_doc(self, doc): + # Documentation should be of the format: + # """SUMMARY LINE + # + # EXTRA PARAGRAPHS + # """ + + self.assertNotIn("\r", doc) + + lines = doc.split("\n") + + # No trailing whitespace. + for line in lines[0:-1]: + self.assertEqual(line, line.rstrip()) + + self.assertGreater(len(lines), 0) + self.assertGreater(len(lines[0].strip()), 0) + + # Last line should be empty. + self.assertEqual(lines[-1].strip(), "") + + def test_documentation_formatting(self): + for typ, inp, doc in VARIABLES.values(): + self._verify_doc(doc) + + for attr, args, doc in FUNCTIONS.values(): + self._verify_doc(doc) + + for func, typ, doc in SPECIAL_VARIABLES.values(): + self._verify_doc(doc) + + for name, cls in SUBCONTEXTS.items(): + self._verify_doc(cls.__doc__) + + for name, v in cls.VARIABLES.items(): + self._verify_doc(v[2]) + + +class TestPaths(unittest.TestCase): + @classmethod + def setUpClass(cls): + class Config(object): + pass + + cls.config = config = Config() + config.topsrcdir = mozpath.abspath(os.curdir) + config.topobjdir = mozpath.abspath("obj") + + def test_path(self): + config = self.config + ctxt1 = Context(config=config) + ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + ctxt2 = Context(config=config) + ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build")) + + path1 = Path(ctxt1, "qux") + self.assertIsInstance(path1, SourcePath) + self.assertEqual(path1, "qux") + self.assertEqual(path1.full_path, mozpath.join(config.topsrcdir, "foo", "qux")) + + path2 = Path(ctxt2, "../foo/qux") + self.assertIsInstance(path2, SourcePath) + self.assertEqual(path2, "../foo/qux") + self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "foo", "qux")) + + self.assertEqual(path1, path2) + + self.assertEqual( + path1.join("../../bar/qux").full_path, + mozpath.join(config.topsrcdir, "bar", "qux"), + ) + + path1 = Path(ctxt1, "/qux/qux") + self.assertIsInstance(path1, SourcePath) + self.assertEqual(path1, "/qux/qux") + self.assertEqual(path1.full_path, mozpath.join(config.topsrcdir, "qux", "qux")) + + path2 = Path(ctxt2, "/qux/qux") + self.assertIsInstance(path2, SourcePath) + self.assertEqual(path2, "/qux/qux") + self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "qux", "qux")) + + self.assertEqual(path1, path2) + + path1 = Path(ctxt1, "!qux") + self.assertIsInstance(path1, ObjDirPath) + self.assertEqual(path1, "!qux") + self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "foo", "qux")) + + path2 = 
Path(ctxt2, "!../foo/qux") + self.assertIsInstance(path2, ObjDirPath) + self.assertEqual(path2, "!../foo/qux") + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "foo", "qux")) + + self.assertEqual(path1, path2) + + path1 = Path(ctxt1, "!/qux/qux") + self.assertIsInstance(path1, ObjDirPath) + self.assertEqual(path1, "!/qux/qux") + self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + path2 = Path(ctxt2, "!/qux/qux") + self.assertIsInstance(path2, ObjDirPath) + self.assertEqual(path2, "!/qux/qux") + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + self.assertEqual(path1, path2) + + path1 = Path(ctxt1, path1) + self.assertIsInstance(path1, ObjDirPath) + self.assertEqual(path1, "!/qux/qux") + self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + path2 = Path(ctxt2, path2) + self.assertIsInstance(path2, ObjDirPath) + self.assertEqual(path2, "!/qux/qux") + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + self.assertEqual(path1, path2) + + path1 = Path(path1) + self.assertIsInstance(path1, ObjDirPath) + self.assertEqual(path1, "!/qux/qux") + self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + self.assertEqual(path1, path2) + + path2 = Path(path2) + self.assertIsInstance(path2, ObjDirPath) + self.assertEqual(path2, "!/qux/qux") + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + self.assertEqual(path1, path2) + + def test_source_path(self): + config = self.config + ctxt = Context(config=config) + ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + + path = SourcePath(ctxt, "qux") + self.assertEqual(path, "qux") + self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "foo", "qux")) + self.assertEqual(path.translated, mozpath.join(config.topobjdir, "foo", "qux")) + + path = SourcePath(ctxt, "../bar/qux") + self.assertEqual(path, "../bar/qux") + self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "bar", "qux")) + self.assertEqual(path.translated, mozpath.join(config.topobjdir, "bar", "qux")) + + path = SourcePath(ctxt, "/qux/qux") + self.assertEqual(path, "/qux/qux") + self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "qux", "qux")) + self.assertEqual(path.translated, mozpath.join(config.topobjdir, "qux", "qux")) + + with self.assertRaises(ValueError): + SourcePath(ctxt, "!../bar/qux") + + with self.assertRaises(ValueError): + SourcePath(ctxt, "!/qux/qux") + + path = SourcePath(path) + self.assertIsInstance(path, SourcePath) + self.assertEqual(path, "/qux/qux") + self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "qux", "qux")) + self.assertEqual(path.translated, mozpath.join(config.topobjdir, "qux", "qux")) + + path = Path(path) + self.assertIsInstance(path, SourcePath) + + def test_objdir_path(self): + config = self.config + ctxt = Context(config=config) + ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + + path = ObjDirPath(ctxt, "!qux") + self.assertEqual(path, "!qux") + self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "foo", "qux")) + + path = ObjDirPath(ctxt, "!../bar/qux") + self.assertEqual(path, "!../bar/qux") + self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "bar", "qux")) + + path = ObjDirPath(ctxt, "!/qux/qux") + self.assertEqual(path, "!/qux/qux") + self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + with 
self.assertRaises(ValueError): + path = ObjDirPath(ctxt, "../bar/qux") + + with self.assertRaises(ValueError): + path = ObjDirPath(ctxt, "/qux/qux") + + path = ObjDirPath(path) + self.assertIsInstance(path, ObjDirPath) + self.assertEqual(path, "!/qux/qux") + self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + path = Path(path) + self.assertIsInstance(path, ObjDirPath) + + def test_absolute_path(self): + config = self.config + ctxt = Context(config=config) + ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + + path = AbsolutePath(ctxt, "%/qux") + self.assertEqual(path, "%/qux") + self.assertEqual(path.full_path, "/qux") + + with self.assertRaises(ValueError): + path = AbsolutePath(ctxt, "%qux") + + def test_path_with_mixed_contexts(self): + config = self.config + ctxt1 = Context(config=config) + ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + ctxt2 = Context(config=config) + ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build")) + + path1 = Path(ctxt1, "qux") + path2 = Path(ctxt2, path1) + self.assertEqual(path2, path1) + self.assertEqual(path2, "qux") + self.assertEqual(path2.context, ctxt1) + self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "foo", "qux")) + + path1 = Path(ctxt1, "../bar/qux") + path2 = Path(ctxt2, path1) + self.assertEqual(path2, path1) + self.assertEqual(path2, "../bar/qux") + self.assertEqual(path2.context, ctxt1) + self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "bar", "qux")) + + path1 = Path(ctxt1, "/qux/qux") + path2 = Path(ctxt2, path1) + self.assertEqual(path2, path1) + self.assertEqual(path2, "/qux/qux") + self.assertEqual(path2.context, ctxt1) + self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "qux", "qux")) + + path1 = Path(ctxt1, "!qux") + path2 = Path(ctxt2, path1) + self.assertEqual(path2, path1) + self.assertEqual(path2, "!qux") + self.assertEqual(path2.context, ctxt1) + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "foo", "qux")) + + path1 = Path(ctxt1, "!../bar/qux") + path2 = Path(ctxt2, path1) + self.assertEqual(path2, path1) + self.assertEqual(path2, "!../bar/qux") + self.assertEqual(path2.context, ctxt1) + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "bar", "qux")) + + path1 = Path(ctxt1, "!/qux/qux") + path2 = Path(ctxt2, path1) + self.assertEqual(path2, path1) + self.assertEqual(path2, "!/qux/qux") + self.assertEqual(path2.context, ctxt1) + self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux")) + + def test_path_typed_list(self): + config = self.config + ctxt1 = Context(config=config) + ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + ctxt2 = Context(config=config) + ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build")) + + paths = [ + "!../bar/qux", + "!/qux/qux", + "!qux", + "../bar/qux", + "/qux/qux", + "qux", + ] + + MyList = ContextDerivedTypedList(Path) + l = MyList(ctxt1) + l += paths + + for p_str, p_path in zip(paths, l): + self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt1, p_str)) + self.assertEqual( + p_path.join("foo"), Path(ctxt1, mozpath.join(p_str, "foo")) + ) + + l2 = MyList(ctxt2) + l2 += paths + + for p_str, p_path in zip(paths, l2): + self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt2, p_str)) + + # Assigning with Paths from another context doesn't rebase them + l2 = MyList(ctxt2) + l2 += l + + for p_str, p_path in zip(paths, l2): + 
self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt1, p_str)) + + MyListWithFlags = ContextDerivedTypedListWithItems( + Path, + StrictOrderingOnAppendListWithFlagsFactory( + { + "foo": bool, + } + ), + ) + l = MyListWithFlags(ctxt1) + l += paths + + for p in paths: + l[p].foo = True + + for p_str, p_path in zip(paths, l): + self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt1, p_str)) + self.assertEqual(l[p_str].foo, True) + self.assertEqual(l[p_path].foo, True) + + def test_path_typed_hierarchy_list(self): + config = self.config + ctxt1 = Context(config=config) + ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build")) + ctxt2 = Context(config=config) + ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build")) + + paths = [ + "!../bar/qux", + "!/qux/qux", + "!qux", + "../bar/qux", + "/qux/qux", + "qux", + ] + + MyList = ContextDerivedTypedHierarchicalStringList(Path) + l = MyList(ctxt1) + l += paths + l.subdir += paths + + for _, files in l.walk(): + for p_str, p_path in zip(paths, files): + self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt1, p_str)) + self.assertEqual( + p_path.join("foo"), Path(ctxt1, mozpath.join(p_str, "foo")) + ) + + l2 = MyList(ctxt2) + l2 += paths + l2.subdir += paths + + for _, files in l2.walk(): + for p_str, p_path in zip(paths, files): + self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt2, p_str)) + + # Assigning with Paths from another context doesn't rebase them + l2 = MyList(ctxt2) + l2 += l + + for _, files in l2.walk(): + for p_str, p_path in zip(paths, files): + self.assertEqual(p_str, p_path) + self.assertEqual(p_path, Path(ctxt1, p_str)) + + +class TestTypedRecord(unittest.TestCase): + def test_fields(self): + T = ContextDerivedTypedRecord(("field1", six.text_type), ("field2", list)) + inst = T(None) + self.assertEqual(inst.field1, "") + self.assertEqual(inst.field2, []) + + inst.field1 = "foo" + inst.field2 += ["bar"] + + self.assertEqual(inst.field1, "foo") + self.assertEqual(inst.field2, ["bar"]) + + with self.assertRaises(AttributeError): + inst.field3 = [] + + def test_coercion(self): + T = ContextDerivedTypedRecord(("field1", six.text_type), ("field2", list)) + inst = T(None) + inst.field1 = 3 + inst.field2 += ("bar",) + self.assertEqual(inst.field1, "3") + self.assertEqual(inst.field2, ["bar"]) + + with self.assertRaises(TypeError): + inst.field2 = object() + + +class TestFiles(unittest.TestCase): + def test_aggregate_empty(self): + c = Context({}) + + files = {"moz.build": Files(c, "**")} + + self.assertEqual( + Files.aggregate(files), + { + "bug_component_counts": [], + "recommended_bug_component": None, + }, + ) + + def test_single_bug_component(self): + c = Context({}) + f = Files(c, "**") + f["BUG_COMPONENT"] = ("Product1", "Component1") + + files = {"moz.build": f} + self.assertEqual( + Files.aggregate(files), + { + "bug_component_counts": [(("Product1", "Component1"), 1)], + "recommended_bug_component": ("Product1", "Component1"), + }, + ) + + def test_multiple_bug_components(self): + c = Context({}) + f1 = Files(c, "**") + f1["BUG_COMPONENT"] = ("Product1", "Component1") + + f2 = Files(c, "**") + f2["BUG_COMPONENT"] = ("Product2", "Component2") + + files = {"a": f1, "b": f2, "c": f1} + self.assertEqual( + Files.aggregate(files), + { + "bug_component_counts": [ + (("Product1", "Component1"), 2), + (("Product2", "Component2"), 1), + ], + "recommended_bug_component": ("Product1", "Component1"), + }, + ) + + def 
test_no_recommended_bug_component(self): + """If there is no clear count winner, we don't recommend a bug component.""" + c = Context({}) + f1 = Files(c, "**") + f1["BUG_COMPONENT"] = ("Product1", "Component1") + + f2 = Files(c, "**") + f2["BUG_COMPONENT"] = ("Product2", "Component2") + + files = {"a": f1, "b": f2} + self.assertEqual( + Files.aggregate(files), + { + "bug_component_counts": [ + (("Product1", "Component1"), 1), + (("Product2", "Component2"), 1), + ], + "recommended_bug_component": None, + }, + ) + + def test_multiple_patterns(self): + c = Context({}) + f1 = Files(c, "a/**") + f1["BUG_COMPONENT"] = ("Product1", "Component1") + f2 = Files(c, "b/**", "a/bar") + f2["BUG_COMPONENT"] = ("Product2", "Component2") + + files = {"a/foo": f1, "a/bar": f2, "b/foo": f2} + self.assertEqual( + Files.aggregate(files), + { + "bug_component_counts": [ + (("Product2", "Component2"), 2), + (("Product1", "Component1"), 1), + ], + "recommended_bug_component": ("Product2", "Component2"), + }, + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/frontend/test_emitter.py b/python/mozbuild/mozbuild/test/frontend/test_emitter.py new file mode 100644 index 0000000000..4bbab3942a --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/test_emitter.py @@ -0,0 +1,1877 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import unittest + +import mozpack.path as mozpath +import six +from mozunit import main + +from mozbuild.frontend.context import ObjDirPath, Path +from mozbuild.frontend.data import ( + ComputedFlags, + ConfigFileSubstitution, + Defines, + DirectoryTraversal, + Exports, + FinalTargetPreprocessedFiles, + GeneratedFile, + HostProgram, + HostRustLibrary, + HostRustProgram, + HostSources, + IPDLCollection, + JARManifest, + LocalInclude, + LocalizedFiles, + LocalizedPreprocessedFiles, + Program, + RustLibrary, + RustProgram, + SharedLibrary, + SimpleProgram, + Sources, + StaticLibrary, + TestHarnessFiles, + TestManifest, + UnifiedSources, + VariablePassthru, + WasmSources, +) +from mozbuild.frontend.emitter import TreeMetadataEmitter +from mozbuild.frontend.reader import ( + BuildReader, + BuildReaderError, + SandboxValidationError, +) +from mozbuild.test.common import MockConfig + +data_path = mozpath.abspath(mozpath.dirname(__file__)) +data_path = mozpath.join(data_path, "data") + + +class TestEmitterBasic(unittest.TestCase): + def setUp(self): + self._old_env = dict(os.environ) + os.environ.pop("MOZ_OBJDIR", None) + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + def reader(self, name, enable_tests=False, extra_substs=None): + substs = dict( + ENABLE_TESTS="1" if enable_tests else "", + BIN_SUFFIX=".prog", + HOST_BIN_SUFFIX=".hostprog", + OS_TARGET="WINNT", + COMPILE_ENVIRONMENT="1", + STL_FLAGS=["-I/path/to/topobjdir/dist/stl_wrappers"], + VISIBILITY_FLAGS=["-include", "$(topsrcdir)/config/gcc_hidden.h"], + OBJ_SUFFIX="obj", + WASM_OBJ_SUFFIX="wasm", + WASM_CFLAGS=["-foo"], + ) + if extra_substs: + substs.update(extra_substs) + config = MockConfig(mozpath.join(data_path, name), extra_substs=substs) + + return BuildReader(config) + + def read_topsrcdir(self, reader, filter_common=True): + emitter = TreeMetadataEmitter(reader.config) + objs = list(emitter.emit(reader.read_topsrcdir())) + self.assertGreater(len(objs), 0) + + filtered = [] + for obj in objs: + 
if filter_common and isinstance(obj, DirectoryTraversal): + continue + + filtered.append(obj) + + return filtered + + def test_dirs_traversal_simple(self): + reader = self.reader("traversal-simple") + objs = self.read_topsrcdir(reader, filter_common=False) + self.assertEqual(len(objs), 4) + + for o in objs: + self.assertIsInstance(o, DirectoryTraversal) + self.assertTrue(os.path.isabs(o.context_main_path)) + self.assertEqual(len(o.context_all_paths), 1) + + reldirs = [o.relsrcdir for o in objs] + self.assertEqual(reldirs, ["", "foo", "foo/biz", "bar"]) + + dirs = [[d.full_path for d in o.dirs] for o in objs] + self.assertEqual( + dirs, + [ + [ + mozpath.join(reader.config.topsrcdir, "foo"), + mozpath.join(reader.config.topsrcdir, "bar"), + ], + [mozpath.join(reader.config.topsrcdir, "foo", "biz")], + [], + [], + ], + ) + + def test_traversal_all_vars(self): + reader = self.reader("traversal-all-vars") + objs = self.read_topsrcdir(reader, filter_common=False) + self.assertEqual(len(objs), 2) + + for o in objs: + self.assertIsInstance(o, DirectoryTraversal) + + reldirs = set([o.relsrcdir for o in objs]) + self.assertEqual(reldirs, set(["", "regular"])) + + for o in objs: + reldir = o.relsrcdir + + if reldir == "": + self.assertEqual( + [d.full_path for d in o.dirs], + [mozpath.join(reader.config.topsrcdir, "regular")], + ) + + def test_traversal_all_vars_enable_tests(self): + reader = self.reader("traversal-all-vars", enable_tests=True) + objs = self.read_topsrcdir(reader, filter_common=False) + self.assertEqual(len(objs), 3) + + for o in objs: + self.assertIsInstance(o, DirectoryTraversal) + + reldirs = set([o.relsrcdir for o in objs]) + self.assertEqual(reldirs, set(["", "regular", "test"])) + + for o in objs: + reldir = o.relsrcdir + + if reldir == "": + self.assertEqual( + [d.full_path for d in o.dirs], + [ + mozpath.join(reader.config.topsrcdir, "regular"), + mozpath.join(reader.config.topsrcdir, "test"), + ], + ) + + def test_config_file_substitution(self): + reader = self.reader("config-file-substitution") + objs = self.read_topsrcdir(reader) + self.assertEqual(len(objs), 2) + + self.assertIsInstance(objs[0], ConfigFileSubstitution) + self.assertIsInstance(objs[1], ConfigFileSubstitution) + + topobjdir = mozpath.abspath(reader.config.topobjdir) + self.assertEqual(objs[0].relpath, "foo") + self.assertEqual( + mozpath.normpath(objs[0].output_path), + mozpath.normpath(mozpath.join(topobjdir, "foo")), + ) + self.assertEqual( + mozpath.normpath(objs[1].output_path), + mozpath.normpath(mozpath.join(topobjdir, "bar")), + ) + + def test_variable_passthru(self): + reader = self.reader("variable-passthru") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 1) + self.assertIsInstance(objs[0], VariablePassthru) + + wanted = { + "NO_DIST_INSTALL": True, + "RCFILE": "foo.rc", + "RCINCLUDE": "bar.rc", + "WIN32_EXE_LDFLAGS": ["-subsystem:console"], + } + + variables = objs[0].variables + maxDiff = self.maxDiff + self.maxDiff = None + self.assertEqual(wanted, variables) + self.maxDiff = maxDiff + + def test_compile_flags(self): + reader = self.reader( + "compile-flags", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"} + ) + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["STL"], reader.config.substs["STL_FLAGS"]) + self.assertEqual( + flags.flags["VISIBILITY"], reader.config.substs["VISIBILITY_FLAGS"] + ) + self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], ["-Werror"]) + 
self.assertEqual(flags.flags["MOZBUILD_CFLAGS"], ["-Wall", "-funroll-loops"]) + self.assertEqual(flags.flags["MOZBUILD_CXXFLAGS"], ["-funroll-loops", "-Wall"]) + + def test_asflags(self): + reader = self.reader("asflags", extra_substs={"ASFLAGS": ["-safeseh"]}) + as_sources, sources, ldflags, lib, flags, asflags = self.read_topsrcdir(reader) + self.assertIsInstance(asflags, ComputedFlags) + self.assertEqual(asflags.flags["OS"], reader.config.substs["ASFLAGS"]) + self.assertEqual(asflags.flags["MOZBUILD"], ["-no-integrated-as"]) + + def test_debug_flags(self): + reader = self.reader( + "compile-flags", + extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": "1"}, + ) + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["DEBUG"], ["-g"]) + + def test_disable_debug_flags(self): + reader = self.reader( + "compile-flags", + extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": ""}, + ) + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["DEBUG"], []) + + def test_link_flags(self): + reader = self.reader( + "link-flags", + extra_substs={ + "OS_LDFLAGS": ["-Wl,rpath-link=/usr/lib"], + "MOZ_OPTIMIZE": "", + "MOZ_OPTIMIZE_LDFLAGS": ["-Wl,-dead_strip"], + "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"], + }, + ) + sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader) + self.assertIsInstance(ldflags, ComputedFlags) + self.assertEqual(ldflags.flags["OS"], reader.config.substs["OS_LDFLAGS"]) + self.assertEqual( + ldflags.flags["MOZBUILD"], ["-Wl,-U_foo", "-framework Foo", "-x"] + ) + self.assertEqual(ldflags.flags["OPTIMIZE"], []) + + def test_debug_ldflags(self): + reader = self.reader( + "link-flags", + extra_substs={ + "MOZ_DEBUG_SYMBOLS": "1", + "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"], + }, + ) + sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader) + self.assertIsInstance(ldflags, ComputedFlags) + self.assertEqual(ldflags.flags["OS"], reader.config.substs["MOZ_DEBUG_LDFLAGS"]) + + def test_windows_opt_link_flags(self): + reader = self.reader( + "link-flags", + extra_substs={ + "OS_ARCH": "WINNT", + "GNU_CC": "", + "MOZ_OPTIMIZE": "1", + "MOZ_DEBUG_LDFLAGS": ["-DEBUG"], + "MOZ_DEBUG_SYMBOLS": "1", + "MOZ_OPTIMIZE_FLAGS": [], + "MOZ_OPTIMIZE_LDFLAGS": [], + }, + ) + sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader) + self.assertIsInstance(ldflags, ComputedFlags) + self.assertIn("-DEBUG", ldflags.flags["OS"]) + self.assertIn("-OPT:REF,ICF", ldflags.flags["OS"]) + + def test_windows_dmd_link_flags(self): + reader = self.reader( + "link-flags", + extra_substs={ + "OS_ARCH": "WINNT", + "GNU_CC": "", + "MOZ_DMD": "1", + "MOZ_DEBUG_LDFLAGS": ["-DEBUG"], + "MOZ_DEBUG_SYMBOLS": "1", + "MOZ_OPTIMIZE": "1", + "MOZ_OPTIMIZE_FLAGS": [], + }, + ) + sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader) + self.assertIsInstance(ldflags, ComputedFlags) + self.assertEqual(ldflags.flags["OS"], ["-DEBUG", "-OPT:REF,ICF"]) + + def test_host_compile_flags(self): + reader = self.reader( + "host-compile-flags", + extra_substs={ + "HOST_CXXFLAGS": ["-Wall", "-Werror"], + "HOST_CFLAGS": ["-Werror", "-Wall"], + }, + ) + sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual( + flags.flags["HOST_CXXFLAGS"], reader.config.substs["HOST_CXXFLAGS"] + ) + self.assertEqual( + 
flags.flags["HOST_CFLAGS"], reader.config.substs["HOST_CFLAGS"] + ) + self.assertEqual( + set(flags.flags["HOST_DEFINES"]), + set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]), + ) + self.assertEqual( + flags.flags["MOZBUILD_HOST_CFLAGS"], ["-funroll-loops", "-host-arg"] + ) + self.assertEqual(flags.flags["MOZBUILD_HOST_CXXFLAGS"], []) + + def test_host_no_optimize_flags(self): + reader = self.reader( + "host-compile-flags", + extra_substs={"MOZ_OPTIMIZE": "", "MOZ_OPTIMIZE_FLAGS": ["-O2"]}, + ) + sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["HOST_OPTIMIZE"], []) + + def test_host_optimize_flags(self): + reader = self.reader( + "host-compile-flags", + extra_substs={"MOZ_OPTIMIZE": "1", "MOZ_OPTIMIZE_FLAGS": ["-O2"]}, + ) + sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O2"]) + + def test_cross_optimize_flags(self): + reader = self.reader( + "host-compile-flags", + extra_substs={ + "MOZ_OPTIMIZE": "1", + "MOZ_OPTIMIZE_FLAGS": ["-O2"], + "HOST_OPTIMIZE_FLAGS": ["-O3"], + "CROSS_COMPILE": "1", + }, + ) + sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O3"]) + + def test_host_rtl_flag(self): + reader = self.reader( + "host-compile-flags", extra_substs={"OS_ARCH": "WINNT", "MOZ_DEBUG": "1"} + ) + sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["RTL"], ["-MDd"]) + + def test_compile_flags_validation(self): + reader = self.reader("compile-flags-field-validation") + + with six.assertRaisesRegex(self, BuildReaderError, "Invalid value."): + self.read_topsrcdir(reader) + + reader = self.reader("compile-flags-type-validation") + with six.assertRaisesRegex( + self, BuildReaderError, "A list of strings must be provided" + ): + self.read_topsrcdir(reader) + + def test_compile_flags_templates(self): + reader = self.reader( + "compile-flags-templates", + extra_substs={ + "NSPR_CFLAGS": ["-I/nspr/path"], + "NSS_CFLAGS": ["-I/nss/path"], + "MOZ_JPEG_CFLAGS": ["-I/jpeg/path"], + "MOZ_PNG_CFLAGS": ["-I/png/path"], + "MOZ_ZLIB_CFLAGS": ["-I/zlib/path"], + "MOZ_PIXMAN_CFLAGS": ["-I/pixman/path"], + }, + ) + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["STL"], []) + self.assertEqual(flags.flags["VISIBILITY"], []) + self.assertEqual( + flags.flags["OS_INCLUDES"], + [ + "-I/nspr/path", + "-I/nss/path", + "-I/jpeg/path", + "-I/png/path", + "-I/zlib/path", + "-I/pixman/path", + ], + ) + + def test_disable_stl_wrapping(self): + reader = self.reader("disable-stl-wrapping") + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["STL"], []) + + def test_visibility_flags(self): + reader = self.reader("visibility-flags") + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(flags.flags["VISIBILITY"], []) + + def test_defines_in_flags(self): + reader = self.reader("compile-defines") + defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual( + 
flags.flags["LIBRARY_DEFINES"], ["-DMOZ_LIBRARY_DEFINE=MOZ_TEST"] + ) + self.assertEqual(flags.flags["DEFINES"], ["-DMOZ_TEST_DEFINE"]) + + def test_resolved_flags_error(self): + reader = self.reader("resolved-flags-error") + with six.assertRaisesRegex( + self, + BuildReaderError, + "`DEFINES` may not be set in COMPILE_FLAGS from moz.build", + ): + self.read_topsrcdir(reader) + + def test_includes_in_flags(self): + reader = self.reader("compile-includes") + defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual( + flags.flags["BASE_INCLUDES"], + ["-I%s" % reader.config.topsrcdir, "-I%s" % reader.config.topobjdir], + ) + self.assertEqual( + flags.flags["EXTRA_INCLUDES"], + ["-I%s/dist/include" % reader.config.topobjdir], + ) + self.assertEqual( + flags.flags["LOCAL_INCLUDES"], ["-I%s/subdir" % reader.config.topsrcdir] + ) + + def test_allow_compiler_warnings(self): + reader = self.reader( + "allow-compiler-warnings", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"} + ) + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], []) + + def test_disable_compiler_warnings(self): + reader = self.reader( + "disable-compiler-warnings", extra_substs={"WARNINGS_CFLAGS": "-Wall"} + ) + sources, ldflags, lib, flags = self.read_topsrcdir(reader) + self.assertEqual(flags.flags["WARNINGS_CFLAGS"], []) + + def test_use_nasm(self): + # When nasm is not available, this should raise. + reader = self.reader("use-nasm") + with six.assertRaisesRegex( + self, SandboxValidationError, "nasm is not available" + ): + self.read_topsrcdir(reader) + + # When nasm is available, this should work. + reader = self.reader( + "use-nasm", extra_substs=dict(NASM="nasm", NASM_ASFLAGS="-foo") + ) + + sources, passthru, ldflags, lib, flags, asflags = self.read_topsrcdir(reader) + + self.assertIsInstance(passthru, VariablePassthru) + self.assertIsInstance(ldflags, ComputedFlags) + self.assertIsInstance(flags, ComputedFlags) + self.assertIsInstance(asflags, ComputedFlags) + + self.assertEqual(asflags.flags["OS"], reader.config.substs["NASM_ASFLAGS"]) + + maxDiff = self.maxDiff + self.maxDiff = None + self.assertEqual( + passthru.variables, + {"AS": "nasm", "AS_DASH_C_FLAG": "", "ASOUTOPTION": "-o "}, + ) + self.maxDiff = maxDiff + + def test_generated_files(self): + reader = self.reader("generated-files") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 3) + for o in objs: + self.assertIsInstance(o, GeneratedFile) + self.assertFalse(o.localized) + self.assertFalse(o.force) + + expected = ["bar.c", "foo.c", ("xpidllex.py", "xpidlyacc.py")] + for o, f in zip(objs, expected): + expected_filename = f if isinstance(f, tuple) else (f,) + self.assertEqual(o.outputs, expected_filename) + self.assertEqual(o.script, None) + self.assertEqual(o.method, None) + self.assertEqual(o.inputs, []) + + def test_generated_files_force(self): + reader = self.reader("generated-files-force") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 3) + for o in objs: + self.assertIsInstance(o, GeneratedFile) + self.assertEqual(o.force, "bar.c" in o.outputs) + + def test_localized_generated_files(self): + reader = self.reader("localized-generated-files") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 2) + for o in objs: + self.assertIsInstance(o, GeneratedFile) + self.assertTrue(o.localized) + + expected = ["abc.ini", ("bar", "baz")] + for o, f in zip(objs, expected): + 
expected_filename = f if isinstance(f, tuple) else (f,) + self.assertEqual(o.outputs, expected_filename) + self.assertEqual(o.script, None) + self.assertEqual(o.method, None) + self.assertEqual(o.inputs, []) + + def test_localized_generated_files_force(self): + reader = self.reader("localized-generated-files-force") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 2) + for o in objs: + self.assertIsInstance(o, GeneratedFile) + self.assertTrue(o.localized) + self.assertEqual(o.force, "abc.ini" in o.outputs) + + def test_localized_files_from_generated(self): + """Test that using LOCALIZED_GENERATED_FILES and then putting the output in + LOCALIZED_FILES as an objdir path works. + """ + reader = self.reader("localized-files-from-generated") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 2) + self.assertIsInstance(objs[0], GeneratedFile) + self.assertIsInstance(objs[1], LocalizedFiles) + + def test_localized_files_not_localized_generated(self): + """Test that using GENERATED_FILES and then putting the output in + LOCALIZED_FILES as an objdir path produces an error. + """ + reader = self.reader("localized-files-not-localized-generated") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:", + ): + self.read_topsrcdir(reader) + + def test_localized_generated_files_final_target_files(self): + """Test that using LOCALIZED_GENERATED_FILES and then putting the output in + FINAL_TARGET_FILES as an objdir path produces an error. + """ + reader = self.reader("localized-generated-files-final-target-files") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:", + ): + self.read_topsrcdir(reader) + + def test_generated_files_method_names(self): + reader = self.reader("generated-files-method-names") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 2) + for o in objs: + self.assertIsInstance(o, GeneratedFile) + + expected = ["bar.c", "foo.c"] + expected_method_names = ["make_bar", "main"] + for o, expected_filename, expected_method in zip( + objs, expected, expected_method_names + ): + self.assertEqual(o.outputs, (expected_filename,)) + self.assertEqual(o.method, expected_method) + self.assertEqual(o.inputs, []) + + def test_generated_files_absolute_script(self): + reader = self.reader("generated-files-absolute-script") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 1) + + o = objs[0] + self.assertIsInstance(o, GeneratedFile) + self.assertEqual(o.outputs, ("bar.c",)) + self.assertRegex(o.script, "script.py$") + self.assertEqual(o.method, "make_bar") + self.assertEqual(o.inputs, []) + + def test_generated_files_no_script(self): + reader = self.reader("generated-files-no-script") + with six.assertRaisesRegex( + self, SandboxValidationError, "Script for generating bar.c does not exist" + ): + self.read_topsrcdir(reader) + + def test_generated_files_no_inputs(self): + reader = self.reader("generated-files-no-inputs") + with six.assertRaisesRegex( + self, SandboxValidationError, "Input for generating foo.c does not exist" + ): + self.read_topsrcdir(reader) + + def test_generated_files_no_python_script(self): + reader = self.reader("generated-files-no-python-script") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Script for generating bar.c does not end in .py", + ): + self.read_topsrcdir(reader) + + def test_exports(self): + reader = 
self.reader("exports") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 1) + self.assertIsInstance(objs[0], Exports) + + expected = [ + ("", ["foo.h", "bar.h", "baz.h"]), + ("mozilla", ["mozilla1.h", "mozilla2.h"]), + ("mozilla/dom", ["dom1.h", "dom2.h", "dom3.h"]), + ("mozilla/gfx", ["gfx.h"]), + ("nspr/private", ["pprio.h", "pprthred.h"]), + ("vpx", ["mem.h", "mem2.h"]), + ] + for (expect_path, expect_headers), (actual_path, actual_headers) in zip( + expected, [(path, list(seq)) for path, seq in objs[0].files.walk()] + ): + self.assertEqual(expect_path, actual_path) + self.assertEqual(expect_headers, actual_headers) + + def test_exports_missing(self): + """ + Missing files in EXPORTS is an error. + """ + reader = self.reader("exports-missing") + with six.assertRaisesRegex( + self, SandboxValidationError, "File listed in EXPORTS does not exist:" + ): + self.read_topsrcdir(reader) + + def test_exports_missing_generated(self): + """ + An objdir file in EXPORTS that is not in GENERATED_FILES is an error. + """ + reader = self.reader("exports-missing-generated") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Objdir file listed in EXPORTS not in GENERATED_FILES:", + ): + self.read_topsrcdir(reader) + + def test_exports_generated(self): + reader = self.reader("exports-generated") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 2) + self.assertIsInstance(objs[0], GeneratedFile) + self.assertIsInstance(objs[1], Exports) + exports = [(path, list(seq)) for path, seq in objs[1].files.walk()] + self.assertEqual( + exports, [("", ["foo.h"]), ("mozilla", ["mozilla1.h", "!mozilla2.h"])] + ) + path, files = exports[1] + self.assertIsInstance(files[1], ObjDirPath) + + def test_test_harness_files(self): + reader = self.reader("test-harness-files") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 1) + self.assertIsInstance(objs[0], TestHarnessFiles) + + expected = { + "mochitest": ["runtests.py", "utils.py"], + "testing/mochitest": ["mochitest.py", "mochitest.ini"], + } + + for path, strings in objs[0].files.walk(): + self.assertTrue(path in expected) + basenames = sorted(mozpath.basename(s) for s in strings) + self.assertEqual(sorted(expected[path]), basenames) + + def test_test_harness_files_root(self): + reader = self.reader("test-harness-files-root") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Cannot install files to the root of TEST_HARNESS_FILES", + ): + self.read_topsrcdir(reader) + + def test_program(self): + reader = self.reader("program") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 6) + self.assertIsInstance(objs[0], Sources) + self.assertIsInstance(objs[1], ComputedFlags) + self.assertIsInstance(objs[2], ComputedFlags) + self.assertIsInstance(objs[3], Program) + self.assertIsInstance(objs[4], SimpleProgram) + self.assertIsInstance(objs[5], SimpleProgram) + + self.assertEqual(objs[3].program, "test_program.prog") + self.assertEqual(objs[4].program, "test_program1.prog") + self.assertEqual(objs[5].program, "test_program2.prog") + + self.assertEqual(objs[3].name, "test_program.prog") + self.assertEqual(objs[4].name, "test_program1.prog") + self.assertEqual(objs[5].name, "test_program2.prog") + + self.assertEqual( + objs[4].objs, + [ + mozpath.join( + reader.config.topobjdir, + "test_program1.%s" % reader.config.substs["OBJ_SUFFIX"], + ) + ], + ) + self.assertEqual( + objs[5].objs, + [ + mozpath.join( + reader.config.topobjdir, + "test_program2.%s" % 
reader.config.substs["OBJ_SUFFIX"], + ) + ], + ) + + def test_program_paths(self): + """Various moz.build settings that change the destination of PROGRAM should be + accurately reflected in Program.output_path.""" + reader = self.reader("program-paths") + objs = self.read_topsrcdir(reader) + prog_paths = [o.output_path for o in objs if isinstance(o, Program)] + self.assertEqual( + prog_paths, + [ + "!/dist/bin/dist-bin.prog", + "!/dist/bin/foo/dist-subdir.prog", + "!/final/target/final-target.prog", + "!not-installed.prog", + ], + ) + + def test_host_program_paths(self): + """The destination of a HOST_PROGRAM (almost always dist/host/bin) + should be accurately reflected in Program.output_path.""" + reader = self.reader("host-program-paths") + objs = self.read_topsrcdir(reader) + prog_paths = [o.output_path for o in objs if isinstance(o, HostProgram)] + self.assertEqual( + prog_paths, + [ + "!/dist/host/bin/final-target.hostprog", + "!/dist/host/bin/dist-host-bin.hostprog", + "!not-installed.hostprog", + ], + ) + + def test_test_manifest_missing_manifest(self): + """A missing manifest file should result in an error.""" + reader = self.reader("test-manifest-missing-manifest") + + with six.assertRaisesRegex(self, BuildReaderError, "Missing files"): + self.read_topsrcdir(reader) + + def test_empty_test_manifest_rejected(self): + """A test manifest without any entries is rejected.""" + reader = self.reader("test-manifest-empty") + + with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"): + self.read_topsrcdir(reader) + + def test_test_manifest_just_support_files(self): + """A test manifest with no tests but support-files is not supported.""" + reader = self.reader("test-manifest-just-support") + + with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"): + self.read_topsrcdir(reader) + + def test_test_manifest_dupe_support_files(self): + """A test manifest with dupe support-files in a single test is not + supported. + """ + reader = self.reader("test-manifest-dupes") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "bar.js appears multiple times " + "in a test manifest under a support-files field, please omit the duplicate entry.", + ): + self.read_topsrcdir(reader) + + def test_test_manifest_absolute_support_files(self): + """Support files starting with '/' are placed relative to the install root""" + reader = self.reader("test-manifest-absolute-support") + + objs = self.read_topsrcdir(reader) + self.assertEqual(len(objs), 1) + o = objs[0] + self.assertEqual(len(o.installs), 3) + expected = [ + mozpath.normpath(mozpath.join(o.install_prefix, "../.well-known/foo.txt")), + mozpath.join(o.install_prefix, "absolute-support.ini"), + mozpath.join(o.install_prefix, "test_file.js"), + ] + paths = sorted([v[0] for v in o.installs.values()]) + self.assertEqual(paths, expected) + + @unittest.skip("Bug 1304316 - Items in the second set but not the first") + def test_test_manifest_shared_support_files(self): + """Support files starting with '!' are given separate treatment, so their + installation can be resolved when running tests. 
+ """ + reader = self.reader("test-manifest-shared-support") + supported, child = self.read_topsrcdir(reader) + + expected_deferred_installs = { + "!/child/test_sub.js", + "!/child/another-file.sjs", + "!/child/data/**", + } + + self.assertEqual(len(supported.installs), 3) + self.assertEqual(set(supported.deferred_installs), expected_deferred_installs) + self.assertEqual(len(child.installs), 3) + self.assertEqual(len(child.pattern_installs), 1) + + def test_test_manifest_deffered_install_missing(self): + """A non-existent shared support file reference produces an error.""" + reader = self.reader("test-manifest-shared-missing") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "entry in support-files not present in the srcdir", + ): + self.read_topsrcdir(reader) + + def test_test_manifest_install_includes(self): + """Ensure that any [include:foo.ini] are copied to the objdir.""" + reader = self.reader("test-manifest-install-includes") + + objs = self.read_topsrcdir(reader) + self.assertEqual(len(objs), 1) + o = objs[0] + self.assertEqual(len(o.installs), 3) + self.assertEqual(o.manifest_relpath, "mochitest.ini") + self.assertEqual(o.manifest_obj_relpath, "mochitest.ini") + expected = [ + mozpath.normpath(mozpath.join(o.install_prefix, "common.ini")), + mozpath.normpath(mozpath.join(o.install_prefix, "mochitest.ini")), + mozpath.normpath(mozpath.join(o.install_prefix, "test_foo.html")), + ] + paths = sorted([v[0] for v in o.installs.values()]) + self.assertEqual(paths, expected) + + def test_test_manifest_includes(self): + """Ensure that manifest objects from the emitter list a correct manifest.""" + reader = self.reader("test-manifest-emitted-includes") + [obj] = self.read_topsrcdir(reader) + + # Expected manifest leafs for our tests. 
+ expected_manifests = { + "reftest1.html": "reftest.list", + "reftest1-ref.html": "reftest.list", + "reftest2.html": "included-reftest.list", + "reftest2-ref.html": "included-reftest.list", + } + + for t in obj.tests: + self.assertTrue(t["manifest"].endswith(expected_manifests[t["name"]])) + + def test_test_manifest_keys_extracted(self): + """Ensure all metadata from test manifests is extracted.""" + reader = self.reader("test-manifest-keys-extracted") + + objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)] + + self.assertEqual(len(objs), 8) + + metadata = { + "a11y.ini": { + "flavor": "a11y", + "installs": {"a11y.ini": False, "test_a11y.js": True}, + "pattern-installs": 1, + }, + "browser.ini": { + "flavor": "browser-chrome", + "installs": { + "browser.ini": False, + "test_browser.js": True, + "support1": False, + "support2": False, + }, + }, + "mochitest.ini": { + "flavor": "mochitest", + "installs": {"mochitest.ini": False, "test_mochitest.js": True}, + "external": {"external1", "external2"}, + }, + "chrome.ini": { + "flavor": "chrome", + "installs": {"chrome.ini": False, "test_chrome.js": True}, + }, + "xpcshell.ini": { + "flavor": "xpcshell", + "dupe": True, + "installs": { + "xpcshell.ini": False, + "test_xpcshell.js": True, + "head1": False, + "head2": False, + }, + }, + "reftest.list": {"flavor": "reftest", "installs": {}}, + "crashtest.list": {"flavor": "crashtest", "installs": {}}, + "python.ini": {"flavor": "python", "installs": {"python.ini": False}}, + } + + for o in objs: + m = metadata[mozpath.basename(o.manifest_relpath)] + + self.assertTrue(o.path.startswith(o.directory)) + self.assertEqual(o.flavor, m["flavor"]) + self.assertEqual(o.dupe_manifest, m.get("dupe", False)) + + external_normalized = set(mozpath.basename(p) for p in o.external_installs) + self.assertEqual(external_normalized, m.get("external", set())) + + self.assertEqual(len(o.installs), len(m["installs"])) + for path in o.installs.keys(): + self.assertTrue(path.startswith(o.directory)) + relpath = path[len(o.directory) + 1 :] + + self.assertIn(relpath, m["installs"]) + self.assertEqual(o.installs[path][1], m["installs"][relpath]) + + if "pattern-installs" in m: + self.assertEqual(len(o.pattern_installs), m["pattern-installs"]) + + def test_test_manifest_unmatched_generated(self): + reader = self.reader("test-manifest-unmatched-generated") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "entry in generated-files not present elsewhere", + ): + self.read_topsrcdir(reader), + + def test_test_manifest_parent_support_files_dir(self): + """support-files referencing a file in a parent directory works.""" + reader = self.reader("test-manifest-parent-support-files-dir") + + objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)] + + self.assertEqual(len(objs), 1) + + o = objs[0] + + expected = mozpath.join(o.srcdir, "support-file.txt") + self.assertIn(expected, o.installs) + self.assertEqual( + o.installs[expected], + ("testing/mochitest/tests/child/support-file.txt", False), + ) + + def test_test_manifest_missing_test_error(self): + """Missing test files should result in error.""" + reader = self.reader("test-manifest-missing-test-file") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "lists test that does not exist: test_missing.html", + ): + self.read_topsrcdir(reader) + + def test_test_manifest_missing_test_error_unfiltered(self): + """Missing test files should result in error, even when the test list is not filtered.""" + 
reader = self.reader("test-manifest-missing-test-file-unfiltered") + + with six.assertRaisesRegex( + self, SandboxValidationError, "lists test that does not exist: missing.js" + ): + self.read_topsrcdir(reader) + + def test_ipdl_sources(self): + reader = self.reader( + "ipdl_sources", + extra_substs={"IPDL_ROOT": mozpath.abspath("/path/to/topobjdir")}, + ) + objs = self.read_topsrcdir(reader) + ipdl_collection = objs[0] + self.assertIsInstance(ipdl_collection, IPDLCollection) + + ipdls = set( + mozpath.relpath(p, ipdl_collection.topsrcdir) + for p in ipdl_collection.all_regular_sources() + ) + expected = set( + ["bar/bar.ipdl", "bar/bar2.ipdlh", "foo/foo.ipdl", "foo/foo2.ipdlh"] + ) + + self.assertEqual(ipdls, expected) + + pp_ipdls = set( + mozpath.relpath(p, ipdl_collection.topsrcdir) + for p in ipdl_collection.all_preprocessed_sources() + ) + expected = set(["bar/bar1.ipdl", "foo/foo1.ipdl"]) + self.assertEqual(pp_ipdls, expected) + + def test_local_includes(self): + """Test that LOCAL_INCLUDES is emitted correctly.""" + reader = self.reader("local_includes") + objs = self.read_topsrcdir(reader) + + local_includes = [o.path for o in objs if isinstance(o, LocalInclude)] + expected = ["/bar/baz", "foo"] + + self.assertEqual(local_includes, expected) + + local_includes = [o.path.full_path for o in objs if isinstance(o, LocalInclude)] + expected = [ + mozpath.join(reader.config.topsrcdir, "bar/baz"), + mozpath.join(reader.config.topsrcdir, "foo"), + ] + + self.assertEqual(local_includes, expected) + + def test_local_includes_invalid(self): + """Test that invalid LOCAL_INCLUDES are properly detected.""" + reader = self.reader("local_includes-invalid/srcdir") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Path specified in LOCAL_INCLUDES.*resolves to the " + "topsrcdir or topobjdir", + ): + self.read_topsrcdir(reader) + + reader = self.reader("local_includes-invalid/objdir") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Path specified in LOCAL_INCLUDES.*resolves to the " + "topsrcdir or topobjdir", + ): + self.read_topsrcdir(reader) + + def test_local_includes_file(self): + """Test that a filename can't be used in LOCAL_INCLUDES.""" + reader = self.reader("local_includes-filename") + + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Path specified in LOCAL_INCLUDES is a filename", + ): + self.read_topsrcdir(reader) + + def test_generated_includes(self): + """Test that GENERATED_INCLUDES is emitted correctly.""" + reader = self.reader("generated_includes") + objs = self.read_topsrcdir(reader) + + generated_includes = [o.path for o in objs if isinstance(o, LocalInclude)] + expected = ["!/bar/baz", "!foo"] + + self.assertEqual(generated_includes, expected) + + generated_includes = [ + o.path.full_path for o in objs if isinstance(o, LocalInclude) + ] + expected = [ + mozpath.join(reader.config.topobjdir, "bar/baz"), + mozpath.join(reader.config.topobjdir, "foo"), + ] + + self.assertEqual(generated_includes, expected) + + def test_defines(self): + reader = self.reader("defines") + objs = self.read_topsrcdir(reader) + + defines = {} + for o in objs: + if isinstance(o, Defines): + defines = o.defines + + expected = { + "BAR": 7, + "BAZ": '"abcd"', + "FOO": True, + "VALUE": "xyz", + "QUX": False, + } + + self.assertEqual(defines, expected) + + def test_jar_manifests(self): + reader = self.reader("jar-manifests") + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 1) + for obj in objs: + self.assertIsInstance(obj, 
JARManifest) + self.assertIsInstance(obj.path, Path) + + def test_jar_manifests_multiple_files(self): + with six.assertRaisesRegex( + self, SandboxValidationError, "limited to one value" + ): + reader = self.reader("jar-manifests-multiple-files") + self.read_topsrcdir(reader) + + def test_xpidl_module_no_sources(self): + """XPIDL_MODULE without XPIDL_SOURCES should be rejected.""" + with six.assertRaisesRegex( + self, SandboxValidationError, "XPIDL_MODULE " "cannot be defined" + ): + reader = self.reader("xpidl-module-no-sources") + self.read_topsrcdir(reader) + + def test_xpidl_module_missing_sources(self): + """Missing XPIDL_SOURCES should be rejected.""" + with six.assertRaisesRegex( + self, SandboxValidationError, "File .* " "from XPIDL_SOURCES does not exist" + ): + reader = self.reader("missing-xpidl") + self.read_topsrcdir(reader) + + def test_missing_local_includes(self): + """LOCAL_INCLUDES containing non-existent directories should be rejected.""" + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Path specified in " "LOCAL_INCLUDES does not exist", + ): + reader = self.reader("missing-local-includes") + self.read_topsrcdir(reader) + + def test_library_defines(self): + """Test that LIBRARY_DEFINES is propagated properly.""" + reader = self.reader("library-defines") + objs = self.read_topsrcdir(reader) + + libraries = [o for o in objs if isinstance(o, StaticLibrary)] + library_flags = [ + o + for o in objs + if isinstance(o, ComputedFlags) and "LIBRARY_DEFINES" in o.flags + ] + expected = { + "liba": "-DIN_LIBA", + "libb": "-DIN_LIBB -DIN_LIBA", + "libc": "-DIN_LIBA -DIN_LIBB", + "libd": "", + } + defines = {} + for lib in libraries: + defines[lib.basename] = " ".join(lib.lib_defines.get_defines()) + self.assertEqual(expected, defines) + defines_in_flags = {} + for flags in library_flags: + defines_in_flags[flags.relobjdir] = " ".join( + flags.flags["LIBRARY_DEFINES"] or [] + ) + self.assertEqual(expected, defines_in_flags) + + def test_sources(self): + """Test that SOURCES works properly.""" + reader = self.reader("sources") + objs = self.read_topsrcdir(reader) + + as_flags = objs.pop() + self.assertIsInstance(as_flags, ComputedFlags) + computed_flags = objs.pop() + self.assertIsInstance(computed_flags, ComputedFlags) + # The third to last object is a Linkable. + linkable = objs.pop() + self.assertTrue(linkable.cxx_link) + ld_flags = objs.pop() + self.assertIsInstance(ld_flags, ComputedFlags) + self.assertEqual(len(objs), 6) + for o in objs: + self.assertIsInstance(o, Sources) + + suffix_map = {obj.canonical_suffix: obj for obj in objs} + self.assertEqual(len(suffix_map), 6) + + expected = { + ".cpp": ["a.cpp", "b.cc", "c.cxx"], + ".c": ["d.c"], + ".m": ["e.m"], + ".mm": ["f.mm"], + ".S": ["g.S"], + ".s": ["h.s", "i.asm"], + } + for suffix, files in expected.items(): + sources = suffix_map[suffix] + self.assertEqual( + sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files] + ) + + for f in files: + self.assertIn( + mozpath.join( + reader.config.topobjdir, + "%s.%s" + % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]), + ), + linkable.objs, + ) + + def test_sources_just_c(self): + """Test that a linkable with no C++ sources doesn't have cxx_link set.""" + reader = self.reader("sources-just-c") + objs = self.read_topsrcdir(reader) + + as_flags = objs.pop() + self.assertIsInstance(as_flags, ComputedFlags) + flags = objs.pop() + self.assertIsInstance(flags, ComputedFlags) + # The third to last object is a Linkable. 
+ linkable = objs.pop() + self.assertFalse(linkable.cxx_link) + + def test_linkables_cxx_link(self): + """Test that linkables transitively set cxx_link properly.""" + reader = self.reader("test-linkables-cxx-link") + got_results = 0 + for obj in self.read_topsrcdir(reader): + if isinstance(obj, SharedLibrary): + if obj.basename == "cxx_shared": + self.assertEqual( + obj.name, + "%scxx_shared%s" + % (reader.config.dll_prefix, reader.config.dll_suffix), + ) + self.assertTrue(obj.cxx_link) + got_results += 1 + elif obj.basename == "just_c_shared": + self.assertEqual( + obj.name, + "%sjust_c_shared%s" + % (reader.config.dll_prefix, reader.config.dll_suffix), + ) + self.assertFalse(obj.cxx_link) + got_results += 1 + self.assertEqual(got_results, 2) + + def test_generated_sources(self): + """Test that GENERATED_SOURCES works properly.""" + reader = self.reader("generated-sources") + objs = self.read_topsrcdir(reader) + + as_flags = objs.pop() + self.assertIsInstance(as_flags, ComputedFlags) + flags = objs.pop() + self.assertIsInstance(flags, ComputedFlags) + # The third to last object is a Linkable. + linkable = objs.pop() + self.assertTrue(linkable.cxx_link) + flags = objs.pop() + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual(len(objs), 6) + + generated_sources = [ + o for o in objs if isinstance(o, Sources) and o.generated_files + ] + self.assertEqual(len(generated_sources), 6) + + suffix_map = {obj.canonical_suffix: obj for obj in generated_sources} + self.assertEqual(len(suffix_map), 6) + + expected = { + ".cpp": ["a.cpp", "b.cc", "c.cxx"], + ".c": ["d.c"], + ".m": ["e.m"], + ".mm": ["f.mm"], + ".S": ["g.S"], + ".s": ["h.s", "i.asm"], + } + for suffix, files in expected.items(): + sources = suffix_map[suffix] + self.assertEqual( + sources.generated_files, + [mozpath.join(reader.config.topobjdir, f) for f in files], + ) + + for f in files: + self.assertIn( + mozpath.join( + reader.config.topobjdir, + "%s.%s" + % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]), + ), + linkable.objs, + ) + + def test_host_sources(self): + """Test that HOST_SOURCES works properly.""" + reader = self.reader("host-sources") + objs = self.read_topsrcdir(reader) + + # This objdir will generate target flags. + flags = objs.pop() + self.assertIsInstance(flags, ComputedFlags) + # The second to last object is a Linkable + linkable = objs.pop() + self.assertTrue(linkable.cxx_link) + # This objdir will also generate host flags. + host_flags = objs.pop() + self.assertIsInstance(host_flags, ComputedFlags) + # ...and ldflags. 
+ ldflags = objs.pop() + self.assertIsInstance(ldflags, ComputedFlags) + self.assertEqual(len(objs), 3) + for o in objs: + self.assertIsInstance(o, HostSources) + + suffix_map = {obj.canonical_suffix: obj for obj in objs} + self.assertEqual(len(suffix_map), 3) + + expected = { + ".cpp": ["a.cpp", "b.cc", "c.cxx"], + ".c": ["d.c"], + ".mm": ["e.mm", "f.mm"], + } + for suffix, files in expected.items(): + sources = suffix_map[suffix] + self.assertEqual( + sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files] + ) + + for f in files: + self.assertIn( + mozpath.join( + reader.config.topobjdir, + "host_%s.%s" + % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]), + ), + linkable.objs, + ) + + def test_wasm_sources(self): + """Test that WASM_SOURCES works properly.""" + reader = self.reader( + "wasm-sources", extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"} + ) + objs = list(self.read_topsrcdir(reader)) + + # The second to last object is a linkable. + linkable = objs[-2] + # Other than that, we only care about the WasmSources objects. + objs = objs[:2] + for o in objs: + self.assertIsInstance(o, WasmSources) + + suffix_map = {obj.canonical_suffix: obj for obj in objs} + self.assertEqual(len(suffix_map), 2) + + expected = {".cpp": ["a.cpp", "b.cc", "c.cxx"], ".c": ["d.c"]} + for suffix, files in expected.items(): + sources = suffix_map[suffix] + self.assertEqual( + sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files] + ) + for f in files: + self.assertIn( + mozpath.join( + reader.config.topobjdir, + "%s.%s" + % ( + mozpath.splitext(f)[0], + reader.config.substs["WASM_OBJ_SUFFIX"], + ), + ), + linkable.objs, + ) + + def test_unified_sources(self): + """Test that UNIFIED_SOURCES works properly.""" + reader = self.reader("unified-sources") + objs = self.read_topsrcdir(reader) + + # The last object is a ComputedFlags, the second to last a Linkable, + # followed by ldflags, ignore them. + linkable = objs[-2] + objs = objs[:-3] + self.assertEqual(len(objs), 3) + for o in objs: + self.assertIsInstance(o, UnifiedSources) + + suffix_map = {obj.canonical_suffix: obj for obj in objs} + self.assertEqual(len(suffix_map), 3) + + expected = { + ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"], + ".mm": ["objc1.mm", "objc2.mm"], + ".c": ["c1.c", "c2.c"], + } + for suffix, files in expected.items(): + sources = suffix_map[suffix] + self.assertEqual( + sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files] + ) + + # Unified sources are not required + if sources.have_unified_mapping: + + for f in dict(sources.unified_source_mapping).keys(): + self.assertIn( + mozpath.join( + reader.config.topobjdir, + "%s.%s" + % ( + mozpath.splitext(f)[0], + reader.config.substs["OBJ_SUFFIX"], + ), + ), + linkable.objs, + ) + + def test_unified_sources_non_unified(self): + """Test that UNIFIED_SOURCES with FILES_PER_UNIFIED_FILE=1 works properly.""" + reader = self.reader("unified-sources-non-unified") + objs = self.read_topsrcdir(reader) + + # The last object is a Linkable, the second to last ComputedFlags, + # followed by ldflags, ignore them. 
+        objs = objs[:-3]
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, UnifiedSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+            ".mm": ["objc1.mm", "objc2.mm"],
+            ".c": ["c1.c", "c2.c"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            self.assertFalse(sources.have_unified_mapping)
+
+    def test_object_conflicts(self):
+        """Test that object name conflicts are detected."""
+        reader = self.reader("object-conflicts/1")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from SOURCES would have the same object name as"
+            r" Test.c from SOURCES\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/2")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from SOURCES would have the same object name as"
+            r" subdir/Test.cpp from SOURCES\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/3")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+            r" Test.c from SOURCES in non-unified builds\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/4")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+            r" Test.c from UNIFIED_SOURCES in non-unified builds\.",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_final_target_pp_files(self):
+        """Test that FINAL_TARGET_PP_FILES works properly."""
+        reader = self.reader("dist-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], FinalTargetPreprocessedFiles)
+
+        # Ideally we'd test hierarchies, but that would just be testing
+        # the HierarchicalStringList class, which we test separately.
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "")
+            self.assertEqual(len(files), 2)
+
+            expected = {"install.rdf", "main.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_missing_final_target_pp_files(self):
+        """Test that FINAL_TARGET_PP_FILES with missing files throws errors."""
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "File listed in " "FINAL_TARGET_PP_FILES does not exist",
+        ):
+            reader = self.reader("dist-files-missing")
+            self.read_topsrcdir(reader)
+
+    def test_final_target_pp_files_non_srcdir(self):
+        """Test that non-srcdir paths in FINAL_TARGET_PP_FILES throw errors."""
+        reader = self.reader("final-target-pp-files-non-srcdir")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Only source directory paths allowed in FINAL_TARGET_PP_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_files(self):
+        """Test that LOCALIZED_FILES works properly."""
+        reader = self.reader("localized-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], LocalizedFiles)
+
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "foo")
+            self.assertEqual(len(files), 3)
+
+            expected = {"en-US/bar.ini", "en-US/code/*.js", "en-US/foo.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_localized_files_no_en_us(self):
+        """Test that LOCALIZED_FILES errors if a path does not start with
+        `en-US/` or contain `locales/en-US/`."""
+        reader = self.reader("localized-files-no-en-us")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: "
+            "foo.js",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_pp_files(self):
+        """Test that LOCALIZED_PP_FILES works properly."""
+        reader = self.reader("localized-pp-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], LocalizedPreprocessedFiles)
+
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "foo")
+            self.assertEqual(len(files), 2)
+
+            expected = {"en-US/bar.ini", "en-US/foo.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_rust_library_no_cargo_toml(self):
+        """Test that defining a RustLibrary without a Cargo.toml fails."""
+        reader = self.reader("rust-library-no-cargo-toml")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_name_mismatch(self):
+        """Test that defining a RustLibrary that doesn't match Cargo.toml fails."""
+        reader = self.reader("rust-library-name-mismatch")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "library.*does not match Cargo.toml-defined package",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_no_lib_section(self):
+        """Test that a RustLibrary Cargo.toml with no [lib] section fails."""
+        reader = self.reader("rust-library-no-lib-section")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cargo.toml for.* has no \\[lib\\] section"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_invalid_crate_type(self):
+        """Test that a RustLibrary Cargo.toml has a permitted crate-type."""
+        reader = self.reader("rust-library-invalid-crate-type")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "crate-type.* is not permitted"
+        ):
+            self.read_topsrcdir(reader)
+
+    def
test_rust_library_dash_folding(self): + """Test that on-disk names of RustLibrary objects convert dashes to underscores.""" + reader = self.reader( + "rust-library-dash-folding", + extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"), + ) + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 4) + ldflags, host_cflags, lib, cflags = objs + self.assertIsInstance(ldflags, ComputedFlags) + self.assertIsInstance(cflags, ComputedFlags) + self.assertIsInstance(host_cflags, ComputedFlags) + self.assertIsInstance(lib, RustLibrary) + self.assertRegex(lib.lib_name, "random_crate") + self.assertRegex(lib.import_name, "random_crate") + self.assertRegex(lib.basename, "random-crate") + + def test_multiple_rust_libraries(self): + """Test that linking multiple Rust libraries throws an error""" + reader = self.reader( + "multiple-rust-libraries", + extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"), + ) + with six.assertRaisesRegex( + self, SandboxValidationError, "Cannot link the following Rust libraries" + ): + self.read_topsrcdir(reader) + + def test_rust_library_features(self): + """Test that RustLibrary features are correctly emitted.""" + reader = self.reader( + "rust-library-features", + extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"), + ) + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 4) + ldflags, host_cflags, lib, cflags = objs + self.assertIsInstance(ldflags, ComputedFlags) + self.assertIsInstance(cflags, ComputedFlags) + self.assertIsInstance(host_cflags, ComputedFlags) + self.assertIsInstance(lib, RustLibrary) + self.assertEqual(lib.features, ["musthave", "cantlivewithout"]) + + def test_rust_library_duplicate_features(self): + """Test that duplicate RustLibrary features are rejected.""" + reader = self.reader("rust-library-duplicate-features") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "features for .* should not contain duplicates", + ): + self.read_topsrcdir(reader) + + def test_rust_program_no_cargo_toml(self): + """Test that specifying RUST_PROGRAMS without a Cargo.toml fails.""" + reader = self.reader("rust-program-no-cargo-toml") + with six.assertRaisesRegex( + self, SandboxValidationError, "No Cargo.toml file found" + ): + self.read_topsrcdir(reader) + + def test_host_rust_program_no_cargo_toml(self): + """Test that specifying HOST_RUST_PROGRAMS without a Cargo.toml fails.""" + reader = self.reader("host-rust-program-no-cargo-toml") + with six.assertRaisesRegex( + self, SandboxValidationError, "No Cargo.toml file found" + ): + self.read_topsrcdir(reader) + + def test_rust_program_nonexistent_name(self): + """Test that specifying RUST_PROGRAMS that don't exist in Cargo.toml + correctly throws an error.""" + reader = self.reader("rust-program-nonexistent-name") + with six.assertRaisesRegex( + self, SandboxValidationError, "Cannot find Cargo.toml definition for" + ): + self.read_topsrcdir(reader) + + def test_host_rust_program_nonexistent_name(self): + """Test that specifying HOST_RUST_PROGRAMS that don't exist in + Cargo.toml correctly throws an error.""" + reader = self.reader("host-rust-program-nonexistent-name") + with six.assertRaisesRegex( + self, SandboxValidationError, "Cannot find Cargo.toml definition for" + ): + self.read_topsrcdir(reader) + + def test_rust_programs(self): + """Test RUST_PROGRAMS emission.""" + reader = self.reader( + "rust-programs", + extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc", BIN_SUFFIX=".exe"), + ) + objs = self.read_topsrcdir(reader) + + self.assertEqual(len(objs), 4) + 
ldflags, host_cflags, cflags, prog = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(host_cflags, ComputedFlags)
+        self.assertIsInstance(prog, RustProgram)
+        self.assertEqual(prog.name, "some")
+
+    def test_host_rust_programs(self):
+        """Test HOST_RUST_PROGRAMS emission."""
+        reader = self.reader(
+            "host-rust-programs",
+            extra_substs=dict(
+                RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+            ),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 4)
+        ldflags, cflags, hostflags, prog = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(hostflags, ComputedFlags)
+        self.assertIsInstance(prog, HostRustProgram)
+        self.assertEqual(prog.name, "some")
+
+    def test_host_rust_libraries(self):
+        """Test HOST_RUST_LIBRARIES emission."""
+        reader = self.reader(
+            "host-rust-libraries",
+            extra_substs=dict(
+                RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+            ),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 4)
+        ldflags, host_cflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(host_cflags, ComputedFlags)
+        self.assertIsInstance(lib, HostRustLibrary)
+        self.assertRegex(lib.lib_name, "host_lib")
+        self.assertRegex(lib.import_name, "host_lib")
+
+    def test_crate_dependency_path_resolution(self):
+        """Test recursive dependencies resolve with the correct paths."""
+        reader = self.reader(
+            "crate-dependency-path-resolution",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 4)
+        ldflags, host_cflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(host_cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+
+    def test_install_shared_lib(self):
+        """Test that we can install a shared library with TEST_HARNESS_FILES"""
+        reader = self.reader("test-install-shared-lib")
+        objs = self.read_topsrcdir(reader)
+        self.assertIsInstance(objs[0], TestHarnessFiles)
+        self.assertIsInstance(objs[1], VariablePassthru)
+        self.assertIsInstance(objs[2], ComputedFlags)
+        self.assertIsInstance(objs[3], SharedLibrary)
+        self.assertIsInstance(objs[4], ComputedFlags)
+        for path, files in objs[0].files.walk():
+            for f in files:
+                self.assertEqual(str(f), "!libfoo.so")
+                self.assertEqual(path, "foo/bar")
+
+    def test_symbols_file(self):
+        """Test that SYMBOLS_FILE works"""
+        reader = self.reader("test-symbols-file")
+        genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(genfile, GeneratedFile)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(shlib, SharedLibrary)
+        # This looks weird but MockConfig sets DLL_{PREFIX,SUFFIX} and
+        # the reader method in this class sets OS_TARGET=WINNT.
+ self.assertEqual(shlib.symbols_file, "libfoo.so.def") + + def test_symbols_file_objdir(self): + """Test that a SYMBOLS_FILE in the objdir works""" + reader = self.reader("test-symbols-file-objdir") + genfile, ldflags, shlib, flags = self.read_topsrcdir(reader) + self.assertIsInstance(genfile, GeneratedFile) + self.assertEqual( + genfile.script, mozpath.join(reader.config.topsrcdir, "foo.py") + ) + self.assertIsInstance(flags, ComputedFlags) + self.assertIsInstance(ldflags, ComputedFlags) + self.assertIsInstance(shlib, SharedLibrary) + self.assertEqual(shlib.symbols_file, "foo.symbols") + + def test_symbols_file_objdir_missing_generated(self): + """Test that a SYMBOLS_FILE in the objdir that's missing + from GENERATED_FILES is an error. + """ + reader = self.reader("test-symbols-file-objdir-missing-generated") + with six.assertRaisesRegex( + self, + SandboxValidationError, + "Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:", + ): + self.read_topsrcdir(reader) + + def test_wasm_compile_flags(self): + reader = self.reader( + "wasm-compile-flags", + extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"}, + ) + flags = list(self.read_topsrcdir(reader))[2] + self.assertIsInstance(flags, ComputedFlags) + self.assertEqual( + flags.flags["WASM_CFLAGS"], reader.config.substs["WASM_CFLAGS"] + ) + self.assertEqual( + flags.flags["MOZBUILD_WASM_CFLAGS"], ["-funroll-loops", "-wasm-arg"] + ) + self.assertEqual( + set(flags.flags["WASM_DEFINES"]), + set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]), + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/frontend/test_namespaces.py b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py new file mode 100644 index 0000000000..e8c1a3eb00 --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py @@ -0,0 +1,225 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
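+
+# A minimal illustrative sketch (not upstream code) of the convention
+# exercised below, assuming the (storage type, input type, docs) layout of
+# VARIABLES entries: assigning through a Context coerces the input type into
+# the declared storage type.
+#
+#     ns = Context(allowed_variables=VARIABLES)
+#     ns["FUGA"] = "fuga"            # plain text input...
+#     isinstance(ns["FUGA"], Fuga)   # ...is coerced to the storage type.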
+ +import unittest + +import six +from mozunit import main + +from mozbuild.frontend.context import ( + Context, + ContextDerivedTypedList, + ContextDerivedTypedListWithItems, + ContextDerivedValue, +) +from mozbuild.util import ( + StrictOrderingOnAppendList, + StrictOrderingOnAppendListWithFlagsFactory, + UnsortedError, +) + + +class Fuga(object): + def __init__(self, value): + self.value = value + + +class Piyo(ContextDerivedValue): + def __init__(self, context, value): + if not isinstance(value, six.text_type): + raise ValueError + self.context = context + self.value = value + + def lower(self): + return self.value.lower() + + def __str__(self): + return self.value + + def __eq__(self, other): + return self.value == six.text_type(other) + + def __lt__(self, other): + return self.value < six.text_type(other) + + def __le__(self, other): + return self.value <= six.text_type(other) + + def __gt__(self, other): + return self.value > six.text_type(other) + + def __ge__(self, other): + return self.value >= six.text_type(other) + + def __hash__(self): + return hash(self.value) + + +VARIABLES = { + "HOGE": (six.text_type, six.text_type, None), + "FUGA": (Fuga, six.text_type, None), + "PIYO": (Piyo, six.text_type, None), + "HOGERA": (ContextDerivedTypedList(Piyo, StrictOrderingOnAppendList), list, None), + "HOGEHOGE": ( + ContextDerivedTypedListWithItems( + Piyo, + StrictOrderingOnAppendListWithFlagsFactory( + { + "foo": bool, + } + ), + ), + list, + None, + ), +} + + +class TestContext(unittest.TestCase): + def test_key_rejection(self): + # Lowercase keys should be rejected during normal operation. + ns = Context(allowed_variables=VARIABLES) + + with self.assertRaises(KeyError) as ke: + ns["foo"] = True + + e = ke.exception.args + self.assertEqual(e[0], "global_ns") + self.assertEqual(e[1], "set_unknown") + self.assertEqual(e[2], "foo") + self.assertTrue(e[3]) + + # Unknown uppercase keys should be rejected. + with self.assertRaises(KeyError) as ke: + ns["FOO"] = True + + e = ke.exception.args + self.assertEqual(e[0], "global_ns") + self.assertEqual(e[1], "set_unknown") + self.assertEqual(e[2], "FOO") + self.assertTrue(e[3]) + + def test_allowed_set(self): + self.assertIn("HOGE", VARIABLES) + + ns = Context(allowed_variables=VARIABLES) + + ns["HOGE"] = "foo" + self.assertEqual(ns["HOGE"], "foo") + + def test_value_checking(self): + ns = Context(allowed_variables=VARIABLES) + + # Setting to a non-allowed type should not work. + with self.assertRaises(ValueError) as ve: + ns["HOGE"] = True + + e = ve.exception.args + self.assertEqual(e[0], "global_ns") + self.assertEqual(e[1], "set_type") + self.assertEqual(e[2], "HOGE") + self.assertEqual(e[3], True) + self.assertEqual(e[4], six.text_type) + + def test_key_checking(self): + # Checking for existence of a key should not populate the key if it + # doesn't exist. + g = Context(allowed_variables=VARIABLES) + + self.assertFalse("HOGE" in g) + self.assertFalse("HOGE" in g) + + def test_coercion(self): + ns = Context(allowed_variables=VARIABLES) + + # Setting to a type different from the allowed input type should not + # work. 
+ with self.assertRaises(ValueError) as ve: + ns["FUGA"] = False + + e = ve.exception.args + self.assertEqual(e[0], "global_ns") + self.assertEqual(e[1], "set_type") + self.assertEqual(e[2], "FUGA") + self.assertEqual(e[3], False) + self.assertEqual(e[4], six.text_type) + + ns["FUGA"] = "fuga" + self.assertIsInstance(ns["FUGA"], Fuga) + self.assertEqual(ns["FUGA"].value, "fuga") + + ns["FUGA"] = Fuga("hoge") + self.assertIsInstance(ns["FUGA"], Fuga) + self.assertEqual(ns["FUGA"].value, "hoge") + + def test_context_derived_coercion(self): + ns = Context(allowed_variables=VARIABLES) + + # Setting to a type different from the allowed input type should not + # work. + with self.assertRaises(ValueError) as ve: + ns["PIYO"] = False + + e = ve.exception.args + self.assertEqual(e[0], "global_ns") + self.assertEqual(e[1], "set_type") + self.assertEqual(e[2], "PIYO") + self.assertEqual(e[3], False) + self.assertEqual(e[4], six.text_type) + + ns["PIYO"] = "piyo" + self.assertIsInstance(ns["PIYO"], Piyo) + self.assertEqual(ns["PIYO"].value, "piyo") + self.assertEqual(ns["PIYO"].context, ns) + + ns["PIYO"] = Piyo(ns, "fuga") + self.assertIsInstance(ns["PIYO"], Piyo) + self.assertEqual(ns["PIYO"].value, "fuga") + self.assertEqual(ns["PIYO"].context, ns) + + def test_context_derived_typed_list(self): + ns = Context(allowed_variables=VARIABLES) + + # Setting to a type that's rejected by coercion should not work. + with self.assertRaises(ValueError): + ns["HOGERA"] = [False] + + ns["HOGERA"] += ["a", "b", "c"] + + self.assertIsInstance(ns["HOGERA"], VARIABLES["HOGERA"][0]) + for n in range(0, 3): + self.assertIsInstance(ns["HOGERA"][n], Piyo) + self.assertEqual(ns["HOGERA"][n].value, ["a", "b", "c"][n]) + self.assertEqual(ns["HOGERA"][n].context, ns) + + with self.assertRaises(UnsortedError): + ns["HOGERA"] += ["f", "e", "d"] + + def test_context_derived_typed_list_with_items(self): + ns = Context(allowed_variables=VARIABLES) + + # Setting to a type that's rejected by coercion should not work. + with self.assertRaises(ValueError): + ns["HOGEHOGE"] = [False] + + values = ["a", "b", "c"] + ns["HOGEHOGE"] += values + + self.assertIsInstance(ns["HOGEHOGE"], VARIABLES["HOGEHOGE"][0]) + for v in values: + ns["HOGEHOGE"][v].foo = True + + for v, item in zip(values, ns["HOGEHOGE"]): + self.assertIsInstance(item, Piyo) + self.assertEqual(v, item) + self.assertEqual(ns["HOGEHOGE"][v].foo, True) + self.assertEqual(ns["HOGEHOGE"][item].foo, True) + + with self.assertRaises(UnsortedError): + ns["HOGEHOGE"] += ["f", "e", "d"] + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/frontend/test_reader.py b/python/mozbuild/mozbuild/test/frontend/test_reader.py new file mode 100644 index 0000000000..a15bb15d7e --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/test_reader.py @@ -0,0 +1,531 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
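+
+# An illustrative sketch (not upstream code) of the pattern these tests
+# follow, assuming the fixture directories under test/frontend/data used by
+# self.reader():
+#
+#     config = MockConfig(mozpath.join(data_path, "traversal-simple"))
+#     reader = BuildReader(config)
+#     contexts = list(reader.read_topsrcdir())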
+ +import os +import sys +import unittest + +import mozpack.path as mozpath +from mozunit import main + +from mozbuild import schedules +from mozbuild.frontend.context import BugzillaComponent +from mozbuild.frontend.reader import BuildReader, BuildReaderError +from mozbuild.test.common import MockConfig + +if sys.version_info.major == 2: + text_type = "unicode" +else: + text_type = "str" + +data_path = mozpath.abspath(mozpath.dirname(__file__)) +data_path = mozpath.join(data_path, "data") + + +class TestBuildReader(unittest.TestCase): + def setUp(self): + self._old_env = dict(os.environ) + os.environ.pop("MOZ_OBJDIR", None) + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + def config(self, name, **kwargs): + path = mozpath.join(data_path, name) + + return MockConfig(path, **kwargs) + + def reader(self, name, enable_tests=False, error_is_fatal=True, **kwargs): + extra = {} + if enable_tests: + extra["ENABLE_TESTS"] = "1" + config = self.config(name, extra_substs=extra, error_is_fatal=error_is_fatal) + + return BuildReader(config, **kwargs) + + def file_path(self, name, *args): + return mozpath.join(data_path, name, *args) + + def test_dirs_traversal_simple(self): + reader = self.reader("traversal-simple") + + contexts = list(reader.read_topsrcdir()) + + self.assertEqual(len(contexts), 4) + + def test_dirs_traversal_no_descend(self): + reader = self.reader("traversal-simple") + + path = mozpath.join(reader.config.topsrcdir, "moz.build") + self.assertTrue(os.path.exists(path)) + + contexts = list(reader.read_mozbuild(path, reader.config, descend=False)) + + self.assertEqual(len(contexts), 1) + + def test_dirs_traversal_all_variables(self): + reader = self.reader("traversal-all-vars") + + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 2) + + reader = self.reader("traversal-all-vars", enable_tests=True) + + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 3) + + def test_relative_dirs(self): + # Ensure relative directories are traversed. + reader = self.reader("traversal-relative-dirs") + + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 3) + + def test_repeated_dirs_ignored(self): + # Ensure repeated directories are ignored. + reader = self.reader("traversal-repeated-dirs") + + contexts = list(reader.read_topsrcdir()) + self.assertEqual(len(contexts), 3) + + def test_outside_topsrcdir(self): + # References to directories outside the topsrcdir should fail. 
+ reader = self.reader("traversal-outside-topsrcdir") + + with self.assertRaises(Exception): + list(reader.read_topsrcdir()) + + def test_error_basic(self): + reader = self.reader("reader-error-basic") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertEqual( + e.actual_file, self.file_path("reader-error-basic", "moz.build") + ) + + self.assertIn("The error occurred while processing the", str(e)) + + def test_error_included_from(self): + reader = self.reader("reader-error-included-from") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertEqual( + e.actual_file, self.file_path("reader-error-included-from", "child.build") + ) + self.assertEqual( + e.main_file, self.file_path("reader-error-included-from", "moz.build") + ) + + self.assertIn("This file was included as part of processing", str(e)) + + def test_error_syntax_error(self): + reader = self.reader("reader-error-syntax") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("Python syntax error on line 5", str(e)) + self.assertIn(" foo =", str(e)) + self.assertIn(" ^", str(e)) + + def test_error_read_unknown_global(self): + reader = self.reader("reader-error-read-unknown-global") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("The error was triggered on line 5", str(e)) + self.assertIn("The underlying problem is an attempt to read", str(e)) + self.assertIn(" FOO", str(e)) + + def test_error_write_unknown_global(self): + reader = self.reader("reader-error-write-unknown-global") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("The error was triggered on line 7", str(e)) + self.assertIn("The underlying problem is an attempt to write", str(e)) + self.assertIn(" FOO", str(e)) + + def test_error_write_bad_value(self): + reader = self.reader("reader-error-write-bad-value") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("The error was triggered on line 5", str(e)) + self.assertIn("is an attempt to write an illegal value to a special", str(e)) + + self.assertIn("variable whose value was rejected is:\n\n DIRS", str(e)) + + self.assertIn( + "written to it was of the following type:\n\n %s" % text_type, str(e) + ) + + self.assertIn("expects the following type(s):\n\n list", str(e)) + + def test_error_illegal_path(self): + reader = self.reader("reader-error-outside-topsrcdir") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("The underlying problem is an illegal file access", str(e)) + + def test_error_missing_include_path(self): + reader = self.reader("reader-error-missing-include") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("we referenced a path that does not exist", str(e)) + + def test_error_script_error(self): + reader = self.reader("reader-error-script-error") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("The error appears to be the fault of the script", str(e)) + self.assertIn(' ["TypeError: unsupported operand', str(e)) + + def test_error_bad_dir(self): + reader = 
self.reader("reader-error-bad-dir") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("we referenced a path that does not exist", str(e)) + + def test_error_repeated_dir(self): + reader = self.reader("reader-error-repeated-dir") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("Directory (foo) registered multiple times", str(e)) + + def test_error_error_func(self): + reader = self.reader("reader-error-error-func") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("A moz.build file called the error() function.", str(e)) + self.assertIn(" Some error.", str(e)) + + def test_error_error_func_ok(self): + reader = self.reader("reader-error-error-func", error_is_fatal=False) + + list(reader.read_topsrcdir()) + + def test_error_empty_list(self): + reader = self.reader("reader-error-empty-list") + + with self.assertRaises(BuildReaderError) as bre: + list(reader.read_topsrcdir()) + + e = bre.exception + self.assertIn("Variable DIRS assigned an empty value.", str(e)) + + def test_inheriting_variables(self): + reader = self.reader("inheriting-variables") + + contexts = list(reader.read_topsrcdir()) + + self.assertEqual(len(contexts), 4) + self.assertEqual( + [context.relsrcdir for context in contexts], ["", "foo", "foo/baz", "bar"] + ) + self.assertEqual( + [context["XPIDL_MODULE"] for context in contexts], + ["foobar", "foobar", "baz", "foobar"], + ) + + def test_find_relevant_mozbuilds(self): + reader = self.reader("reader-relevant-mozbuild") + + # Absolute paths outside topsrcdir are rejected. + with self.assertRaises(Exception): + reader._find_relevant_mozbuilds(["/foo"]) + + # File in root directory. + paths = reader._find_relevant_mozbuilds(["file"]) + self.assertEqual(paths, {"file": ["moz.build"]}) + + # File in child directory. + paths = reader._find_relevant_mozbuilds(["d1/file1"]) + self.assertEqual(paths, {"d1/file1": ["moz.build", "d1/moz.build"]}) + + # Multiple files in same directory. + paths = reader._find_relevant_mozbuilds(["d1/file1", "d1/file2"]) + self.assertEqual( + paths, + { + "d1/file1": ["moz.build", "d1/moz.build"], + "d1/file2": ["moz.build", "d1/moz.build"], + }, + ) + + # Missing moz.build from missing intermediate directory. + paths = reader._find_relevant_mozbuilds( + ["d1/no-intermediate-moz-build/child/file"] + ) + self.assertEqual( + paths, + { + "d1/no-intermediate-moz-build/child/file": [ + "moz.build", + "d1/moz.build", + "d1/no-intermediate-moz-build/child/moz.build", + ] + }, + ) + + # Lots of empty directories. + paths = reader._find_relevant_mozbuilds( + ["d1/parent-is-far/dir1/dir2/dir3/file"] + ) + self.assertEqual( + paths, + { + "d1/parent-is-far/dir1/dir2/dir3/file": [ + "moz.build", + "d1/moz.build", + "d1/parent-is-far/moz.build", + ] + }, + ) + + # Lots of levels. + paths = reader._find_relevant_mozbuilds( + ["d1/every-level/a/file", "d1/every-level/b/file"] + ) + self.assertEqual( + paths, + { + "d1/every-level/a/file": [ + "moz.build", + "d1/moz.build", + "d1/every-level/moz.build", + "d1/every-level/a/moz.build", + ], + "d1/every-level/b/file": [ + "moz.build", + "d1/moz.build", + "d1/every-level/moz.build", + "d1/every-level/b/moz.build", + ], + }, + ) + + # Different root directories. 
+ paths = reader._find_relevant_mozbuilds(["d1/file", "d2/file", "file"]) + self.assertEqual( + paths, + { + "file": ["moz.build"], + "d1/file": ["moz.build", "d1/moz.build"], + "d2/file": ["moz.build", "d2/moz.build"], + }, + ) + + def test_read_relevant_mozbuilds(self): + reader = self.reader("reader-relevant-mozbuild") + + paths, contexts = reader.read_relevant_mozbuilds( + ["d1/every-level/a/file", "d1/every-level/b/file", "d2/file"] + ) + self.assertEqual(len(paths), 3) + self.assertEqual(len(contexts), 6) + + self.assertEqual( + [ctx.relsrcdir for ctx in paths["d1/every-level/a/file"]], + ["", "d1", "d1/every-level", "d1/every-level/a"], + ) + self.assertEqual( + [ctx.relsrcdir for ctx in paths["d1/every-level/b/file"]], + ["", "d1", "d1/every-level", "d1/every-level/b"], + ) + self.assertEqual([ctx.relsrcdir for ctx in paths["d2/file"]], ["", "d2"]) + + def test_all_mozbuild_paths(self): + reader = self.reader("reader-relevant-mozbuild") + + paths = list(reader.all_mozbuild_paths()) + # Ensure no duplicate paths. + self.assertEqual(sorted(paths), sorted(set(paths))) + self.assertEqual(len(paths), 10) + + def test_files_bad_bug_component(self): + reader = self.reader("files-info") + + with self.assertRaises(BuildReaderError): + reader.files_info(["bug_component/bad-assignment/moz.build"]) + + def test_files_bug_component_static(self): + reader = self.reader("files-info") + + v = reader.files_info( + [ + "bug_component/static/foo", + "bug_component/static/bar", + "bug_component/static/foo/baz", + ] + ) + self.assertEqual(len(v), 3) + self.assertEqual( + v["bug_component/static/foo"]["BUG_COMPONENT"], + BugzillaComponent("FooProduct", "FooComponent"), + ) + self.assertEqual( + v["bug_component/static/bar"]["BUG_COMPONENT"], + BugzillaComponent("BarProduct", "BarComponent"), + ) + self.assertEqual( + v["bug_component/static/foo/baz"]["BUG_COMPONENT"], + BugzillaComponent("default_product", "default_component"), + ) + + def test_files_bug_component_simple(self): + reader = self.reader("files-info") + + v = reader.files_info(["bug_component/simple/moz.build"]) + self.assertEqual(len(v), 1) + flags = v["bug_component/simple/moz.build"] + self.assertEqual(flags["BUG_COMPONENT"].product, "Firefox Build System") + self.assertEqual(flags["BUG_COMPONENT"].component, "General") + + def test_files_bug_component_different_matchers(self): + reader = self.reader("files-info") + + v = reader.files_info( + [ + "bug_component/different-matchers/foo.jsm", + "bug_component/different-matchers/bar.cpp", + "bug_component/different-matchers/baz.misc", + ] + ) + self.assertEqual(len(v), 3) + + js_flags = v["bug_component/different-matchers/foo.jsm"] + cpp_flags = v["bug_component/different-matchers/bar.cpp"] + misc_flags = v["bug_component/different-matchers/baz.misc"] + + self.assertEqual(js_flags["BUG_COMPONENT"], BugzillaComponent("Firefox", "JS")) + self.assertEqual( + cpp_flags["BUG_COMPONENT"], BugzillaComponent("Firefox", "C++") + ) + self.assertEqual( + misc_flags["BUG_COMPONENT"], + BugzillaComponent("default_product", "default_component"), + ) + + def test_files_bug_component_final(self): + reader = self.reader("files-info") + + v = reader.files_info( + [ + "bug_component/final/foo", + "bug_component/final/Makefile.in", + "bug_component/final/subcomponent/Makefile.in", + "bug_component/final/subcomponent/bar", + ] + ) + + self.assertEqual( + v["bug_component/final/foo"]["BUG_COMPONENT"], + BugzillaComponent("default_product", "default_component"), + ) + self.assertEqual( + 
v["bug_component/final/Makefile.in"]["BUG_COMPONENT"], + BugzillaComponent("Firefox Build System", "General"), + ) + self.assertEqual( + v["bug_component/final/subcomponent/Makefile.in"]["BUG_COMPONENT"], + BugzillaComponent("Firefox Build System", "General"), + ) + self.assertEqual( + v["bug_component/final/subcomponent/bar"]["BUG_COMPONENT"], + BugzillaComponent("Another", "Component"), + ) + + def test_invalid_flavor(self): + reader = self.reader("invalid-files-flavor") + + with self.assertRaises(BuildReaderError): + reader.files_info(["foo.js"]) + + def test_schedules(self): + reader = self.reader("schedules") + info = reader.files_info( + [ + "win.and.osx", + "somefile", + "foo.win", + "foo.osx", + "subd/aa.py", + "subd/yaml.py", + "subd/win.js", + ] + ) + # default: all exclusive, no inclusive + self.assertEqual(info["somefile"]["SCHEDULES"].inclusive, []) + self.assertEqual( + info["somefile"]["SCHEDULES"].exclusive, schedules.EXCLUSIVE_COMPONENTS + ) + # windows-only + self.assertEqual(info["foo.win"]["SCHEDULES"].inclusive, []) + self.assertEqual(info["foo.win"]["SCHEDULES"].exclusive, ["windows"]) + # osx-only + self.assertEqual(info["foo.osx"]["SCHEDULES"].inclusive, []) + self.assertEqual(info["foo.osx"]["SCHEDULES"].exclusive, ["macosx"]) + # top-level moz.build specifies subd/**.py with an inclusive option + self.assertEqual(info["subd/aa.py"]["SCHEDULES"].inclusive, ["py-lint"]) + self.assertEqual( + info["subd/aa.py"]["SCHEDULES"].exclusive, schedules.EXCLUSIVE_COMPONENTS + ) + # Files('yaml.py') in subd/moz.build combines with Files('subdir/**.py') + self.assertEqual( + info["subd/yaml.py"]["SCHEDULES"].inclusive, ["py-lint", "yaml-lint"] + ) + self.assertEqual( + info["subd/yaml.py"]["SCHEDULES"].exclusive, schedules.EXCLUSIVE_COMPONENTS + ) + # .. but exlusive does not override inclusive + self.assertEqual(info["subd/win.js"]["SCHEDULES"].inclusive, ["js-lint"]) + self.assertEqual(info["subd/win.js"]["SCHEDULES"].exclusive, ["windows"]) + + self.assertEqual( + set(info["subd/yaml.py"]["SCHEDULES"].components), + set(schedules.EXCLUSIVE_COMPONENTS + ["py-lint", "yaml-lint"]), + ) + + # win.and.osx is defined explicitly, and matches *.osx, and the two have + # conflicting SCHEDULES.exclusive settings, so the later one is used + self.assertEqual( + set(info["win.and.osx"]["SCHEDULES"].exclusive), set(["macosx", "windows"]) + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/frontend/test_sandbox.py b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py new file mode 100644 index 0000000000..017de1ce9c --- /dev/null +++ b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py @@ -0,0 +1,536 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import unittest + +import mozpack.path as mozpath +from mozunit import main + +from mozbuild.frontend.context import ( + FUNCTIONS, + SPECIAL_VARIABLES, + VARIABLES, + Context, + SourcePath, +) +from mozbuild.frontend.reader import MozbuildSandbox, SandboxCalledError +from mozbuild.frontend.sandbox import Sandbox, SandboxExecutionError, SandboxLoadError +from mozbuild.test.common import MockConfig + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data") + + +class TestSandbox(unittest.TestCase): + def sandbox(self): + return Sandbox( + Context( + { + "DIRS": (list, list, None), + } + ) + ) + + def test_exec_source_success(self): + sandbox = self.sandbox() + context = sandbox._context + + sandbox.exec_source("foo = True", mozpath.abspath("foo.py")) + + self.assertNotIn("foo", context) + self.assertEqual(context.main_path, mozpath.abspath("foo.py")) + self.assertEqual(context.all_paths, set([mozpath.abspath("foo.py")])) + + def test_exec_compile_error(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source("2f23;k;asfj", mozpath.abspath("foo.py")) + + self.assertEqual(se.exception.file_stack, [mozpath.abspath("foo.py")]) + self.assertIsInstance(se.exception.exc_value, SyntaxError) + self.assertEqual(sandbox._context.main_path, mozpath.abspath("foo.py")) + + def test_exec_import_denied(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source("import sys") + + self.assertIsInstance(se.exception, SandboxExecutionError) + self.assertEqual(se.exception.exc_type, ImportError) + + def test_exec_source_multiple(self): + sandbox = self.sandbox() + + sandbox.exec_source('DIRS = ["foo"]') + sandbox.exec_source('DIRS += ["bar"]') + + self.assertEqual(sandbox["DIRS"], ["foo", "bar"]) + + def test_exec_source_illegal_key_set(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source("ILLEGAL = True") + + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertEqual(e.args[0], "global_ns") + self.assertEqual(e.args[1], "set_unknown") + + def test_exec_source_reassign(self): + sandbox = self.sandbox() + + sandbox.exec_source('DIRS = ["foo"]') + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source('DIRS = ["bar"]') + + self.assertEqual(sandbox["DIRS"], ["foo"]) + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertEqual(e.args[0], "global_ns") + self.assertEqual(e.args[1], "reassign") + self.assertEqual(e.args[2], "DIRS") + + def test_exec_source_reassign_builtin(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source("sorted = 1") + + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertEqual(e.args[0], "Cannot reassign builtins") + + +class TestedSandbox(MozbuildSandbox): + """Version of MozbuildSandbox with a little more convenience for testing. + + It automatically normalizes paths given to exec_file and exec_source. This + helps simplify the test code. 
+ """ + + def normalize_path(self, path): + return mozpath.normpath(mozpath.join(self._context.config.topsrcdir, path)) + + def source_path(self, path): + return SourcePath(self._context, path) + + def exec_file(self, path): + super(TestedSandbox, self).exec_file(self.normalize_path(path)) + + def exec_source(self, source, path=""): + super(TestedSandbox, self).exec_source( + source, self.normalize_path(path) if path else "" + ) + + +class TestMozbuildSandbox(unittest.TestCase): + def sandbox(self, data_path=None, metadata={}): + config = None + + if data_path is not None: + config = MockConfig(mozpath.join(test_data_path, data_path)) + else: + config = MockConfig() + + return TestedSandbox(Context(VARIABLES, config), metadata) + + def test_default_state(self): + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path("moz.build")) + config = sandbox._context.config + + self.assertEqual(sandbox["TOPSRCDIR"], config.topsrcdir) + self.assertEqual(sandbox["TOPOBJDIR"], config.topobjdir) + self.assertEqual(sandbox["RELATIVEDIR"], "") + self.assertEqual(sandbox["SRCDIR"], config.topsrcdir) + self.assertEqual(sandbox["OBJDIR"], config.topobjdir) + + def test_symbol_presence(self): + # Ensure no discrepancies between the master symbol table and what's in + # the sandbox. + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path("moz.build")) + + all_symbols = set() + all_symbols |= set(FUNCTIONS.keys()) + all_symbols |= set(SPECIAL_VARIABLES.keys()) + + for symbol in all_symbols: + self.assertIsNotNone(sandbox[symbol]) + + def test_path_calculation(self): + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path("foo/bar/moz.build")) + config = sandbox._context.config + + self.assertEqual(sandbox["TOPSRCDIR"], config.topsrcdir) + self.assertEqual(sandbox["TOPOBJDIR"], config.topobjdir) + self.assertEqual(sandbox["RELATIVEDIR"], "foo/bar") + self.assertEqual(sandbox["SRCDIR"], mozpath.join(config.topsrcdir, "foo/bar")) + self.assertEqual(sandbox["OBJDIR"], mozpath.join(config.topobjdir, "foo/bar")) + + def test_config_access(self): + sandbox = self.sandbox() + config = sandbox._context.config + + self.assertEqual(sandbox["CONFIG"]["MOZ_TRUE"], "1") + self.assertEqual(sandbox["CONFIG"]["MOZ_FOO"], config.substs["MOZ_FOO"]) + + # Access to an undefined substitution should return None. + self.assertNotIn("MISSING", sandbox["CONFIG"]) + self.assertIsNone(sandbox["CONFIG"]["MISSING"]) + + # Should shouldn't be allowed to assign to the config. + with self.assertRaises(Exception): + sandbox["CONFIG"]["FOO"] = "" + + def test_special_variables(self): + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path("moz.build")) + + for k in SPECIAL_VARIABLES: + with self.assertRaises(KeyError): + sandbox[k] = 0 + + def test_exec_source_reassign_exported(self): + template_sandbox = self.sandbox(data_path="templates") + + # Templates need to be defined in actual files because of + # inspect.getsourcelines. 
+ template_sandbox.exec_file("templates.mozbuild") + + config = MockConfig() + + exports = {"DIST_SUBDIR": "browser"} + + sandbox = TestedSandbox( + Context(VARIABLES, config), + metadata={ + "exports": exports, + "templates": template_sandbox.templates, + }, + ) + + self.assertEqual(sandbox["DIST_SUBDIR"], "browser") + + # Templates should not interfere + sandbox.exec_source("Template([])", "foo.mozbuild") + + sandbox.exec_source('DIST_SUBDIR = "foo"') + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source('DIST_SUBDIR = "bar"') + + self.assertEqual(sandbox["DIST_SUBDIR"], "foo") + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertEqual(e.args[0], "global_ns") + self.assertEqual(e.args[1], "reassign") + self.assertEqual(e.args[2], "DIST_SUBDIR") + + def test_include_basic(self): + sandbox = self.sandbox(data_path="include-basic") + + sandbox.exec_file("moz.build") + + self.assertEqual( + sandbox["DIRS"], + [ + sandbox.source_path("foo"), + sandbox.source_path("bar"), + ], + ) + self.assertEqual( + sandbox._context.main_path, sandbox.normalize_path("moz.build") + ) + self.assertEqual(len(sandbox._context.all_paths), 2) + + def test_include_outside_topsrcdir(self): + sandbox = self.sandbox(data_path="include-outside-topsrcdir") + + with self.assertRaises(SandboxLoadError) as se: + sandbox.exec_file("relative.build") + + self.assertEqual( + se.exception.illegal_path, sandbox.normalize_path("../moz.build") + ) + + def test_include_error_stack(self): + # Ensure the path stack is reported properly in exceptions. + sandbox = self.sandbox(data_path="include-file-stack") + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_file("moz.build") + + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + args = e.exc_value.args + self.assertEqual(args[0], "global_ns") + self.assertEqual(args[1], "set_unknown") + self.assertEqual(args[2], "ILLEGAL") + + expected_stack = [ + mozpath.join(sandbox._context.config.topsrcdir, p) + for p in ["moz.build", "included-1.build", "included-2.build"] + ] + + self.assertEqual(e.file_stack, expected_stack) + + def test_include_missing(self): + sandbox = self.sandbox(data_path="include-missing") + + with self.assertRaises(SandboxLoadError) as sle: + sandbox.exec_file("moz.build") + + self.assertIsNotNone(sle.exception.read_error) + + def test_include_relative_from_child_dir(self): + # A relative path from a subdirectory should be relative from that + # child directory. + sandbox = self.sandbox(data_path="include-relative-from-child") + sandbox.exec_file("child/child.build") + self.assertEqual(sandbox["DIRS"], [sandbox.source_path("../foo")]) + + sandbox = self.sandbox(data_path="include-relative-from-child") + sandbox.exec_file("child/child2.build") + self.assertEqual(sandbox["DIRS"], [sandbox.source_path("../foo")]) + + def test_include_topsrcdir_relative(self): + # An absolute path for include() is relative to topsrcdir. 
+ + sandbox = self.sandbox(data_path="include-topsrcdir-relative") + sandbox.exec_file("moz.build") + + self.assertEqual(sandbox["DIRS"], [sandbox.source_path("foo")]) + + def test_error(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxCalledError) as sce: + sandbox.exec_source('error("This is an error.")') + + e = sce.exception.message + self.assertIn("This is an error.", str(e)) + + def test_substitute_config_files(self): + sandbox = self.sandbox() + sandbox._context.add_source(sandbox.normalize_path("moz.build")) + + sandbox.exec_source('CONFIGURE_SUBST_FILES += ["bar", "foo"]') + self.assertEqual(sandbox["CONFIGURE_SUBST_FILES"], ["bar", "foo"]) + for item in sandbox["CONFIGURE_SUBST_FILES"]: + self.assertIsInstance(item, SourcePath) + + def test_invalid_exports_set_base(self): + sandbox = self.sandbox() + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source('EXPORTS = "foo.h"') + + self.assertEqual(se.exception.exc_type, ValueError) + + def test_templates(self): + sandbox = self.sandbox(data_path="templates") + + # Templates need to be defined in actual files because of + # inspect.getsourcelines. + sandbox.exec_file("templates.mozbuild") + + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +Template([ + 'foo.cpp', +]) +""" + sandbox2.exec_source(source, "foo.mozbuild") + + self.assertEqual( + sandbox2._context, + { + "SOURCES": ["foo.cpp"], + "DIRS": [], + }, + ) + + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +SOURCES += ['qux.cpp'] +Template([ + 'bar.cpp', + 'foo.cpp', +],[ + 'foo', +]) +SOURCES += ['hoge.cpp'] +""" + sandbox2.exec_source(source, "foo.mozbuild") + + self.assertEqual( + sandbox2._context, + { + "SOURCES": ["qux.cpp", "bar.cpp", "foo.cpp", "hoge.cpp"], + "DIRS": [sandbox2.source_path("foo")], + }, + ) + + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +TemplateError([ + 'foo.cpp', +]) +""" + with self.assertRaises(SandboxExecutionError) as se: + sandbox2.exec_source(source, "foo.mozbuild") + + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertEqual(e.args[0], "global_ns") + self.assertEqual(e.args[1], "set_unknown") + + # TemplateGlobalVariable tries to access 'illegal' but that is expected + # to throw. + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +illegal = True +TemplateGlobalVariable() +""" + with self.assertRaises(SandboxExecutionError) as se: + sandbox2.exec_source(source, "foo.mozbuild") + + e = se.exception + self.assertIsInstance(e.exc_value, NameError) + + # TemplateGlobalUPPERVariable sets SOURCES with DIRS, but the context + # used when running the template is not expected to access variables + # from the global context. + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +DIRS += ['foo'] +TemplateGlobalUPPERVariable() +""" + sandbox2.exec_source(source, "foo.mozbuild") + self.assertEqual( + sandbox2._context, + { + "SOURCES": [], + "DIRS": [sandbox2.source_path("foo")], + }, + ) + + # However, the result of the template is mixed with the global + # context. 
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +SOURCES += ['qux.cpp'] +TemplateInherit([ + 'bar.cpp', + 'foo.cpp', +]) +SOURCES += ['hoge.cpp'] +""" + sandbox2.exec_source(source, "foo.mozbuild") + + self.assertEqual( + sandbox2._context, + { + "SOURCES": ["qux.cpp", "bar.cpp", "foo.cpp", "hoge.cpp"], + "USE_LIBS": ["foo"], + "DIRS": [], + }, + ) + + # Template names must be CamelCase. Here, we can define the template + # inline because the error happens before inspect.getsourcelines. + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +@template +def foo(): + pass +""" + + with self.assertRaises(SandboxExecutionError) as se: + sandbox2.exec_source(source, "foo.mozbuild") + + e = se.exception + self.assertIsInstance(e.exc_value, NameError) + + e = se.exception.exc_value + self.assertIn("Template function names must be CamelCase.", str(e)) + + # Template names must not already be registered. + sandbox2 = self.sandbox(metadata={"templates": sandbox.templates}) + source = """ +@template +def Template(): + pass +""" + with self.assertRaises(SandboxExecutionError) as se: + sandbox2.exec_source(source, "foo.mozbuild") + + e = se.exception + self.assertIsInstance(e.exc_value, KeyError) + + e = se.exception.exc_value + self.assertIn( + 'A template named "Template" was already declared in %s.' + % sandbox.normalize_path("templates.mozbuild"), + str(e), + ) + + def test_function_args(self): + class Foo(int): + pass + + def foo(a, b): + return type(a), type(b) + + FUNCTIONS.update( + { + "foo": (lambda self: foo, (Foo, int), ""), + } + ) + + try: + sandbox = self.sandbox() + source = 'foo("a", "b")' + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source(source, "foo.mozbuild") + + e = se.exception + self.assertIsInstance(e.exc_value, ValueError) + + sandbox = self.sandbox() + source = 'foo(1, "b")' + + with self.assertRaises(SandboxExecutionError) as se: + sandbox.exec_source(source, "foo.mozbuild") + + e = se.exception + self.assertIsInstance(e.exc_value, ValueError) + + sandbox = self.sandbox() + source = "a = foo(1, 2)" + sandbox.exec_source(source, "foo.mozbuild") + + self.assertEqual(sandbox["a"], (Foo, int)) + finally: + del FUNCTIONS["foo"] + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/python.ini b/python/mozbuild/mozbuild/test/python.ini new file mode 100644 index 0000000000..b55612c43b --- /dev/null +++ b/python/mozbuild/mozbuild/test/python.ini @@ -0,0 +1,64 @@ +[DEFAULT] +subsuite = mozbuild + +[action/test_buildlist.py] +[action/test_html_fragment_preprocessor.py] +[action/test_langpack_manifest.py] +[action/test_node.py] +[action/test_process_install_manifest.py] +[backend/test_fastermake.py] +[backend/test_recursivemake.py] +[backend/test_build.py] +[backend/test_database.py] +[backend/test_configenvironment.py] +[backend/test_partialconfigenvironment.py] +[backend/test_test_manifest.py] +[backend/test_visualstudio.py] +[code_analysis/test_mach_commands.py] +[codecoverage/test_lcov_rewrite.py] +[compilation/test_warnings.py] +[configure/lint.py] +[configure/test_bootstrap.py] +[configure/test_checks_configure.py] +[configure/test_compile_checks.py] +[configure/test_configure.py] +[configure/test_lint.py] +[configure/test_moz_configure.py] +[configure/test_options.py] +[configure/test_toolchain_configure.py] +[configure/test_toolchain_helpers.py] +[configure/test_toolkit_moz_configure.py] +[configure/test_util.py] +[controller/test_ccachestats.py] 
+[controller/test_clobber.py]
+[frontend/test_context.py]
+[frontend/test_emitter.py]
+[frontend/test_namespaces.py]
+[frontend/test_reader.py]
+[frontend/test_sandbox.py]
+[repackaging/test_deb.py]
+[test_artifact_cache.py]
+[test_artifacts.py]
+[test_base.py]
+[test_containers.py]
+[test_dotproperties.py]
+[test_expression.py]
+[test_jarmaker.py]
+[test_licenses.py]
+[test_line_endings.py]
+[test_makeutil.py]
+[test_manifest.py]
+[test_mozconfig.py]
+[test_mozinfo.py]
+[test_preprocessor.py]
+[test_pythonutil.py]
+[test_rewrite_mozbuild.py]
+[test_telemetry.py]
+[test_telemetry_settings.py]
+[test_util.py]
+[test_util_fileavoidwrite.py]
+[test_vendor.py]
+skip-if = true # Bug 1765416
+requirements = python/mozbuild/mozbuild/test/vendor_requirements.txt
+[test_vendor_tools.py]
+skip-if = os == "win" # Windows doesn't have the same path separator as linux, and we just don't need to run it there
diff --git a/python/mozbuild/mozbuild/test/repackaging/test_deb.py b/python/mozbuild/mozbuild/test/repackaging/test_deb.py
new file mode 100644
index 0000000000..477f7ea346
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/repackaging/test_deb.py
@@ -0,0 +1,551 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import datetime
+import json
+import logging
+import os
+import tarfile
+import tempfile
+import zipfile
+from contextlib import nullcontext as does_not_raise
+from io import StringIO
+from unittest.mock import MagicMock, Mock, call
+
+import mozpack.path as mozpath
+import mozunit
+import pytest
+
+from mozbuild.repackaging import deb
+
+_APPLICATION_INI_CONTENT = """[App]
+Vendor=Mozilla
+Name=Firefox
+RemotingName=firefox-nightly-try
+CodeName=Firefox Nightly
+BuildID=20230222000000
+"""
+
+_APPLICATION_INI_CONTENT_DATA = {
+    "name": "Firefox",
+    "display_name": "Firefox Nightly",
+    "vendor": "Mozilla",
+    "remoting_name": "firefox-nightly-try",
+    "build_id": "20230222000000",
+    "timestamp": datetime.datetime(2023, 2, 22),
+}
+
+
+@pytest.mark.parametrize(
+    "number_of_application_ini_files, expectation, expected_result",
+    (
+        (0, pytest.raises(ValueError), None),
+        (1, does_not_raise(), _APPLICATION_INI_CONTENT_DATA),
+        (2, pytest.raises(ValueError), None),
+    ),
+)
+def test_extract_application_ini_data(
+    number_of_application_ini_files, expectation, expected_result
+):
+    with tempfile.TemporaryDirectory() as d:
+        tar_path = os.path.join(d, "input.tar")
+        with tarfile.open(tar_path, "w") as tar:
+            application_ini_path = os.path.join(d, "application.ini")
+            with open(application_ini_path, "w") as application_ini_file:
+                application_ini_file.write(_APPLICATION_INI_CONTENT)
+
+            for i in range(number_of_application_ini_files):
+                tar.add(application_ini_path, f"{i}/application.ini")
+
+        with expectation:
+            assert deb._extract_application_ini_data(tar_path) == expected_result
+
+
+def test_extract_application_ini_data_from_directory():
+    with tempfile.TemporaryDirectory() as d:
+        with open(os.path.join(d, "application.ini"), "w") as f:
+            f.write(_APPLICATION_INI_CONTENT)
+
+        assert (
+            deb._extract_application_ini_data_from_directory(d)
+            == _APPLICATION_INI_CONTENT_DATA
+        )
+
+
+@pytest.mark.parametrize(
+    "version, build_number, package_name_suffix, description_suffix, expected",
+    (
+        (
+            "112.0a1",
+            1,
+            "",
+            "",
+            {
+                "DEB_DESCRIPTION": "Mozilla Firefox",
+                "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try",
+                "DEB_PKG_NAME":
"firefox-nightly-try", + "DEB_PKG_VERSION": "112.0a1~20230222000000", + }, + ), + ( + "112.0a1", + 1, + "-l10n-fr", + " - Language pack for Firefox Nightly for fr", + { + "DEB_DESCRIPTION": "Mozilla Firefox - Language pack for Firefox Nightly for fr", + "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try", + "DEB_PKG_NAME": "firefox-nightly-try-l10n-fr", + "DEB_PKG_VERSION": "112.0a1~20230222000000", + }, + ), + ( + "112.0b1", + 1, + "", + "", + { + "DEB_DESCRIPTION": "Mozilla Firefox", + "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try", + "DEB_PKG_NAME": "firefox-nightly-try", + "DEB_PKG_VERSION": "112.0b1~build1", + }, + ), + ( + "112.0", + 2, + "", + "", + { + "DEB_DESCRIPTION": "Mozilla Firefox", + "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try", + "DEB_PKG_NAME": "firefox-nightly-try", + "DEB_PKG_VERSION": "112.0~build2", + }, + ), + ), +) +def test_get_build_variables( + version, build_number, package_name_suffix, description_suffix, expected +): + application_ini_data = { + "name": "Firefox", + "display_name": "Firefox", + "vendor": "Mozilla", + "remoting_name": "firefox-nightly-try", + "build_id": "20230222000000", + "timestamp": datetime.datetime(2023, 2, 22), + } + assert deb._get_build_variables( + application_ini_data, + "x86", + version, + build_number, + depends="${shlibs:Depends},", + package_name_suffix=package_name_suffix, + description_suffix=description_suffix, + ) == { + **{ + "DEB_CHANGELOG_DATE": "Wed, 22 Feb 2023 00:00:00 -0000", + "DEB_ARCH_NAME": "i386", + "DEB_DEPENDS": "${shlibs:Depends},", + }, + **expected, + } + + +def test_copy_plain_deb_config(monkeypatch): + def mock_listdir(dir): + assert dir == "/template_dir" + return [ + "/template_dir/debian_file1.in", + "/template_dir/debian_file2.in", + "/template_dir/debian_file3", + "/template_dir/debian_file4", + ] + + monkeypatch.setattr(deb.os, "listdir", mock_listdir) + + def mock_makedirs(dir, exist_ok): + assert dir == "/source_dir/debian" + assert exist_ok is True + + monkeypatch.setattr(deb.os, "makedirs", mock_makedirs) + + mock_copy = MagicMock() + monkeypatch.setattr(deb.shutil, "copy", mock_copy) + + deb._copy_plain_deb_config("/template_dir", "/source_dir") + assert mock_copy.call_args_list == [ + call("/template_dir/debian_file3", "/source_dir/debian/debian_file3"), + call("/template_dir/debian_file4", "/source_dir/debian/debian_file4"), + ] + + +def test_render_deb_templates(): + with tempfile.TemporaryDirectory() as template_dir, tempfile.TemporaryDirectory() as source_dir: + with open(os.path.join(template_dir, "debian_file1.in"), "w") as f: + f.write("${some_build_variable}") + + with open(os.path.join(template_dir, "debian_file2.in"), "w") as f: + f.write("Some hardcoded value") + + with open(os.path.join(template_dir, "ignored_file.in"), "w") as f: + f.write("Must not be copied") + + deb._render_deb_templates( + template_dir, + source_dir, + {"some_build_variable": "some_value"}, + exclude_file_names=["ignored_file.in"], + ) + + with open(os.path.join(source_dir, "debian", "debian_file1")) as f: + assert f.read() == "some_value" + + with open(os.path.join(source_dir, "debian", "debian_file2")) as f: + assert f.read() == "Some hardcoded value" + + assert not os.path.exists(os.path.join(source_dir, "debian", "ignored_file")) + assert not os.path.exists(os.path.join(source_dir, "debian", "ignored_file.in")) + + +def test_inject_deb_distribution_folder(monkeypatch): + def mock_check_call(command): + global clone_dir + clone_dir = command[-1] + os.makedirs(os.path.join(clone_dir, 
"desktop/deb/distribution")) + + monkeypatch.setattr(deb.subprocess, "check_call", mock_check_call) + + def mock_copytree(source_tree, destination_tree): + global clone_dir + assert source_tree == mozpath.join(clone_dir, "desktop/deb/distribution") + assert destination_tree == "/source_dir/firefox/distribution" + + monkeypatch.setattr(deb.shutil, "copytree", mock_copytree) + + deb._inject_deb_distribution_folder("/source_dir", "Firefox") + + +ZH_TW_FTL = """\ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# These messages are used by the Firefox ".desktop" file on Linux. +# https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html + +# The entry name is the label on the desktop icon, among other things. +desktop-entry-name = { -brand-shortcut-name } +# The comment usually appears as a tooltip when hovering over application menu entry. +desktop-entry-comment = ç€è¦½å…¨çƒè³‡è¨Šç¶² +desktop-entry-generic-name = 網é ç€è¦½å™¨ +# Keywords are search terms used to find this application. +# The string is a list of keywords separated by semicolons: +# - Do NOT replace semicolons with other punctuation signs. +# - The list MUST end with a semicolon. +desktop-entry-keywords = 網際網路;網路;ç€è¦½å™¨;網é ;上網;Internet;WWW;Browser;Web;Explorer; + +## Actions are visible in a context menu after right clicking the +## taskbar icon, possibly other places depending on the environment. + +desktop-action-new-window-name = 開新視窗 +desktop-action-new-private-window-name = é–‹æ–°éš±ç§è¦–窗 +""" + +DESKTOP_ENTRY_FILE_TEXT = """\ +[Desktop Entry] +Version=1.0 +Type=Application +Exec=firefox-nightly %u +Terminal=false +X-MultipleArgs=false +Icon=firefox-nightly +StartupWMClass=firefox-nightly +Categories=GNOME;GTK;Network;WebBrowser; +MimeType=application/json;application/pdf;application/rdf+xml;application/rss+xml;application/x-xpinstall;application/xhtml+xml;application/xml;audio/flac;audio/ogg;audio/webm;image/avif;image/gif;image/jpeg;image/png;image/svg+xml;image/webp;text/html;text/xml;video/ogg;video/webm;x-scheme-handler/chrome;x-scheme-handler/http;x-scheme-handler/https; +StartupNotify=true +Actions=new-window;new-private-window;open-profile-manager; +Name=en-US-desktop-entry-name +Name[zh_TW]=zh-TW-desktop-entry-name +Comment=en-US-desktop-entry-comment +Comment[zh_TW]=zh-TW-desktop-entry-comment +GenericName=en-US-desktop-entry-generic-name +GenericName[zh_TW]=zh-TW-desktop-entry-generic-name +Keywords=en-US-desktop-entry-keywords +Keywords[zh_TW]=zh-TW-desktop-entry-keywords +X-GNOME-FullName=en-US-desktop-entry-x-gnome-full-name +X-GNOME-FullName[zh_TW]=zh-TW-desktop-entry-x-gnome-full-name + +[Desktop Action new-window] +Exec=firefox-nightly --new-window %u +Name=en-US-desktop-action-new-window-name +Name[zh_TW]=zh-TW-desktop-action-new-window-name + +[Desktop Action new-private-window] +Exec=firefox-nightly --private-window %u +Name=en-US-desktop-action-new-private-window-name +Name[zh_TW]=zh-TW-desktop-action-new-private-window-name + +[Desktop Action open-profile-manager] +Exec=firefox-nightly --ProfileManager +Name=en-US-desktop-action-open-profile-manager +Name[zh_TW]=zh-TW-desktop-action-open-profile-manager +""" + + +def test_generate_deb_desktop_entry_file_text(monkeypatch): + def responsive(url): + if "zh-TW" in url: + return Mock( + **{ + "status_code": 200, + "text": ZH_TW_FTL, + } + ) + return 
Mock(**{"status_code": 404}) + + monkeypatch.setattr(deb.requests, "get", responsive) + + output_stream = StringIO() + logger = logging.getLogger("mozbuild:test:repackaging") + logger.setLevel(logging.DEBUG) + stream_handler = logging.StreamHandler(output_stream) + logger.addHandler(stream_handler) + + def log(level, action, params, format_str): + logger.log( + level, + format_str.format(**params), + extra={"action": action, "params": params}, + ) + + build_variables = { + "DEB_PKG_NAME": "firefox-nightly", + } + release_product = "firefox" + release_type = "nightly" + + def fluent_localization(locales, resources, loader): + def format_value(resource): + return f"{locales[0]}-{resource}" + + return Mock(**{"format_value": format_value}) + + fluent_resource_loader = Mock() + + desktop_entry_file_text = deb._generate_browser_desktop_entry_file_text( + log, + build_variables, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, + ) + + assert desktop_entry_file_text == DESKTOP_ENTRY_FILE_TEXT + + def outage(url): + return Mock(**{"status_code": 500}) + + monkeypatch.setattr(deb.requests, "get", outage) + + with pytest.raises(deb.HgServerError): + desktop_entry_file_text = deb._generate_browser_desktop_entry_file_text( + log, + build_variables, + release_product, + release_type, + fluent_localization, + fluent_resource_loader, + ) + + +@pytest.mark.parametrize( + "does_path_exits, expectation", + ( + (True, does_not_raise()), + (False, pytest.raises(deb.NoDebPackageFound)), + ), +) +def test_generate_deb_archive( + monkeypatch, + does_path_exits, + expectation, +): + monkeypatch.setattr(deb, "_get_command", lambda _: ["mock_command"]) + monkeypatch.setattr(deb.subprocess, "check_call", lambda *_, **__: None) + + def mock_exists(path): + assert path == "/target_dir/firefox_111.0_amd64.deb" + return does_path_exits + + monkeypatch.setattr(deb.os.path, "exists", mock_exists) + + def mock_move(source_path, destination_path): + assert source_path == "/target_dir/firefox_111.0_amd64.deb" + assert destination_path == "/output/target.deb" + + monkeypatch.setattr(deb.shutil, "move", mock_move) + + with expectation: + deb._generate_deb_archive( + source_dir="/source_dir", + target_dir="/target_dir", + output_file_path="/output/target.deb", + build_variables={ + "DEB_PKG_NAME": "firefox", + "DEB_PKG_VERSION": "111.0", + }, + arch="x86_64", + ) + + +@pytest.mark.parametrize( + "arch, is_chroot_available, expected", + ( + ( + "all", + True, + [ + "chroot", + "/srv/jessie-amd64", + "bash", + "-c", + "cd /tmp/*/source; dpkg-buildpackage -us -uc -b", + ], + ), + ("all", False, ["dpkg-buildpackage", "-us", "-uc", "-b"]), + ( + "x86", + True, + [ + "chroot", + "/srv/jessie-i386", + "bash", + "-c", + "cd /tmp/*/source; dpkg-buildpackage -us -uc -b --host-arch=i386", + ], + ), + ("x86", False, ["dpkg-buildpackage", "-us", "-uc", "-b", "--host-arch=i386"]), + ( + "x86_64", + True, + [ + "chroot", + "/srv/jessie-amd64", + "bash", + "-c", + "cd /tmp/*/source; dpkg-buildpackage -us -uc -b --host-arch=amd64", + ], + ), + ( + "x86_64", + False, + ["dpkg-buildpackage", "-us", "-uc", "-b", "--host-arch=amd64"], + ), + ), +) +def test_get_command(monkeypatch, arch, is_chroot_available, expected): + monkeypatch.setattr(deb, "_is_chroot_available", lambda _: is_chroot_available) + assert deb._get_command(arch) == expected + + +@pytest.mark.parametrize( + "arch, does_dir_exist, expected_path, expected_result", + ( + ("all", False, "/srv/jessie-amd64", False), + ("all", True, 
"/srv/jessie-amd64", True), + ("x86", False, "/srv/jessie-i386", False), + ("x86_64", False, "/srv/jessie-amd64", False), + ("x86", True, "/srv/jessie-i386", True), + ("x86_64", True, "/srv/jessie-amd64", True), + ), +) +def test_is_chroot_available( + monkeypatch, arch, does_dir_exist, expected_path, expected_result +): + def _mock_is_dir(path): + assert path == expected_path + return does_dir_exist + + monkeypatch.setattr(deb.os.path, "isdir", _mock_is_dir) + assert deb._is_chroot_available(arch) == expected_result + + +@pytest.mark.parametrize( + "arch, expected", + ( + ("all", "/srv/jessie-amd64"), + ("x86", "/srv/jessie-i386"), + ("x86_64", "/srv/jessie-amd64"), + ), +) +def test_get_chroot_path(arch, expected): + assert deb._get_chroot_path(arch) == expected + + +_MANIFEST_JSON_DATA = { + "langpack_id": "fr", + "manifest_version": 2, + "browser_specific_settings": { + "gecko": { + "id": "langpack-fr@devedition.mozilla.org", + "strict_min_version": "112.0a1", + "strict_max_version": "112.0a1", + } + }, + "name": "Language: Français (French)", + "description": "Firefox Developer Edition Language Pack for Français (fr) – French", + "version": "112.0.20230227.181253", + "languages": { + "fr": { + "version": "20230223164410", + "chrome_resources": { + "app-marketplace-icons": "browser/chrome/browser/locale/fr/app-marketplace-icons/", + "branding": "browser/chrome/fr/locale/branding/", + "browser": "browser/chrome/fr/locale/browser/", + "browser-region": "browser/chrome/fr/locale/browser-region/", + "devtools": "browser/chrome/fr/locale/fr/devtools/client/", + "devtools-shared": "browser/chrome/fr/locale/fr/devtools/shared/", + "formautofill": "browser/features/formautofill@mozilla.org/fr/locale/fr/", + "report-site-issue": "browser/features/webcompat-reporter@mozilla.org/fr/locale/fr/", + "alerts": "chrome/fr/locale/fr/alerts/", + "autoconfig": "chrome/fr/locale/fr/autoconfig/", + "global": "chrome/fr/locale/fr/global/", + "global-platform": { + "macosx": "chrome/fr/locale/fr/global-platform/mac/", + "linux": "chrome/fr/locale/fr/global-platform/unix/", + "android": "chrome/fr/locale/fr/global-platform/unix/", + "win": "chrome/fr/locale/fr/global-platform/win/", + }, + "mozapps": "chrome/fr/locale/fr/mozapps/", + "necko": "chrome/fr/locale/fr/necko/", + "passwordmgr": "chrome/fr/locale/fr/passwordmgr/", + "pdf.js": "chrome/fr/locale/pdfviewer/", + "pipnss": "chrome/fr/locale/fr/pipnss/", + "pippki": "chrome/fr/locale/fr/pippki/", + "places": "chrome/fr/locale/fr/places/", + "weave": "chrome/fr/locale/fr/services/", + }, + } + }, + "sources": {"browser": {"base_path": "browser/"}}, + "author": "mozfr.org (contributors: L’équipe francophone)", +} + + +def test_extract_langpack_metadata(): + with tempfile.TemporaryDirectory() as d: + langpack_path = os.path.join(d, "langpack.xpi") + with zipfile.ZipFile(langpack_path, "w") as zip: + zip.writestr("manifest.json", json.dumps(_MANIFEST_JSON_DATA)) + + assert deb._extract_langpack_metadata(langpack_path) == _MANIFEST_JSON_DATA + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_android_version_code.py b/python/mozbuild/mozbuild/test/test_android_version_code.py new file mode 100644 index 0000000000..7600ebe0d8 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_android_version_code.py @@ -0,0 +1,111 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest + +from mozunit import main + +from mozbuild.android_version_code import ( + android_version_code_v0, + android_version_code_v1, +) + + +class TestAndroidVersionCode(unittest.TestCase): + def test_android_version_code_v0(self): + # From https://treeherder.mozilla.org/#/jobs?repo=mozilla-central&revision=e25de9972a77. + buildid = "20150708104620" + arm_api9 = 2015070819 + arm_api11 = 2015070821 + x86_api9 = 2015070822 + self.assertEqual( + android_version_code_v0( + buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None + ), + arm_api9, + ) + self.assertEqual( + android_version_code_v0( + buildid, cpu_arch="armeabi-v7a", min_sdk=11, max_sdk=None + ), + arm_api11, + ) + self.assertEqual( + android_version_code_v0(buildid, cpu_arch="x86", min_sdk=9, max_sdk=None), + x86_api9, + ) + + def test_android_version_code_v1(self): + buildid = "20150825141628" + arm_api16 = 0b01111000001000000001001001110001 + arm64_api21 = 0b01111000001000000001001001110100 + x86_api9 = 0b01111000001000000001001001110100 + self.assertEqual( + android_version_code_v1( + buildid, cpu_arch="armeabi-v7a", min_sdk=16, max_sdk=None + ), + arm_api16, + ) + self.assertEqual( + android_version_code_v1( + buildid, cpu_arch="arm64-v8a", min_sdk=21, max_sdk=None + ), + arm64_api21, + ) + self.assertEqual( + android_version_code_v1(buildid, cpu_arch="x86", min_sdk=9, max_sdk=None), + x86_api9, + ) + + def test_android_version_code_v1_underflow(self): + """Verify that it is an error to ask for v1 codes predating the cutoff.""" + buildid = "201508010000" # Earliest possible. + arm_api9 = 0b01111000001000000000000000000000 + self.assertEqual( + android_version_code_v1( + buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None + ), + arm_api9, + ) + with self.assertRaises(ValueError) as cm: + underflow = "201507310000" # Latest possible (valid) underflowing date. 
+            android_version_code_v1(
+                underflow, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+            )
+        self.assertTrue("underflow" in str(cm.exception))
+
+    def test_android_version_code_v1_running_low(self):
+        """Verify there is an informative message if one asks for v1
+        codes that are close to overflow."""
+        with self.assertRaises(ValueError) as cm:
+            overflow = "20290801000000"
+            android_version_code_v1(
+                overflow, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+            )
+        self.assertTrue("Running out of low order bits" in str(cm.exception))
+
+    def test_android_version_code_v1_overflow(self):
+        """Verify that it is an error to ask for v1 codes that actually do overflow."""
+        with self.assertRaises(ValueError) as cm:
+            overflow = "20310801000000"
+            android_version_code_v1(
+                overflow, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+            )
+        self.assertTrue("overflow" in str(cm.exception))
+
+    def test_android_version_code_v0_relative_v1(self):
+        """Verify that the first v1 code is greater than the equivalent v0 code."""
+        buildid = "20150801000000"
+        self.assertGreater(
+            android_version_code_v1(
+                buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+            ),
+            android_version_code_v0(
+                buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+            ),
+        )
+
+
+if __name__ == "__main__":
+    main()
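The v1 codes asserted above embed the build time as whole hours since a 2015-08-01 cutoff, which is why "201507310000" underflows while dates around 2029 and 2031 trip the "running low" and overflow errors. A rough sketch of that arithmetic; the 17-bit field width is an assumption inferred from the test dates (2**17 hours is roughly 15 years past the cutoff), not taken from the implementation:

from datetime import datetime

V1_CUTOFF = datetime(2015, 8, 1)
HOURS_BITS = 17  # assumed width, consistent with the ~2030 overflow horizon

def hours_since_cutoff(buildid):
    # Build IDs are YYYYMMDDHHMMSS; shorter ones are zero-padded here.
    build_time = datetime.strptime(buildid.ljust(14, "0"), "%Y%m%d%H%M%S")
    hours = int((build_time - V1_CUTOFF).total_seconds()) // 3600
    if hours < 0:
        raise ValueError("underflow: buildid predates the v1 cutoff")
    if hours >= 2**HOURS_BITS:
        raise ValueError("overflow: too many hours since the v1 cutoff")
    return hours

print(hours_since_cutoff("20150825141628"))  # 590 hours after the cutoff
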
diff --git a/python/mozbuild/mozbuild/test/test_artifact_cache.py b/python/mozbuild/mozbuild/test/test_artifact_cache.py
new file mode 100644
index 0000000000..d12d150183
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_artifact_cache.py
@@ -0,0 +1,145 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import time
+import unittest
+from shutil import rmtree
+from tempfile import mkdtemp
+
+import mozunit
+
+from mozbuild import artifact_cache
+from mozbuild.artifact_cache import ArtifactCache
+
+CONTENTS = {
+    "http://server/foo": b"foo",
+    "http://server/bar": b"bar" * 400,
+    "http://server/qux": b"qux" * 400,
+    "http://server/fuga": b"fuga" * 300,
+    "http://server/hoge": b"hoge" * 300,
+    "http://server/larger": b"larger" * 3000,
+}
+
+
+class FakeResponse(object):
+    def __init__(self, content):
+        self._content = content
+
+    @property
+    def headers(self):
+        return {"Content-length": str(len(self._content))}
+
+    def iter_content(self, chunk_size):
+        content = memoryview(self._content)
+        while content:
+            yield content[:chunk_size]
+            content = content[chunk_size:]
+
+    def raise_for_status(self):
+        pass
+
+    def close(self):
+        pass
+
+
+class FakeSession(object):
+    def get(self, url, stream=True):
+        assert stream is True
+        return FakeResponse(CONTENTS[url])
+
+
+class TestArtifactCache(unittest.TestCase):
+    def setUp(self):
+        self.min_cached_artifacts = artifact_cache.MIN_CACHED_ARTIFACTS
+        self.max_cached_artifacts_size = artifact_cache.MAX_CACHED_ARTIFACTS_SIZE
+        artifact_cache.MIN_CACHED_ARTIFACTS = 2
+        artifact_cache.MAX_CACHED_ARTIFACTS_SIZE = 4096
+
+        self._real_utime = os.utime
+        os.utime = self.utime
+        self.timestamp = time.time() - 86400
+
+        self.tmpdir = mkdtemp()
+
+    def tearDown(self):
+        rmtree(self.tmpdir)
+        artifact_cache.MIN_CACHED_ARTIFACTS = self.min_cached_artifacts
+        artifact_cache.MAX_CACHED_ARTIFACTS_SIZE = self.max_cached_artifacts_size
+        os.utime = self._real_utime
+
+    def utime(self, path, times):
+        if times is None:
+            # Ensure all downloaded files have a different timestamp
+            times = (self.timestamp, self.timestamp)
+            self.timestamp += 2
+        self._real_utime(path, times)
+
+    def listtmpdir(self):
+        return [p for p in os.listdir(self.tmpdir) if p != ".metadata_never_index"]
+
+    def test_artifact_cache_persistence(self):
+        cache = ArtifactCache(self.tmpdir)
+        cache._download_manager.session = FakeSession()
+
+        path = cache.fetch("http://server/foo")
+        expected = [os.path.basename(path)]
+        self.assertEqual(self.listtmpdir(), expected)
+
+        path = cache.fetch("http://server/bar")
+        expected.append(os.path.basename(path))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+        # We're downloading more than the cache allows us, but since it's all
+        # in the same session, no purge happens.
+        path = cache.fetch("http://server/qux")
+        expected.append(os.path.basename(path))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+        path = cache.fetch("http://server/fuga")
+        expected.append(os.path.basename(path))
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+        cache = ArtifactCache(self.tmpdir)
+        cache._download_manager.session = FakeSession()
+
+        # Downloading a new file in a new session purges the oldest files in
+        # the cache.
+        path = cache.fetch("http://server/hoge")
+        expected.append(os.path.basename(path))
+        expected = expected[2:]
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+        # Downloading a file already in the cache leaves the cache untouched.
+        cache = ArtifactCache(self.tmpdir)
+        cache._download_manager.session = FakeSession()
+
+        path = cache.fetch("http://server/qux")
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+        # bar was purged earlier, re-downloading it should purge the oldest
+        # downloaded file, which at this point would be qux, but we also
+        # re-downloaded it in the meantime, so the next one (fuga) should be
+        # the purged one.
+        cache = ArtifactCache(self.tmpdir)
+        cache._download_manager.session = FakeSession()
+
+        path = cache.fetch("http://server/bar")
+        expected.append(os.path.basename(path))
+        expected = [p for p in expected if "fuga" not in p]
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+        # Downloading one file larger than the cache size should still leave
+        # MIN_CACHED_ARTIFACTS files.
+        cache = ArtifactCache(self.tmpdir)
+        cache._download_manager.session = FakeSession()
+
+        path = cache.fetch("http://server/larger")
+        expected.append(os.path.basename(path))
+        expected = expected[-2:]
+        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_artifacts.py b/python/mozbuild/mozbuild/test/test_artifacts.py
new file mode 100644
index 0000000000..397b6dbdb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_artifacts.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
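The tests below exercise how ArtifactJob picks candidate trees from MOZ_APP_VERSION_DISPLAY. As a simplified restatement of the cases they assert (this classifier is illustrative only, not the actual mozbuild.artifacts logic):

import re

def classify_version(version_display):
    # Nightly: "95.0a1"; beta: "92.1b2"; ESR: "91.3.0esr"; anything else,
    # including an empty string, falls back to the default trees.
    version = version_display.strip()
    if version.endswith("esr"):
        return "esr"
    if re.search(r"a\d+$", version):
        return "nightly"
    if re.search(r"b\d+$", version):
        return "beta"
    return "default"

assert classify_version("95.0a1") == "nightly"
assert classify_version("92.1b2") == "beta"
assert classify_version("\n\n91.3.0esr") == "esr"
assert classify_version("93.0.1") == "default"
assert classify_version("") == "default"
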
+ +from unittest import TestCase + +import buildconfig +import mozunit + +from mozbuild.artifacts import ArtifactJob, ThunderbirdMixin + + +class FakeArtifactJob(ArtifactJob): + package_re = r"" + + +class TestArtifactJob(TestCase): + def _assert_candidate_trees(self, version_display, expected_trees): + buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = version_display + + job = FakeArtifactJob() + self.assertGreater(len(job.candidate_trees), 0) + self.assertEqual(job.candidate_trees, expected_trees) + + def test_candidate_trees_with_empty_file(self): + self._assert_candidate_trees( + version_display="", expected_trees=ArtifactJob.default_candidate_trees + ) + + def test_candidate_trees_with_beta_version(self): + self._assert_candidate_trees( + version_display="92.1b2", expected_trees=ArtifactJob.beta_candidate_trees + ) + + def test_candidate_trees_with_esr_version(self): + self._assert_candidate_trees( + version_display="91.3.0esr", expected_trees=ArtifactJob.esr_candidate_trees + ) + + def test_candidate_trees_with_nightly_version(self): + self._assert_candidate_trees( + version_display="95.0a1", expected_trees=ArtifactJob.nightly_candidate_trees + ) + + def test_candidate_trees_with_release_version(self): + self._assert_candidate_trees( + version_display="93.0.1", expected_trees=ArtifactJob.default_candidate_trees + ) + + def test_candidate_trees_with_newline_before_version(self): + self._assert_candidate_trees( + version_display="\n\n91.3.0esr", + expected_trees=ArtifactJob.esr_candidate_trees, + ) + + def test_property_is_cached(self): + job = FakeArtifactJob() + expected_trees = ArtifactJob.esr_candidate_trees + + buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = "91.3.0.esr" + self.assertEqual(job.candidate_trees, expected_trees) + # Because the property is cached, changing the + # `MOZ_APP_VERSION_DISPLAY` won't have any impact. + buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = "" + self.assertEqual(job.candidate_trees, expected_trees) + + +class FakeThunderbirdJob(ThunderbirdMixin, FakeArtifactJob): + pass + + +class TestThunderbirdMixin(TestCase): + def _assert_candidate_trees(self, version_display, source_repo, expected_trees): + buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = version_display + buildconfig.substs["MOZ_SOURCE_REPO"] = source_repo + + job = FakeThunderbirdJob() + self.assertGreater(len(job.candidate_trees), 0) + self.assertEqual(job.candidate_trees, expected_trees) + + def test_candidate_trees_with_beta_version(self): + self._assert_candidate_trees( + version_display="92.1b2", + source_repo="https://hg.mozilla.org/releases/comm-beta", + expected_trees=ThunderbirdMixin.beta_candidate_trees, + ) + + def test_candidate_trees_with_esr_version(self): + self._assert_candidate_trees( + version_display="91.3.0", + source_repo="https://hg.mozilla.org/releases/comm-esr91", + expected_trees=ThunderbirdMixin.esr_candidate_trees, + ) + + def test_candidate_trees_with_nightly_version(self): + self._assert_candidate_trees( + version_display="95.0a1", + source_repo="https://hg.mozilla.org/comm-central", + expected_trees=ThunderbirdMixin.nightly_candidate_trees, + ) + + def test_property_is_cached(self): + job = FakeThunderbirdJob() + expected_trees = ThunderbirdMixin.esr_candidate_trees + + buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = "91.3.0.esr" + self.assertEqual(job.candidate_trees, expected_trees) + # Because the property is cached, changing the + # `MOZ_APP_VERSION_DISPLAY` won't have any impact. 
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = "" + self.assertEqual(job.candidate_trees, expected_trees) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_base.py b/python/mozbuild/mozbuild/test/test_base.py new file mode 100644 index 0000000000..c75a71ef5d --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_base.py @@ -0,0 +1,446 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import shutil +import sys +import tempfile +import unittest + +import mozpack.path as mozpath +from buildconfig import topobjdir, topsrcdir +from mach.logging import LoggingManager +from mozfile.mozfile import NamedTemporaryFile +from mozunit import main +from six import StringIO + +from mozbuild.backend.configenvironment import ConfigEnvironment +from mozbuild.base import ( + BadEnvironmentException, + MachCommandBase, + MozbuildObject, + PathArgument, +) +from mozbuild.test.common import prepare_tmp_topsrcdir + +curdir = os.path.dirname(__file__) +log_manager = LoggingManager() + + +class TestMozbuildObject(unittest.TestCase): + def setUp(self): + self._old_cwd = os.getcwd() + self._old_env = dict(os.environ) + os.environ.pop("MOZCONFIG", None) + os.environ.pop("MOZ_OBJDIR", None) + + def tearDown(self): + os.chdir(self._old_cwd) + os.environ.clear() + os.environ.update(self._old_env) + + def get_base(self, topobjdir=None): + return MozbuildObject(topsrcdir, None, log_manager, topobjdir=topobjdir) + + def test_objdir_config_guess(self): + base = self.get_base() + + with NamedTemporaryFile(mode="wt") as mozconfig: + os.environ["MOZCONFIG"] = mozconfig.name + + self.assertIsNotNone(base.topobjdir) + self.assertEqual(len(base.topobjdir.split()), 1) + config_guess = base.resolve_config_guess() + self.assertTrue(base.topobjdir.endswith(config_guess)) + self.assertTrue(os.path.isabs(base.topobjdir)) + self.assertTrue(base.topobjdir.startswith(base.topsrcdir)) + + def test_objdir_trailing_slash(self): + """Trailing slashes in topobjdir should be removed.""" + base = self.get_base() + + with NamedTemporaryFile(mode="wt") as mozconfig: + mozconfig.write("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/") + mozconfig.flush() + os.environ["MOZCONFIG"] = mozconfig.name + + self.assertEqual(base.topobjdir, mozpath.join(base.topsrcdir, "foo")) + self.assertTrue(base.topobjdir.endswith("foo")) + + def test_objdir_config_status(self): + """Ensure @CONFIG_GUESS@ is handled when loading mozconfig.""" + base = self.get_base() + guess = base.resolve_config_guess() + + # There may be symlinks involved, so we use real paths to ensure + # path consistency. + d = os.path.realpath(tempfile.mkdtemp()) + try: + mozconfig = os.path.join(d, "mozconfig") + with open(mozconfig, "wt") as fh: + fh.write("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/@CONFIG_GUESS@") + print("Wrote mozconfig %s" % mozconfig) + + topobjdir = os.path.join(d, "foo", guess) + os.makedirs(topobjdir) + + # Create a fake topsrcdir. 
+            prepare_tmp_topsrcdir(d)
+
+            mozinfo = os.path.join(topobjdir, "mozinfo.json")
+            with open(mozinfo, "wt") as fh:
+                json.dump(
+                    dict(
+                        topsrcdir=d,
+                        mozconfig=mozconfig,
+                    ),
+                    fh,
+                )
+
+            os.environ["MOZCONFIG"] = mozconfig
+            os.chdir(topobjdir)
+
+            obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+            self.assertEqual(obj.topobjdir, mozpath.normsep(topobjdir))
+        finally:
+            os.chdir(self._old_cwd)
+            shutil.rmtree(d)
+
+    def test_relative_objdir(self):
+        """Relatively defined objdirs are loaded properly."""
+        d = os.path.realpath(tempfile.mkdtemp())
+        try:
+            mozconfig = os.path.join(d, "mozconfig")
+            with open(mozconfig, "wt") as fh:
+                fh.write("mk_add_options MOZ_OBJDIR=./objdir")
+
+            topobjdir = mozpath.join(d, "objdir")
+            os.mkdir(topobjdir)
+
+            mozinfo = os.path.join(topobjdir, "mozinfo.json")
+            with open(mozinfo, "wt") as fh:
+                json.dump(
+                    dict(
+                        topsrcdir=d,
+                        mozconfig=mozconfig,
+                    ),
+                    fh,
+                )
+
+            os.environ["MOZCONFIG"] = mozconfig
+            child = os.path.join(topobjdir, "foo", "bar")
+            os.makedirs(child)
+            os.chdir(child)
+
+            obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+            self.assertEqual(obj.topobjdir, topobjdir)
+
+        finally:
+            os.chdir(self._old_cwd)
+            shutil.rmtree(d)
+
+    @unittest.skipIf(
+        not hasattr(os, "symlink") or os.name == "nt", "symlinks not available."
+    )
+    def test_symlink_objdir(self):
+        """Objdir that is a symlink is loaded properly."""
+        d = os.path.realpath(tempfile.mkdtemp())
+        try:
+            topobjdir_real = os.path.join(d, "objdir")
+            topobjdir_link = os.path.join(d, "objlink")
+
+            os.mkdir(topobjdir_real)
+            os.symlink(topobjdir_real, topobjdir_link)
+
+            mozconfig = os.path.join(d, "mozconfig")
+            with open(mozconfig, "wt") as fh:
+                fh.write("mk_add_options MOZ_OBJDIR=%s" % topobjdir_link)
+
+            mozinfo = os.path.join(topobjdir_real, "mozinfo.json")
+            with open(mozinfo, "wt") as fh:
+                json.dump(
+                    dict(
+                        topsrcdir=d,
+                        mozconfig=mozconfig,
+                    ),
+                    fh,
+                )
+
+            os.chdir(topobjdir_link)
+            obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+            self.assertEqual(obj.topobjdir, topobjdir_real)
+
+            os.chdir(topobjdir_real)
+            obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+            self.assertEqual(obj.topobjdir, topobjdir_real)
+
+        finally:
+            os.chdir(self._old_cwd)
+            shutil.rmtree(d)
+
+    def test_mach_command_base_inside_objdir(self):
+        """Ensure a MachCommandBase constructed from inside the objdir works."""
+
+        d = os.path.realpath(tempfile.mkdtemp())
+
+        try:
+            topobjdir = os.path.join(d, "objdir")
+            os.makedirs(topobjdir)
+
+            topsrcdir = os.path.join(d, "srcdir")
+            prepare_tmp_topsrcdir(topsrcdir)
+
+            mozinfo = os.path.join(topobjdir, "mozinfo.json")
+            with open(mozinfo, "wt") as fh:
+                json.dump(
+                    dict(
+                        topsrcdir=topsrcdir,
+                    ),
+                    fh,
+                )
+
+            os.chdir(topobjdir)
+
+            class MockMachContext(object):
+                pass
+
+            context = MockMachContext()
+            context.cwd = topobjdir
+            context.topdir = topsrcdir
+            context.settings = None
+            context.log_manager = None
+            context.detect_virtualenv_mozinfo = False
+
+            o = MachCommandBase(context, None)
+
+            self.assertEqual(o.topobjdir, mozpath.normsep(topobjdir))
+            self.assertEqual(o.topsrcdir, mozpath.normsep(topsrcdir))
+
+        finally:
+            os.chdir(self._old_cwd)
+            shutil.rmtree(d)
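Several of the cases above and below hinge on how MOZ_OBJDIR lines in a mozconfig get expanded. A minimal sketch of the substitutions these tests rely on (expand_objdir is a hypothetical helper; the real logic lives in mozbuild's mozconfig loader):

import os

def expand_objdir(mozconfig_line, topsrcdir, config_guess):
    # Handles the two placeholders the tests use, @TOPSRCDIR@ and
    # @CONFIG_GUESS@, and drops any trailing slash.
    prefix = "mk_add_options MOZ_OBJDIR="
    objdir = mozconfig_line[len(prefix):]
    objdir = objdir.replace("@TOPSRCDIR@", topsrcdir)
    objdir = objdir.replace("@CONFIG_GUESS@", config_guess)
    return os.path.normpath(objdir)

print(expand_objdir("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/", "/src", "x86_64-pc-linux-gnu"))
# /src/foo, with the trailing slash removed as test_objdir_trailing_slash expects
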
+    def test_objdir_is_srcdir_rejected(self):
+        """Ensure configurations where the objdir is the srcdir are rejected."""
+        d = os.path.realpath(tempfile.mkdtemp())
+
+        try:
+            # The easiest way to do this is to create a mozinfo.json with data
+            # that would never occur in practice.
+            mozinfo = os.path.join(d, "mozinfo.json")
+            with open(mozinfo, "wt") as fh:
+                json.dump({"topsrcdir": d}, fh)
+
+            os.chdir(d)
+
+            with self.assertRaises(BadEnvironmentException):
+                MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+        finally:
+            os.chdir(self._old_cwd)
+            shutil.rmtree(d)
+
+    def test_objdir_mismatch(self):
+        """Ensure MachCommandBase throws on objdir mismatch."""
+        d = os.path.realpath(tempfile.mkdtemp())
+
+        try:
+            real_topobjdir = os.path.join(d, "real-objdir")
+            os.makedirs(real_topobjdir)
+
+            topobjdir = os.path.join(d, "objdir")
+            os.makedirs(topobjdir)
+
+            topsrcdir = os.path.join(d, "srcdir")
+            prepare_tmp_topsrcdir(topsrcdir)
+
+            mozconfig = os.path.join(d, "mozconfig")
+            with open(mozconfig, "wt") as fh:
+                fh.write(
+                    "mk_add_options MOZ_OBJDIR=%s" % real_topobjdir.replace("\\", "/")
+                )
+
+            mozinfo = os.path.join(topobjdir, "mozinfo.json")
+            with open(mozinfo, "wt") as fh:
+                json.dump(
+                    dict(
+                        topsrcdir=topsrcdir,
+                        mozconfig=mozconfig,
+                    ),
+                    fh,
+                )
+
+            os.chdir(topobjdir)
+
+            class MockMachContext(object):
+                pass
+
+            context = MockMachContext()
+            context.cwd = topobjdir
+            context.topdir = topsrcdir
+            context.settings = None
+            context.log_manager = None
+            context.detect_virtualenv_mozinfo = False
+
+            stdout = sys.stdout
+            sys.stdout = StringIO()
+            try:
+                with self.assertRaises(SystemExit):
+                    MachCommandBase(context, None)
+
+                self.assertTrue(
+                    sys.stdout.getvalue().startswith(
+                        "Ambiguous object directory detected."
+                    )
+                )
+            finally:
+                sys.stdout = stdout
+
+        finally:
+            os.chdir(self._old_cwd)
+            shutil.rmtree(d)
+
+    def test_config_environment(self):
+        d = os.path.realpath(tempfile.mkdtemp())
+
+        try:
+            with open(os.path.join(d, "config.status"), "w") as fh:
+                fh.write("# coding=utf-8\n")
+                fh.write("from __future__ import unicode_literals\n")
+                fh.write("topobjdir = '%s'\n" % mozpath.normsep(d))
+                fh.write("topsrcdir = '%s'\n" % topsrcdir)
+                fh.write("mozconfig = None\n")
+                fh.write("defines = { 'FOO': 'foo' }\n")
+                fh.write("substs = { 'QUX': 'qux' }\n")
+                fh.write(
+                    "__all__ = ['topobjdir', 'topsrcdir', 'defines', "
+                    "'substs', 'mozconfig']"
+                )
+
+            base = self.get_base(topobjdir=d)
+
+            ce = base.config_environment
+            self.assertIsInstance(ce, ConfigEnvironment)
+
+            self.assertEqual(base.defines, ce.defines)
+            self.assertEqual(base.substs, ce.substs)
+
+            self.assertEqual(base.defines, {"FOO": "foo"})
+            self.assertEqual(
+                base.substs,
+                {
+                    "ACDEFINES": "-DFOO=foo",
+                    "ALLEMPTYSUBSTS": "",
+                    "ALLSUBSTS": "ACDEFINES = -DFOO=foo\nQUX = qux",
+                    "QUX": "qux",
+                },
+            )
+        finally:
+            shutil.rmtree(d)
+
+    def test_get_binary_path(self):
+        base = self.get_base(topobjdir=topobjdir)
+
+        platform = sys.platform
+
+        # We should ideally use the config.status from the build. Let's install
+        # a fake one.
+ substs = [ + ("MOZ_APP_NAME", "awesomeapp"), + ("MOZ_BUILD_APP", "awesomeapp"), + ] + if sys.platform.startswith("darwin"): + substs.append(("OS_ARCH", "Darwin")) + substs.append(("BIN_SUFFIX", "")) + substs.append(("MOZ_MACBUNDLE_NAME", "Nightly.app")) + elif sys.platform.startswith(("win32", "cygwin")): + substs.append(("OS_ARCH", "WINNT")) + substs.append(("BIN_SUFFIX", ".exe")) + else: + substs.append(("OS_ARCH", "something")) + substs.append(("BIN_SUFFIX", "")) + + base._config_environment = ConfigEnvironment( + base.topsrcdir, base.topobjdir, substs=substs + ) + + p = base.get_binary_path("xpcshell", False) + if platform.startswith("darwin"): + self.assertTrue(p.endswith("Contents/MacOS/xpcshell")) + elif platform.startswith(("win32", "cygwin")): + self.assertTrue(p.endswith("xpcshell.exe")) + else: + self.assertTrue(p.endswith("dist/bin/xpcshell")) + + p = base.get_binary_path(validate_exists=False) + if platform.startswith("darwin"): + self.assertTrue(p.endswith("Contents/MacOS/awesomeapp")) + elif platform.startswith(("win32", "cygwin")): + self.assertTrue(p.endswith("awesomeapp.exe")) + else: + self.assertTrue(p.endswith("dist/bin/awesomeapp")) + + p = base.get_binary_path(validate_exists=False, where="staged-package") + if platform.startswith("darwin"): + self.assertTrue( + p.endswith("awesomeapp/Nightly.app/Contents/MacOS/awesomeapp") + ) + elif platform.startswith(("win32", "cygwin")): + self.assertTrue(p.endswith("awesomeapp\\awesomeapp.exe")) + else: + self.assertTrue(p.endswith("awesomeapp/awesomeapp")) + + self.assertRaises(Exception, base.get_binary_path, where="somewhere") + + p = base.get_binary_path("foobar", validate_exists=False) + if platform.startswith("win32"): + self.assertTrue(p.endswith("foobar.exe")) + else: + self.assertTrue(p.endswith("foobar")) + + +class TestPathArgument(unittest.TestCase): + def test_path_argument(self): + # Absolute path + p = PathArgument("/obj/foo", "/src", "/obj", "/src") + self.assertEqual(p.relpath(), "foo") + self.assertEqual(p.srcdir_path(), "/src/foo") + self.assertEqual(p.objdir_path(), "/obj/foo") + + # Relative path within srcdir + p = PathArgument("foo", "/src", "/obj", "/src") + self.assertEqual(p.relpath(), "foo") + self.assertEqual(p.srcdir_path(), "/src/foo") + self.assertEqual(p.objdir_path(), "/obj/foo") + + # Relative path within subdirectory + p = PathArgument("bar", "/src", "/obj", "/src/foo") + self.assertEqual(p.relpath(), "foo/bar") + self.assertEqual(p.srcdir_path(), "/src/foo/bar") + self.assertEqual(p.objdir_path(), "/obj/foo/bar") + + # Relative path within objdir + p = PathArgument("foo", "/src", "/obj", "/obj") + self.assertEqual(p.relpath(), "foo") + self.assertEqual(p.srcdir_path(), "/src/foo") + self.assertEqual(p.objdir_path(), "/obj/foo") + + # "." 
path + p = PathArgument(".", "/src", "/obj", "/src/foo") + self.assertEqual(p.relpath(), "foo") + self.assertEqual(p.srcdir_path(), "/src/foo") + self.assertEqual(p.objdir_path(), "/obj/foo") + + # Nested src/obj directories + p = PathArgument("bar", "/src", "/src/obj", "/src/obj/foo") + self.assertEqual(p.relpath(), "foo/bar") + self.assertEqual(p.srcdir_path(), "/src/foo/bar") + self.assertEqual(p.objdir_path(), "/src/obj/foo/bar") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_containers.py b/python/mozbuild/mozbuild/test/test_containers.py new file mode 100644 index 0000000000..50dd0a4088 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_containers.py @@ -0,0 +1,224 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest +from collections import OrderedDict + +from mozunit import main + +from mozbuild.util import ( + KeyedDefaultDict, + List, + OrderedDefaultDict, + ReadOnlyDefaultDict, + ReadOnlyDict, + ReadOnlyKeyedDefaultDict, + ReadOnlyNamespace, +) + + +class TestReadOnlyNamespace(unittest.TestCase): + def test_basic(self): + test = ReadOnlyNamespace(foo=1, bar=2) + + self.assertEqual(test.foo, 1) + self.assertEqual(test.bar, 2) + self.assertEqual( + sorted(i for i in dir(test) if not i.startswith("__")), ["bar", "foo"] + ) + + with self.assertRaises(AttributeError): + test.missing + + with self.assertRaises(Exception): + test.foo = 2 + + with self.assertRaises(Exception): + del test.foo + + self.assertEqual(test, test) + self.assertEqual(test, ReadOnlyNamespace(foo=1, bar=2)) + self.assertNotEqual(test, ReadOnlyNamespace(foo="1", bar=2)) + self.assertNotEqual(test, ReadOnlyNamespace(foo=1, bar=2, qux=3)) + self.assertNotEqual(test, ReadOnlyNamespace(foo=1, qux=3)) + self.assertNotEqual(test, ReadOnlyNamespace(foo=3, bar="42")) + + +class TestReadOnlyDict(unittest.TestCase): + def test_basic(self): + original = {"foo": 1, "bar": 2} + + test = ReadOnlyDict(original) + + self.assertEqual(original, test) + self.assertEqual(test["foo"], 1) + + with self.assertRaises(KeyError): + test["missing"] + + with self.assertRaises(Exception): + test["baz"] = True + + def test_update(self): + original = {"foo": 1, "bar": 2} + + test = ReadOnlyDict(original) + + with self.assertRaises(Exception): + test.update(foo=2) + + self.assertEqual(original, test) + + def test_del(self): + original = {"foo": 1, "bar": 2} + + test = ReadOnlyDict(original) + + with self.assertRaises(Exception): + del test["foo"] + + self.assertEqual(original, test) + + +class TestReadOnlyDefaultDict(unittest.TestCase): + def test_simple(self): + original = {"foo": 1, "bar": 2} + + test = ReadOnlyDefaultDict(bool, original) + + self.assertEqual(original, test) + + self.assertEqual(test["foo"], 1) + + def test_assignment(self): + test = ReadOnlyDefaultDict(bool, {}) + + with self.assertRaises(Exception): + test["foo"] = True + + def test_defaults(self): + test = ReadOnlyDefaultDict(bool, {"foo": 1}) + + self.assertEqual(test["foo"], 1) + + self.assertEqual(test["qux"], False) + + +class TestList(unittest.TestCase): + def test_add_list(self): + test = List([1, 2, 3]) + + test += [4, 5, 6] + self.assertIsInstance(test, List) + self.assertEqual(test, [1, 2, 3, 4, 5, 6]) + + test = test + [7, 8] + self.assertIsInstance(test, List) + self.assertEqual(test, [1, 2, 3, 4, 5, 6, 7, 8]) + + def test_add_string(self): + test = 
List([1, 2, 3]) + + with self.assertRaises(ValueError): + test += "string" + + def test_none(self): + """As a special exception, we allow None to be treated as an empty + list.""" + test = List([1, 2, 3]) + + test += None + self.assertEqual(test, [1, 2, 3]) + + test = test + None + self.assertIsInstance(test, List) + self.assertEqual(test, [1, 2, 3]) + + with self.assertRaises(ValueError): + test += False + + with self.assertRaises(ValueError): + test = test + False + + +class TestOrderedDefaultDict(unittest.TestCase): + def test_simple(self): + original = OrderedDict(foo=1, bar=2) + + test = OrderedDefaultDict(bool, original) + + self.assertEqual(original, test) + + self.assertEqual(test["foo"], 1) + + self.assertEqual(list(test), ["foo", "bar"]) + + def test_defaults(self): + test = OrderedDefaultDict(bool, {"foo": 1}) + + self.assertEqual(test["foo"], 1) + + self.assertEqual(test["qux"], False) + + self.assertEqual(list(test), ["foo", "qux"]) + + +class TestKeyedDefaultDict(unittest.TestCase): + def test_simple(self): + original = {"foo": 1, "bar": 2} + + test = KeyedDefaultDict(lambda x: x, original) + + self.assertEqual(original, test) + + self.assertEqual(test["foo"], 1) + + def test_defaults(self): + test = KeyedDefaultDict(lambda x: x, {"foo": 1}) + + self.assertEqual(test["foo"], 1) + + self.assertEqual(test["qux"], "qux") + + self.assertEqual(test["bar"], "bar") + + test["foo"] = 2 + test["qux"] = None + test["baz"] = "foo" + + self.assertEqual(test["foo"], 2) + + self.assertEqual(test["qux"], None) + + self.assertEqual(test["baz"], "foo") + + +class TestReadOnlyKeyedDefaultDict(unittest.TestCase): + def test_defaults(self): + test = ReadOnlyKeyedDefaultDict(lambda x: x, {"foo": 1}) + + self.assertEqual(test["foo"], 1) + + self.assertEqual(test["qux"], "qux") + + self.assertEqual(test["bar"], "bar") + + copy = dict(test) + + with self.assertRaises(Exception): + test["foo"] = 2 + + with self.assertRaises(Exception): + test["qux"] = None + + with self.assertRaises(Exception): + test["baz"] = "foo" + + self.assertEqual(test, copy) + + self.assertEqual(len(test), 3) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_dotproperties.py b/python/mozbuild/mozbuild/test/test_dotproperties.py new file mode 100644 index 0000000000..4e7a437799 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_dotproperties.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- + +import os +import unittest + +import mozpack.path as mozpath +from mozunit import main +from six import StringIO + +from mozbuild.dotproperties import DotProperties + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data") + + +class TestDotProperties(unittest.TestCase): + def test_get(self): + contents = StringIO( + """ +key=value +""" + ) + p = DotProperties(contents) + self.assertEqual(p.get("missing"), None) + self.assertEqual(p.get("missing", "default"), "default") + self.assertEqual(p.get("key"), "value") + + def test_update(self): + contents = StringIO( + """ +old=old value +key=value +""" + ) + p = DotProperties(contents) + self.assertEqual(p.get("old"), "old value") + self.assertEqual(p.get("key"), "value") + + new_contents = StringIO( + """ +key=new value +""" + ) + p.update(new_contents) + self.assertEqual(p.get("old"), "old value") + self.assertEqual(p.get("key"), "new value") + + def test_get_list(self): + contents = StringIO( + """ +list.0=A +list.1=B +list.2=C + +order.1=B +order.0=A +order.2=C +""" + ) + p = 
DotProperties(contents)
+        self.assertEqual(p.get_list("missing"), [])
+        self.assertEqual(p.get_list("list"), ["A", "B", "C"])
+        self.assertEqual(p.get_list("order"), ["A", "B", "C"])
+
+    def test_get_list_with_shared_prefix(self):
+        contents = StringIO(
+            """
+list.0=A
+list.1=B
+list.2=C
+
+list.sublist.1=E
+list.sublist.0=D
+list.sublist.2=F
+
+list.sublist.second.0=G
+
+list.other.0=H
+"""
+        )
+        p = DotProperties(contents)
+        self.assertEqual(p.get_list("list"), ["A", "B", "C"])
+        self.assertEqual(p.get_list("list.sublist"), ["D", "E", "F"])
+        self.assertEqual(p.get_list("list.sublist.second"), ["G"])
+        self.assertEqual(p.get_list("list.other"), ["H"])
+
+    def test_get_dict(self):
+        contents = StringIO(
+            """
+A.title=title A
+
+B.title=title B
+B.url=url B
+
+C=value
+"""
+        )
+        p = DotProperties(contents)
+        self.assertEqual(p.get_dict("missing"), {})
+        self.assertEqual(p.get_dict("A"), {"title": "title A"})
+        self.assertEqual(p.get_dict("B"), {"title": "title B", "url": "url B"})
+        with self.assertRaises(ValueError):
+            p.get_dict("A", required_keys=["title", "url"])
+        with self.assertRaises(ValueError):
+            p.get_dict("missing", required_keys=["key"])
+        # A key=value pair is considered to root an empty dict.
+        self.assertEqual(p.get_dict("C"), {})
+        with self.assertRaises(ValueError):
+            p.get_dict("C", required_keys=["missing_key"])
+
+    def test_get_dict_with_shared_prefix(self):
+        contents = StringIO(
+            """
+A.title=title A
+A.subdict.title=title A subdict
+
+B.title=title B
+B.url=url B
+B.subdict.title=title B subdict
+B.subdict.url=url B subdict
+"""
+        )
+        p = DotProperties(contents)
+        self.assertEqual(p.get_dict("A"), {"title": "title A"})
+        self.assertEqual(p.get_dict("B"), {"title": "title B", "url": "url B"})
+        self.assertEqual(p.get_dict("A.subdict"), {"title": "title A subdict"})
+        self.assertEqual(
+            p.get_dict("B.subdict"),
+            {"title": "title B subdict", "url": "url B subdict"},
+        )
+
+    def test_get_dict_with_value_prefix(self):
+        contents = StringIO(
+            """
+A.default=A
+A.default.B=B
+A.default.B.ignored=B ignored
+A.default.C=C
+A.default.C.ignored=C ignored
+"""
+        )
+        p = DotProperties(contents)
+        self.assertEqual(p.get("A.default"), "A")
+        # This enumerates the properties.
+        self.assertEqual(p.get_dict("A.default"), {"B": "B", "C": "C"})
+        # They can still be fetched directly.
+        self.assertEqual(p.get("A.default.B"), "B")
+        self.assertEqual(p.get("A.default.C"), "C")
+
+    def test_unicode(self):
+        contents = StringIO(
+            """
+# Danish.
+# #### ~~ Søren Munk Skrøder, sskroeder - 2009-05-30 @ #mozmae
+
+# Korean.
+A.title=한메일
+
+# Russian.
+list.0 = test
+list.1 = Яндекс
+"""
+        )
+        p = DotProperties(contents)
+        self.assertEqual(p.get_dict("A"), {"title": "한메일"})
+        self.assertEqual(p.get_list("list"), ["test", "Яндекс"])
+
+    def test_valid_unicode_from_file(self):
+        # The contents of valid.properties is identical to the contents of the
+        # test above. This specifically exercises reading from a file.
+        p = DotProperties(os.path.join(test_data_path, "valid.properties"))
+        self.assertEqual(p.get_dict("A"), {"title": "한메일"})
+        self.assertEqual(p.get_list("list"), ["test", "Яндекс"])
+
+    def test_bad_unicode_from_file(self):
+        # The contents of bad.properties is not valid Unicode; see the comments
+        # in the file itself for details.
+        with self.assertRaises(UnicodeDecodeError):
+            DotProperties(os.path.join(test_data_path, "bad.properties"))
+
+
+if __name__ == "__main__":
+    main()
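The list assertions above rest on DotProperties' numbering convention: keys of the form name.0, name.1, ... are gathered and ordered by integer index, while keys under a longer dotted prefix such as name.sublist.0 are excluded. An illustrative re-implementation of just that convention, not the actual mozbuild.dotproperties code:

def get_list(props, prefix):
    # Collect values for keys that are exactly "<prefix>.<digits>".
    indexed = {}
    for key, value in props.items():
        head, _, tail = key.rpartition(".")
        if head == prefix and tail.isdigit():
            indexed[int(tail)] = value
    return [indexed[i] for i in sorted(indexed)]

assert get_list({"order.1": "B", "order.0": "A", "order.2": "C"}, "order") == ["A", "B", "C"]
assert get_list({"list.0": "A", "list.sublist.0": "D"}, "list") == ["A"]
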
diff --git a/python/mozbuild/mozbuild/test/test_expression.py b/python/mozbuild/mozbuild/test/test_expression.py
new file mode 100644
index 0000000000..535e62bf43
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_expression.py
@@ -0,0 +1,88 @@
+import unittest
+
+import mozunit
+
+from mozbuild.preprocessor import Context, Expression
+
+
+class TestContext(unittest.TestCase):
+    """
+    Unit tests for the Context class
+    """
+
+    def setUp(self):
+        self.c = Context()
+        self.c["FAIL"] = "PASS"
+
+    def test_string_literal(self):
+        """test string literal, fall-through for undefined var in a Context"""
+        self.assertEqual(self.c["PASS"], "PASS")
+
+    def test_variable(self):
+        """test value for defined var in the Context class"""
+        self.assertEqual(self.c["FAIL"], "PASS")
+
+    def test_in(self):
+        """test that 'var in context' does not fall through to the fallback"""
+        self.assertTrue("FAIL" in self.c)
+        self.assertTrue("PASS" not in self.c)
+
+
+class TestExpression(unittest.TestCase):
+    """
+    Unit tests for the Expression class
+    evaluate() is called with a context {FAIL: 'PASS'}
+    """
+
+    def setUp(self):
+        self.c = Context()
+        self.c["FAIL"] = "PASS"
+
+    def test_string_literal(self):
+        """Test for a string literal in an Expression"""
+        self.assertEqual(Expression("PASS").evaluate(self.c), "PASS")
+
+    def test_variable(self):
+        """Test for variable value in an Expression"""
+        self.assertEqual(Expression("FAIL").evaluate(self.c), "PASS")
+
+    def test_not(self):
+        """Test for the ! operator"""
+        self.assertTrue(Expression("!0").evaluate(self.c))
+        self.assertTrue(not Expression("!1").evaluate(self.c))
+
+    def test_equals(self):
+        """Test for the == operator"""
+        self.assertTrue(Expression("FAIL == PASS").evaluate(self.c))
+
+    def test_notequals(self):
+        """Test for the != operator"""
+        self.assertTrue(Expression("FAIL != 1").evaluate(self.c))
+
+    def test_logical_and(self):
+        """Test for the && operator"""
+        self.assertTrue(Expression("PASS == PASS && PASS != NOTPASS").evaluate(self.c))
+
+    def test_logical_or(self):
+        """Test for the || operator"""
+        self.assertTrue(
+            Expression("PASS == NOTPASS || PASS != NOTPASS").evaluate(self.c)
+        )
+
+    def test_logical_ops(self):
+        """Test for the && and || operators precedence"""
+        # Would evaluate to false if precedence was wrong
+        self.assertTrue(
+            Expression("PASS == PASS || PASS != NOTPASS && PASS == NOTPASS").evaluate(
+                self.c
+            )
+        )
+
+    def test_defined(self):
+        """Test for the defined() value"""
+        self.assertTrue(Expression("defined(FAIL)").evaluate(self.c))
+        self.assertTrue(Expression("!defined(PASS)").evaluate(self.c))
+
+
+if __name__ == "__main__":
+    mozunit.main()
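test_expression.py above doubles as a compact reference for the preprocessor's expression mini-language: names fall through to their literal spelling when undefined, evaluate to their value when defined, and defined() tells the two cases apart. Recapping the API exactly as the tests drive it:

from mozbuild.preprocessor import Context, Expression

c = Context()
c["FAIL"] = "PASS"

print(c["UNDEFINED"])  # "UNDEFINED": undefined names fall through to the literal
print(Expression("FAIL == PASS").evaluate(c))  # True; both sides evaluate to "PASS"
print(Expression("defined(FAIL)").evaluate(c))  # True
print(Expression("!defined(PASS)").evaluate(c))  # True; PASS is only a fallback value
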
+ +import os +import os.path +import sys +import unittest +from filecmp import dircmp +from shutil import copy2, rmtree +from tempfile import mkdtemp +from zipfile import ZipFile + +import mozunit +import six +from six import StringIO + +from mozbuild.jar import JarMaker + +if sys.platform == "win32": + import ctypes + from ctypes import POINTER, WinError + + DWORD = ctypes.c_ulong + LPDWORD = POINTER(DWORD) + HANDLE = ctypes.c_void_p + GENERIC_READ = 0x80000000 + FILE_SHARE_READ = 0x00000001 + OPEN_EXISTING = 3 + MAX_PATH = 260 + + class FILETIME(ctypes.Structure): + _fields_ = [("dwLowDateTime", DWORD), ("dwHighDateTime", DWORD)] + + class BY_HANDLE_FILE_INFORMATION(ctypes.Structure): + _fields_ = [ + ("dwFileAttributes", DWORD), + ("ftCreationTime", FILETIME), + ("ftLastAccessTime", FILETIME), + ("ftLastWriteTime", FILETIME), + ("dwVolumeSerialNumber", DWORD), + ("nFileSizeHigh", DWORD), + ("nFileSizeLow", DWORD), + ("nNumberOfLinks", DWORD), + ("nFileIndexHigh", DWORD), + ("nFileIndexLow", DWORD), + ] + + # http://msdn.microsoft.com/en-us/library/aa363858 + CreateFile = ctypes.windll.kernel32.CreateFileA + CreateFile.argtypes = [ + ctypes.c_char_p, + DWORD, + DWORD, + ctypes.c_void_p, + DWORD, + DWORD, + HANDLE, + ] + CreateFile.restype = HANDLE + + # http://msdn.microsoft.com/en-us/library/aa364952 + GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle + GetFileInformationByHandle.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)] + GetFileInformationByHandle.restype = ctypes.c_int + + # http://msdn.microsoft.com/en-us/library/aa364996 + GetVolumePathName = ctypes.windll.kernel32.GetVolumePathNameA + GetVolumePathName.argtypes = [ctypes.c_char_p, ctypes.c_char_p, DWORD] + GetVolumePathName.restype = ctypes.c_int + + # http://msdn.microsoft.com/en-us/library/aa364993 + GetVolumeInformation = ctypes.windll.kernel32.GetVolumeInformationA + GetVolumeInformation.argtypes = [ + ctypes.c_char_p, + ctypes.c_char_p, + DWORD, + LPDWORD, + LPDWORD, + LPDWORD, + ctypes.c_char_p, + DWORD, + ] + GetVolumeInformation.restype = ctypes.c_int + + +def symlinks_supported(path): + if sys.platform == "win32": + # Add 1 for a trailing backslash if necessary, and 1 for the terminating + # null character. + volpath = ctypes.create_string_buffer(len(path) + 2) + rv = GetVolumePathName(six.ensure_binary(path), volpath, len(volpath)) + if rv == 0: + raise WinError() + + fsname = ctypes.create_string_buffer(MAX_PATH + 1) + rv = GetVolumeInformation( + volpath, None, 0, None, None, None, fsname, len(fsname) + ) + if rv == 0: + raise WinError() + + # Return true only if the fsname is NTFS + return fsname.value == "NTFS" + else: + return True + + +def _getfileinfo(path): + """Return information for the given file. 
This only works on Windows."""
+    fh = CreateFile(
+        six.ensure_binary(path),
+        GENERIC_READ,
+        FILE_SHARE_READ,
+        None,
+        OPEN_EXISTING,
+        0,
+        None,
+    )
+    if fh is None:
+        raise WinError()
+    info = BY_HANDLE_FILE_INFORMATION()
+    rv = GetFileInformationByHandle(fh, info)
+    if rv == 0:
+        raise WinError()
+    return info
+
+
+def is_symlink_to(dest, src):
+    if sys.platform == "win32":
+        # Check if both are on the same volume and have the same file ID
+        destinfo = _getfileinfo(dest)
+        srcinfo = _getfileinfo(src)
+        return (
+            destinfo.dwVolumeSerialNumber == srcinfo.dwVolumeSerialNumber
+            and destinfo.nFileIndexHigh == srcinfo.nFileIndexHigh
+            and destinfo.nFileIndexLow == srcinfo.nFileIndexLow
+        )
+    else:
+        # Read the link and check if it is correct
+        if not os.path.islink(dest):
+            return False
+        target = os.path.abspath(os.readlink(dest))
+        abssrc = os.path.abspath(src)
+        return target == abssrc
+
+
+class _TreeDiff(dircmp):
+    """Helper to report rich results on the differences between two directories."""
+
+    def _fillDiff(self, dc, rv, basepath="{0}"):
+        rv["right_only"] += map(lambda l: basepath.format(l), dc.right_only)
+        rv["left_only"] += map(lambda l: basepath.format(l), dc.left_only)
+        rv["diff_files"] += map(lambda l: basepath.format(l), dc.diff_files)
+        rv["funny"] += map(lambda l: basepath.format(l), dc.common_funny)
+        rv["funny"] += map(lambda l: basepath.format(l), dc.funny_files)
+        for subdir, _dc in six.iteritems(dc.subdirs):
+            self._fillDiff(_dc, rv, basepath.format(subdir + "/{0}"))
+
+    def allResults(self, left, right):
+        rv = {"right_only": [], "left_only": [], "diff_files": [], "funny": []}
+        self._fillDiff(self, rv)
+        chunks = []
+        if rv["right_only"]:
+            chunks.append("{0} only in {1}".format(", ".join(rv["right_only"]), right))
+        if rv["left_only"]:
+            chunks.append("{0} only in {1}".format(", ".join(rv["left_only"]), left))
+        if rv["diff_files"]:
+            chunks.append("{0} differ".format(", ".join(rv["diff_files"])))
+        if rv["funny"]:
+            chunks.append("{0} don't compare".format(", ".join(rv["funny"])))
+        return "; ".join(chunks)
+
+
+class TestJarMaker(unittest.TestCase):
+    """
+    Unit tests for JarMaker.py
+    """
+
+    debug = False  # set to True to debug failing tests on disk
+
+    def setUp(self):
+        self.tmpdir = mkdtemp()
+        self.srcdir = os.path.join(self.tmpdir, "src")
+        os.mkdir(self.srcdir)
+        self.builddir = os.path.join(self.tmpdir, "build")
+        os.mkdir(self.builddir)
+        self.refdir = os.path.join(self.tmpdir, "ref")
+        os.mkdir(self.refdir)
+        self.stagedir = os.path.join(self.tmpdir, "stage")
+        os.mkdir(self.stagedir)
+
+    def tearDown(self):
+        if self.debug:
+            print(self.tmpdir)
+        elif sys.platform != "win32":
+            # can't clean up on windows
+            rmtree(self.tmpdir)
+
+    def _jar_and_compare(self, infile, **kwargs):
+        jm = JarMaker(outputFormat="jar")
+        if "topsourcedir" not in kwargs:
+            kwargs["topsourcedir"] = self.srcdir
+        for attr in ("topsourcedir", "sourcedirs"):
+            if attr in kwargs:
+                setattr(jm, attr, kwargs[attr])
+        jm.makeJar(infile, self.builddir)
+        cwd = os.getcwd()
+        os.chdir(self.builddir)
+        try:
+            # expand build to stage
+            for path, dirs, files in os.walk("."):
+                stagedir = os.path.join(self.stagedir, path)
+                if not os.path.isdir(stagedir):
+                    os.mkdir(stagedir)
+                for file in files:
+                    if file.endswith(".jar"):
+                        # expand jar
+                        stagepath = os.path.join(stagedir, file)
+                        os.mkdir(stagepath)
+                        zf = ZipFile(os.path.join(path, file))
+                        # written before ZipFile.extractall was available; expand entries manually
+                        for entry_name in zf.namelist():
+                            segs = entry_name.split("/")
+                            fname = segs.pop()
+                            dname = 
os.path.join(stagepath, *segs) + if not os.path.isdir(dname): + os.makedirs(dname) + if not fname: + # directory, we're done + continue + _c = zf.read(entry_name) + open(os.path.join(dname, fname), "wb").write(_c) + zf.close() + else: + copy2(os.path.join(path, file), stagedir) + # compare both dirs + os.chdir("..") + td = _TreeDiff("ref", "stage") + return td.allResults("reference", "build") + finally: + os.chdir(cwd) + + def _create_simple_setup(self): + # create src content + jarf = open(os.path.join(self.srcdir, "jar.mn"), "w") + jarf.write( + """test.jar: + dir/foo (bar) +""" + ) + jarf.close() + open(os.path.join(self.srcdir, "bar"), "w").write("content\n") + # create reference + refpath = os.path.join(self.refdir, "chrome", "test.jar", "dir") + os.makedirs(refpath) + open(os.path.join(refpath, "foo"), "w").write("content\n") + + def test_a_simple_jar(self): + """Test a simple jar.mn""" + self._create_simple_setup() + # call JarMaker + rv = self._jar_and_compare( + os.path.join(self.srcdir, "jar.mn"), sourcedirs=[self.srcdir] + ) + self.assertTrue(not rv, rv) + + def test_a_simple_symlink(self): + """Test a simple jar.mn with a symlink""" + if not symlinks_supported(self.srcdir): + raise unittest.SkipTest("symlinks not supported") + + self._create_simple_setup() + jm = JarMaker(outputFormat="symlink") + jm.sourcedirs = [self.srcdir] + jm.topsourcedir = self.srcdir + jm.makeJar(os.path.join(self.srcdir, "jar.mn"), self.builddir) + # All we do is check that srcdir/bar points to builddir/chrome/test/dir/foo + srcbar = os.path.join(self.srcdir, "bar") + destfoo = os.path.join(self.builddir, "chrome", "test", "dir", "foo") + self.assertTrue( + is_symlink_to(destfoo, srcbar), + "{0} is not a symlink to {1}".format(destfoo, srcbar), + ) + + def _create_wildcard_setup(self): + # create src content + jarf = open(os.path.join(self.srcdir, "jar.mn"), "w") + jarf.write( + """test.jar: + dir/bar (*.js) + dir/hoge (qux/*) +""" + ) + jarf.close() + open(os.path.join(self.srcdir, "foo.js"), "w").write("foo.js\n") + open(os.path.join(self.srcdir, "bar.js"), "w").write("bar.js\n") + os.makedirs(os.path.join(self.srcdir, "qux", "foo")) + open(os.path.join(self.srcdir, "qux", "foo", "1"), "w").write("1\n") + open(os.path.join(self.srcdir, "qux", "foo", "2"), "w").write("2\n") + open(os.path.join(self.srcdir, "qux", "baz"), "w").write("baz\n") + # create reference + refpath = os.path.join(self.refdir, "chrome", "test.jar", "dir") + os.makedirs(os.path.join(refpath, "bar")) + os.makedirs(os.path.join(refpath, "hoge", "foo")) + open(os.path.join(refpath, "bar", "foo.js"), "w").write("foo.js\n") + open(os.path.join(refpath, "bar", "bar.js"), "w").write("bar.js\n") + open(os.path.join(refpath, "hoge", "foo", "1"), "w").write("1\n") + open(os.path.join(refpath, "hoge", "foo", "2"), "w").write("2\n") + open(os.path.join(refpath, "hoge", "baz"), "w").write("baz\n") + + def test_a_wildcard_jar(self): + """Test a wildcard in jar.mn""" + self._create_wildcard_setup() + # call JarMaker + rv = self._jar_and_compare( + os.path.join(self.srcdir, "jar.mn"), sourcedirs=[self.srcdir] + ) + self.assertTrue(not rv, rv) + + def test_a_wildcard_symlink(self): + """Test a wildcard in jar.mn with symlinks""" + if not symlinks_supported(self.srcdir): + raise unittest.SkipTest("symlinks not supported") + + self._create_wildcard_setup() + jm = JarMaker(outputFormat="symlink") + jm.sourcedirs = [self.srcdir] + jm.topsourcedir = self.srcdir + jm.makeJar(os.path.join(self.srcdir, "jar.mn"), self.builddir) + + expected_symlinks = { + 
("bar", "foo.js"): ("foo.js",), + ("bar", "bar.js"): ("bar.js",), + ("hoge", "foo", "1"): ("qux", "foo", "1"), + ("hoge", "foo", "2"): ("qux", "foo", "2"), + ("hoge", "baz"): ("qux", "baz"), + } + for dest, src in six.iteritems(expected_symlinks): + srcpath = os.path.join(self.srcdir, *src) + destpath = os.path.join(self.builddir, "chrome", "test", "dir", *dest) + self.assertTrue( + is_symlink_to(destpath, srcpath), + "{0} is not a symlink to {1}".format(destpath, srcpath), + ) + + +class Test_relativesrcdir(unittest.TestCase): + def setUp(self): + self.jm = JarMaker() + self.jm.topsourcedir = "/TOPSOURCEDIR" + self.jm.relativesrcdir = "browser/locales" + self.fake_empty_file = StringIO() + self.fake_empty_file.name = "fake_empty_file" + + def tearDown(self): + del self.jm + del self.fake_empty_file + + def test_en_US(self): + jm = self.jm + jm.makeJar(self.fake_empty_file, "/NO_OUTPUT_REQUIRED") + self.assertEqual( + jm.localedirs, + [ + os.path.join( + os.path.abspath("/TOPSOURCEDIR"), "browser/locales", "en-US" + ) + ], + ) + + def test_l10n_no_merge(self): + jm = self.jm + jm.l10nbase = "/L10N_BASE" + jm.makeJar(self.fake_empty_file, "/NO_OUTPUT_REQUIRED") + self.assertEqual(jm.localedirs, [os.path.join("/L10N_BASE", "browser")]) + + def test_l10n_merge(self): + jm = self.jm + jm.l10nbase = "/L10N_MERGE" + jm.makeJar(self.fake_empty_file, "/NO_OUTPUT_REQUIRED") + self.assertEqual( + jm.localedirs, + [ + os.path.join("/L10N_MERGE", "browser"), + ], + ) + + def test_override(self): + jm = self.jm + jm.outputFormat = "flat" # doesn't touch chrome dir without files + jarcontents = StringIO( + """en-US.jar: +relativesrcdir dom/locales: +""" + ) + jarcontents.name = "override.mn" + jm.makeJar(jarcontents, "/NO_OUTPUT_REQUIRED") + self.assertEqual( + jm.localedirs, + [os.path.join(os.path.abspath("/TOPSOURCEDIR"), "dom/locales", "en-US")], + ) + + def test_override_l10n(self): + jm = self.jm + jm.l10nbase = "/L10N_BASE" + jm.outputFormat = "flat" # doesn't touch chrome dir without files + jarcontents = StringIO( + """en-US.jar: +relativesrcdir dom/locales: +""" + ) + jarcontents.name = "override.mn" + jm.makeJar(jarcontents, "/NO_OUTPUT_REQUIRED") + self.assertEqual(jm.localedirs, [os.path.join("/L10N_BASE", "dom")]) + + +class Test_fluent(unittest.TestCase): + """ + Unit tests for JarMaker interaction with Fluent + """ + + debug = False # set to True to debug failing tests on disk + + def setUp(self): + self.tmpdir = mkdtemp() + self.srcdir = os.path.join(self.tmpdir, "src") + os.mkdir(self.srcdir) + self.builddir = os.path.join(self.tmpdir, "build") + os.mkdir(self.builddir) + self.l10nbase = os.path.join(self.tmpdir, "l10n-base") + os.mkdir(self.l10nbase) + self.l10nmerge = os.path.join(self.tmpdir, "l10n-merge") + os.mkdir(self.l10nmerge) + + def tearDown(self): + if self.debug: + print(self.tmpdir) + elif sys.platform != "win32": + # can't clean up on windows + rmtree(self.tmpdir) + + def _create_fluent_setup(self): + # create src content + jarf = open(os.path.join(self.srcdir, "jar.mn"), "w") + jarf.write( + """[localization] test.jar: + app (%app/**/*.ftl) +""" + ) + jarf.close() + appdir = os.path.join(self.srcdir, "app", "locales", "en-US", "app") + os.makedirs(appdir) + open(os.path.join(appdir, "test.ftl"), "w").write("id = Value") + open(os.path.join(appdir, "test2.ftl"), "w").write("id2 = Value 2") + + l10ndir = os.path.join(self.l10nbase, "app", "app") + os.makedirs(l10ndir) + open(os.path.join(l10ndir, "test.ftl"), "w").write("id = L10n Value") + + def 
test_l10n_not_merge_ftl(self):
+        """Test that JarMaker doesn't merge source .ftl files"""
+        self._create_fluent_setup()
+        jm = JarMaker(outputFormat="symlink")
+        jm.sourcedirs = [self.srcdir]
+        jm.topsourcedir = self.srcdir
+        jm.l10nbase = self.l10nbase
+        jm.l10nmerge = self.l10nmerge
+        jm.relativesrcdir = "app/locales"
+        jm.makeJar(os.path.join(self.srcdir, "jar.mn"), self.builddir)
+
+        # test.ftl should be taken from the l10ndir, since it is present there
+        destpath = os.path.join(
+            self.builddir, "localization", "test", "app", "test.ftl"
+        )
+        srcpath = os.path.join(self.l10nbase, "app", "app", "test.ftl")
+        self.assertTrue(
+            is_symlink_to(destpath, srcpath),
+            "{0} should be a symlink to {1}".format(destpath, srcpath),
+        )
+
+        # test2.ftl, on the other hand, is only present in the en-US dir and
+        # should not be linked from the build dir
+        destpath = os.path.join(
+            self.builddir, "localization", "test", "app", "test2.ftl"
+        )
+        self.assertFalse(
+            os.path.isfile(destpath), "test2.ftl should not be taken from en-US"
+        )
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_licenses.py b/python/mozbuild/mozbuild/test/test_licenses.py
new file mode 100644
index 0000000000..9f3f12d423
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_licenses.py
@@ -0,0 +1,33 @@
+import unittest
+
+import mozunit
+
+from mozbuild.vendor.vendor_rust import VendorRust
+
+
+class TestLicenses(unittest.TestCase):
+    """
+    Unit tests for the Rust vendoring license checks
+    """
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def testLicense(self):
+        self.assertEqual(VendorRust.runtime_license("", "Apache-2.0"), True)
+        self.assertEqual(VendorRust.runtime_license("", "MIT"), True)
+        self.assertEqual(VendorRust.runtime_license("", "GPL"), False)
+        self.assertEqual(VendorRust.runtime_license("", "MIT /GPL"), True)
+        self.assertEqual(VendorRust.runtime_license("", "GPL/ Proprietary"), False)
+        self.assertEqual(VendorRust.runtime_license("", "GPL AND MIT"), False)
+        self.assertEqual(VendorRust.runtime_license("", "ISC\tAND\tMIT"), False)
+        self.assertEqual(VendorRust.runtime_license("", "GPL OR MIT"), True)
+        self.assertEqual(VendorRust.runtime_license("", "ALLIGATOR MIT"), False)
+        pass
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_line_endings.py b/python/mozbuild/mozbuild/test/test_line_endings.py
new file mode 100644
index 0000000000..f8cdd89174
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_line_endings.py
@@ -0,0 +1,45 @@
+import unittest
+
+import mozunit
+from mozfile import NamedTemporaryFile
+from six import StringIO
+
+from mozbuild.preprocessor import Preprocessor
+
+
+class TestLineEndings(unittest.TestCase):
+    """
+    Unit tests for the Preprocessor's handling of line endings
+    """
+
+    def setUp(self):
+        self.pp = Preprocessor()
+        self.pp.out = StringIO()
+        self.f = NamedTemporaryFile(mode="wb")
+
+    def tearDown(self):
+        self.f.close()
+
+    def createFile(self, lineendings):
+        for line, ending in zip([b"a", b"#literal b", b"c"], lineendings):
+            self.f.write(line + ending)
+        self.f.flush()
+
+    def testMac(self):
+        self.createFile([b"\x0D"] * 3)
+        self.pp.do_include(self.f.name)
+        self.assertEqual(self.pp.out.getvalue(), "a\nb\nc\n")
+
+    def testUnix(self):
+        self.createFile([b"\x0A"] * 3)
+        self.pp.do_include(self.f.name)
+        self.assertEqual(self.pp.out.getvalue(), "a\nb\nc\n")
+
+    def testWindows(self):
+        self.createFile([b"\x0D\x0A"] * 3)
+        self.pp.do_include(self.f.name)
+        self.assertEqual(self.pp.out.getvalue(), 
"a\nb\nc\n") + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_makeutil.py b/python/mozbuild/mozbuild/test/test_makeutil.py new file mode 100644 index 0000000000..524851bfbd --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_makeutil.py @@ -0,0 +1,164 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import unittest + +from mozunit import main +from six import StringIO + +from mozbuild.makeutil import Makefile, Rule, read_dep_makefile, write_dep_makefile + + +class TestMakefile(unittest.TestCase): + def test_rule(self): + out = StringIO() + rule = Rule() + rule.dump(out) + self.assertEqual(out.getvalue(), "") + + out = StringIO() + rule.add_targets(["foo", "bar"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar:\n") + + out = StringIO() + rule.add_targets(["baz"]) + rule.add_dependencies(["qux", "hoge", "piyo"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar baz: qux hoge piyo\n") + + out = StringIO() + rule = Rule(["foo", "bar"]) + rule.add_dependencies(["baz"]) + rule.add_commands(["echo $@"]) + rule.add_commands(["$(BAZ) -o $@ $<", "$(TOUCH) $@"]) + rule.dump(out) + self.assertEqual( + out.getvalue(), + "foo bar: baz\n" + + "\techo $@\n" + + "\t$(BAZ) -o $@ $<\n" + + "\t$(TOUCH) $@\n", + ) + + out = StringIO() + rule = Rule(["foo"]) + rule.add_dependencies(["bar", "foo", "baz"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo: bar baz\n") + + out = StringIO() + rule.add_targets(["bar"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar: baz\n") + + out = StringIO() + rule.add_targets(["bar"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar: baz\n") + + out = StringIO() + rule.add_dependencies(["bar"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar: baz\n") + + out = StringIO() + rule.add_dependencies(["qux"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar: baz qux\n") + + out = StringIO() + rule.add_dependencies(["qux"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar: baz qux\n") + + out = StringIO() + rule.add_dependencies(["hoge", "hoge"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar: baz qux hoge\n") + + out = StringIO() + rule.add_targets(["fuga", "fuga"]) + rule.dump(out) + self.assertEqual(out.getvalue(), "foo bar fuga: baz qux hoge\n") + + def test_makefile(self): + out = StringIO() + mk = Makefile() + rule = mk.create_rule(["foo"]) + rule.add_dependencies(["bar", "baz", "qux"]) + rule.add_commands(["echo foo"]) + rule = mk.create_rule().add_targets(["bar", "baz"]) + rule.add_dependencies(["hoge"]) + rule.add_commands(["echo $@"]) + mk.dump(out, removal_guard=False) + self.assertEqual( + out.getvalue(), + "foo: bar baz qux\n" + "\techo foo\n" + "bar baz: hoge\n" + "\techo $@\n", + ) + + out = StringIO() + mk.dump(out) + self.assertEqual( + out.getvalue(), + "foo: bar baz qux\n" + + "\techo foo\n" + + "bar baz: hoge\n" + + "\techo $@\n" + + "hoge qux:\n", + ) + + def test_statement(self): + out = StringIO() + mk = Makefile() + mk.create_rule(["foo"]).add_dependencies(["bar"]).add_commands(["echo foo"]) + mk.add_statement("BAR = bar") + mk.create_rule(["$(BAR)"]).add_commands(["echo $@"]) + mk.dump(out, removal_guard=False) + self.assertEqual( + out.getvalue(), + "foo: bar\n" + "\techo foo\n" + "BAR = bar\n" + "$(BAR):\n" + "\techo $@\n", + ) + + 
@unittest.skipIf(os.name != "nt", "Test only applicable on Windows.") + def test_path_normalization(self): + out = StringIO() + mk = Makefile() + rule = mk.create_rule(["c:\\foo"]) + rule.add_dependencies(["c:\\bar", "c:\\baz\\qux"]) + rule.add_commands(["echo c:\\foo"]) + mk.dump(out) + self.assertEqual( + out.getvalue(), + "c:/foo: c:/bar c:/baz/qux\n" + "\techo c:\\foo\n" + "c:/bar c:/baz/qux:\n", + ) + + def test_read_dep_makefile(self): + input = StringIO( + os.path.abspath("foo") + + ": bar\n" + + "baz qux: \\ \n" + + "hoge \\\n" + + "piyo \\\n" + + "fuga\n" + + "fuga:\n" + ) + result = list(read_dep_makefile(input)) + self.assertEqual(len(result), 2) + self.assertEqual( + list(result[0].targets()), [os.path.abspath("foo").replace(os.sep, "/")] + ) + self.assertEqual(list(result[0].dependencies()), ["bar"]) + self.assertEqual(list(result[1].targets()), ["baz", "qux"]) + self.assertEqual(list(result[1].dependencies()), ["hoge", "piyo", "fuga"]) + + def test_write_dep_makefile(self): + out = StringIO() + write_dep_makefile(out, "target", ["b", "c", "a"]) + self.assertEqual(out.getvalue(), "target: b c a\n" + "a b c:\n") + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_manifest.py b/python/mozbuild/mozbuild/test/test_manifest.py new file mode 100644 index 0000000000..e5675aba36 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_manifest.py @@ -0,0 +1,2081 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest + +import mozfile +from mozunit import main + +from mozbuild.vendor.moz_yaml import MozYamlVerifyError, load_moz_yaml + + +class TestManifest(unittest.TestCase): + def process_test_vectors(self, test_vectors): + index = 0 + for vector in test_vectors: + print("Testing index", index) + expected, yaml = vector + with mozfile.NamedTemporaryFile() as tf: + tf.write(yaml) + tf.flush() + if expected == "exception": + with self.assertRaises(MozYamlVerifyError): + load_moz_yaml(tf.name, require_license_file=False) + else: + self.assertDictEqual( + load_moz_yaml(tf.name, require_license_file=False), expected + ) + index += 1 + + # =========================================================================================== + def test_simple(self): + simple_dict = { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + } + + self.process_test_vectors( + [ + ( + simple_dict, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + ( + simple_dict, + b""" +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + ] + ) + + # =========================================================================================== + def test_updatebot(self): + self.process_test_vectors( + [ + ( + { + "schema": "1", + "origin": { + 
"description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: 001122334455 +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "updatebot": { + "try-preset": "foo", + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: 001122334455 +bugzilla: + product: Core + component: Graphics +updatebot: + try-preset: foo + maintainer-phab: tjr + maintainer-bz: a@example.com + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "fuzzy-query": "!linux64", + "tasks": [{"type": "commit-alert"}], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + fuzzy-query: "!linux64" + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: 
https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + try-preset: foo + fuzzy-query: "!linux64" + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "fuzzy-paths": ["dir1/", "dir2"], + "tasks": [{"type": "commit-alert"}], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + fuzzy-paths: + - dir1/ + - dir2 + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "fuzzy-paths": ["dir1/"], + "tasks": [{"type": "commit-alert"}], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + fuzzy-paths: ['dir1/'] + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + "tracking": "commit", + "flavor": "rust", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "tasks": [ + {"type": "commit-alert", "frequency": "release"}, + { + "type": "vendoring", + "enabled": False, + "cc": ["b@example.com"], + "needinfo": ["c@example.com"], + "frequency": "1 weeks", + "platform": "windows", + }, + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: 
AA001122334455 +vendoring: + url: https://example.com + tracking: commit + source-hosting: gitlab + flavor: rust +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + frequency: release + - type: vendoring + enabled: False + cc: ["b@example.com"] + needinfo: ["c@example.com"] + frequency: 1 weeks + platform: windows + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + "tracking": "tag", + "flavor": "rust", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "tasks": [ + {"type": "commit-alert", "frequency": "release"}, + { + "type": "vendoring", + "enabled": False, + "cc": ["b@example.com"], + "needinfo": ["c@example.com"], + "frequency": "1 weeks, 4 commits", + "platform": "windows", + }, + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + tracking: tag + source-hosting: gitlab + flavor: rust +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + frequency: release + - type: vendoring + enabled: False + cc: ["b@example.com"] + needinfo: ["c@example.com"] + frequency: 1 weeks, 4 commits + platform: windows + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", # rust flavor cannot use update-actions + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + tracking: tag + source-hosting: gitlab + flavor: rust + update-actions: + - action: move-file + from: foo + to: bar +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + frequency: release + - type: vendoring + enabled: False + cc: ["b@example.com"] + needinfo: ["c@example.com"] + frequency: 1 weeks, 4 commits + platform: windows + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "tasks": [ + { + "type": "vendoring", + "enabled": False, + "cc": ["b@example.com", "c@example.com"], + "needinfo": ["d@example.com", "e@example.com"], + "frequency": "every", + }, + { + "type": "commit-alert", + "filter": "none", + "source-extensions": [".c", ".cpp"], + "frequency": "2 weeks", + "platform": "linux", + }, + ], + }, 
+ }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 2 weeks + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "tasks": [ + { + "type": "vendoring", + "enabled": False, + "cc": ["b@example.com", "c@example.com"], + "needinfo": ["d@example.com", "e@example.com"], + "frequency": "every", + }, + { + "type": "commit-alert", + "filter": "none", + "source-extensions": [".c", ".cpp"], + "frequency": "2 commits", + "platform": "linux", + }, + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 2 commits + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "AA001122334455", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + }, + "updatebot": { + "maintainer-phab": "tjr", + "maintainer-bz": "a@example.com", + "tasks": [ + { + "type": "vendoring", + "enabled": False, + "cc": ["b@example.com", "c@example.com"], + "needinfo": ["d@example.com", "e@example.com"], + "frequency": "every", + "blocking": "1234", + }, + { + "type": "commit-alert", + "filter": "none", + "source-extensions": [".c", ".cpp"], + "frequency": "2 commits", + "platform": "linux", + }, + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + 
- b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + blocking: 1234 + - type: commit-alert + filter: none + frequency: 2 commits + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + branch: foo + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + blocking: 1234 + - type: commit-alert + filter: none + frequency: 2 commits + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "description": "2D Graphics Library", + "url": "https://www.cairographics.org/", + "release": "version 1.6.4", + "revision": "AA001122334455", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + "flavor": "individual-files", + "individual-files": [ + {"upstream": "foo", "destination": "bar"} + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files + individual-files: + - upstream: foo + destination: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "description": "2D Graphics Library", + "url": "https://www.cairographics.org/", + "release": "version 1.6.4", + "revision": "AA001122334455", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + "source-hosting": "gitlab", + "flavor": "individual-files", + "individual-files": [ + {"upstream": "foo", "destination": "bar"} + ], + "update-actions": [ + {"action": "move-file", "from": "foo", "to": "bar"} + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files + individual-files: + - upstream: foo + destination: bar + update-actions: + - action: move-file + from: foo + to: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + { + "schema": "1", + "origin": { + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "description": "2D Graphics Library", + "url": "https://www.cairographics.org/", + "release": "version 1.6.4", + "revision": "AA001122334455", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + "vendoring": { + "url": "https://example.com", + 
"source-hosting": "gitlab", + "flavor": "individual-files", + "individual-files-default-destination": "bar", + "individual-files-default-upstream": "foo", + "individual-files-list": ["foo", "bar"], + "update-actions": [ + {"action": "move-file", "from": "foo", "to": "bar"} + ], + }, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files + individual-files-default-upstream: foo + individual-files-default-destination: bar + individual-files-list: + - foo + - bar + update-actions: + - action: move-file + from: foo + to: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", # can't have both types of indidivudal-files list + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files + individual-files-list: + - foo + individual-files: + - upstream: foo + destination: bar + update-actions: + - action: move-file + from: foo + to: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", # can't have indidivudal-files-default-upstream + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files + indidivudal-files-default-upstream: foo + individual-files: + - upstream: foo + destination: bar + update-actions: + - action: move-file + from: foo + to: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", # must have indidivudal-files-default-upstream + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files + indidivudal-files-default-destination: foo + individual-files-list: + - foo + - bar + update-actions: + - action: move-file + from: foo + to: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + tracking: tag + flavor: individual-files + individual-files: + - upstream-src: foo + dst: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: 
AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: individual-files +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: rust + individual-files: + - upstream: foo + destination: bar +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: rust + include: + - foo +bugzilla: + product: Core + component: Graphics + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + blocking: foo + - type: commit-alert + filter: none + frequency: 2 commits + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + fuzzy-paths: "must-be-array" + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 2 commits + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 2 commits, 4 weeks + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D 
Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 4 weeks, 2 commits, 3 weeks + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: chocolate +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 2 weeks + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab + flavor: chocolate +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 01 commits + platform: linux + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + needinfo: + - d@example.com + - e@example.com + frequency: every + - type: commit-alert + filter: none + frequency: 2 weeks + platform: mac + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + - type: commit-alert + filter: none + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + 
"exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 +bugzilla: + product: Core + component: Graphics +vendoring: + url: https://example.com + source-hosting: gitlab +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + - type: commit-alert + filter: none + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + filter: none + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: foo + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + source-extensions: + - .c + - .cpp + """.strip(), + ), + # ------------------------------------------------- + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + filter: hogwash + """.strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +vendoring: + url: https://example.com + source-hosting: gitlab +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + - type: commit-alert + - type: commit-alert + filter: none + source-extensions: + - .c + - .cpp""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +vendoring: + 
url: https://example.com + source-hosting: gitlab +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + - type: vendoring + - type: commit-alert + filter: none + source-extensions: + - .c + - .cpp""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +vendoring: + url: https://example.com + source-hosting: gitlab +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + - type: commit-alert + frequency: every-release + filter: none + source-extensions: + - .c + - .cpp""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +vendoring: + url: https://example.com + source-hosting: gitlab +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: vendoring + enabled: False + cc: + - b@example.com + - c@example.com + frequency: 2 months + - type: commit-alert + filter: none + source-extensions: + - .c + - .cpp""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +vendoring: + url: https://example.com + source-hosting: gitlab +bugzilla: + product: Core + component: Graphics +updatebot: + maintainer-phab: tjr + maintainer-bz: a@example.com + tasks: + - type: commit-alert + frequency: 0 weeks + """.strip(), + ), + ] + ) + + # =========================================================================================== + def test_malformed(self): + with mozfile.NamedTemporaryFile() as tf: + tf.write(b"blah") + tf.flush() + with self.assertRaises(MozYamlVerifyError): + load_moz_yaml(tf.name, require_license_file=False) + + def test_schema(self): + with mozfile.NamedTemporaryFile() as tf: + tf.write(b"schema: 99") + tf.flush() + with self.assertRaises(MozYamlVerifyError): + load_moz_yaml(tf.name, require_license_file=False) + + def test_json(self): + with mozfile.NamedTemporaryFile() as tf: + tf.write( + b'{"origin": {"release": "version 1.6.4", "url": "https://w' + b'ww.cairographics.org/", "description": "2D Graphics Libra' + b'ry", "license": ["MPL-1.1", "LGPL-2.1"], "name": "cairo"}' + b', "bugzilla": {"product": "Core", "component": "Graphics"' + b'}, "schema": 1}' + ) + tf.flush() + with self.assertRaises(MozYamlVerifyError): + load_moz_yaml(tf.name, require_license_file=False) + + def test_revision(self): + self.process_test_vectors( + [ + ( + { + "schema": "1", + "origin": { + "description": "2D Graphics Library", + "license": ["MPL-1.1", "LGPL-2.1"], + "name": "cairo", + "release": "version 1.6.4", + "revision": "v1.6.37", + "url": "https://www.cairographics.org/", + }, + "bugzilla": {"component": "Graphics", "product": "Core"}, + }, + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + 
revision: v1.6.37 +bugzilla: + product: Core + component: Graphics""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: 4.0.0. +bugzilla: + product: Core + component: Graphics""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: 4.^.0 +bugzilla: + product: Core + component: Graphics""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: " " +bugzilla: + product: Core + component: Graphics""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: ??? +bugzilla: + product: Core + component: Graphics""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: ] +bugzilla: + product: Core + component: Graphics""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +vendoring: + url: https://example.com + source-hosting: gitlab + update-actions: + - action: run-script + cwd: '{cwd}' + script: 'script.py' + args: ['hi'] + pattern: 'hi' +""".strip(), + ), + ( + "exception", + b""" +--- +schema: 1 +origin: + name: cairo + description: 2D Graphics Library + url: https://www.cairographics.org/ + release: version 1.6.4 + license: + - MPL-1.1 + - LGPL-2.1 + revision: AA001122334455 +bugzilla: + product: Core + component: Graphics +vendoring: + url: https://example.com + source-hosting: gitlab + update-actions: + - action: run-script + cwd: '{cwd}' + args: ['hi'] +""".strip(), + ), + ] + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_mozconfig.py b/python/mozbuild/mozbuild/test/test_mozconfig.py new file mode 100644 index 0000000000..20827d7f29 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_mozconfig.py @@ -0,0 +1,275 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
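+
+# A quick orientation for these tests: MozconfigLoader evaluates a mozconfig
+# through a shell wrapper and returns a dict summarizing its effects. A
+# minimal usage sketch (the paths here are hypothetical):
+#
+#   loader = MozconfigLoader("/path/to/topsrcdir")
+#   result = loader.read_mozconfig("/path/to/mozconfig")
+#   result["configure_args"]  # e.g. ["--enable-debug"] from ac_add_options
+#   result["topobjdir"]       # e.g. "/foo/bar" from mk_add_options MOZ_OBJDIR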
+ +import os +import unittest +from shutil import rmtree +from tempfile import mkdtemp + +from mozfile.mozfile import NamedTemporaryFile +from mozunit import main + +from mozbuild.mozconfig import MozconfigLoader, MozconfigLoadException + + +class TestMozconfigLoader(unittest.TestCase): + def setUp(self): + self._old_env = dict(os.environ) + os.environ.pop("MOZCONFIG", None) + os.environ.pop("MOZ_OBJDIR", None) + os.environ.pop("CC", None) + os.environ.pop("CXX", None) + self._temp_dirs = set() + + def tearDown(self): + os.environ.clear() + os.environ.update(self._old_env) + + for d in self._temp_dirs: + rmtree(d) + + def get_loader(self): + return MozconfigLoader(self.get_temp_dir()) + + def get_temp_dir(self): + d = mkdtemp() + self._temp_dirs.add(d) + + return d + + def test_read_no_mozconfig(self): + # This is basically to ensure changes to defaults incur a test failure. + result = self.get_loader().read_mozconfig() + + self.assertEqual( + result, + { + "path": None, + "topobjdir": None, + "configure_args": None, + "make_flags": None, + "make_extra": None, + "env": None, + "vars": None, + }, + ) + + def test_read_empty_mozconfig(self): + with NamedTemporaryFile(mode="w") as mozconfig: + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual(result["path"], mozconfig.name) + self.assertIsNone(result["topobjdir"]) + self.assertEqual(result["configure_args"], []) + self.assertEqual(result["make_flags"], []) + self.assertEqual(result["make_extra"], []) + + for f in ("added", "removed", "modified"): + self.assertEqual(len(result["vars"][f]), 0) + self.assertEqual(len(result["env"][f]), 0) + + self.assertEqual(result["env"]["unmodified"], {}) + + def test_read_capture_ac_options(self): + """Ensures ac_add_options calls are captured.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("ac_add_options --enable-debug\n") + mozconfig.write("ac_add_options --disable-tests --enable-foo\n") + mozconfig.write('ac_add_options --foo="bar baz"\n') + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + self.assertEqual( + result["configure_args"], + ["--enable-debug", "--disable-tests", "--enable-foo", "--foo=bar baz"], + ) + + def test_read_ac_options_substitution(self): + """Ensure ac_add_options values are substituted.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("ac_add_options --foo=@TOPSRCDIR@\n") + mozconfig.flush() + + loader = self.get_loader() + result = loader.read_mozconfig(mozconfig.name) + self.assertEqual(result["configure_args"], ["--foo=%s" % loader.topsrcdir]) + + def test_read_capture_mk_options(self): + """Ensures mk_add_options calls are captured.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("mk_add_options MOZ_OBJDIR=/foo/bar\n") + mozconfig.write('mk_add_options MOZ_MAKE_FLAGS="-j8 -s"\n') + mozconfig.write('mk_add_options FOO="BAR BAZ"\n') + mozconfig.write("mk_add_options BIZ=1\n") + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + self.assertEqual(result["topobjdir"], "/foo/bar") + self.assertEqual(result["make_flags"], ["-j8", "-s"]) + self.assertEqual(result["make_extra"], ["FOO=BAR BAZ", "BIZ=1"]) + + def test_read_no_mozconfig_objdir_environ(self): + os.environ["MOZ_OBJDIR"] = "obj-firefox" + result = self.get_loader().read_mozconfig() + self.assertEqual(result["topobjdir"], "obj-firefox") + + def test_read_empty_mozconfig_objdir_environ(self): + os.environ["MOZ_OBJDIR"] = "obj-firefox" + with 
NamedTemporaryFile(mode="w") as mozconfig: + result = self.get_loader().read_mozconfig(mozconfig.name) + self.assertEqual(result["topobjdir"], "obj-firefox") + + def test_read_capture_mk_options_objdir_environ(self): + """Ensures mk_add_options calls are captured and override the environ.""" + os.environ["MOZ_OBJDIR"] = "obj-firefox" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("mk_add_options MOZ_OBJDIR=/foo/bar\n") + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + self.assertEqual(result["topobjdir"], "/foo/bar") + + def test_read_moz_objdir_substitution(self): + """Ensure @TOPSRCDIR@ substitution is recognized in MOZ_OBJDIR.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/some-objdir") + mozconfig.flush() + + loader = self.get_loader() + result = loader.read_mozconfig(mozconfig.name) + + self.assertEqual(result["topobjdir"], "%s/some-objdir" % loader.topsrcdir) + + def test_read_new_variables(self): + """New variables declared in mozconfig file are detected.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("CC=/usr/local/bin/clang\n") + mozconfig.write("CXX=/usr/local/bin/clang++\n") + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual( + result["vars"]["added"], + {"CC": "/usr/local/bin/clang", "CXX": "/usr/local/bin/clang++"}, + ) + self.assertEqual(result["env"]["added"], {}) + + def test_read_exported_variables(self): + """Exported variables are caught as new variables.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("export MY_EXPORTED=woot\n") + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual(result["vars"]["added"], {}) + self.assertEqual(result["env"]["added"], {"MY_EXPORTED": "woot"}) + + def test_read_modify_variables(self): + """Variables modified by mozconfig are detected.""" + old_path = os.path.realpath("/usr/bin/gcc") + new_path = os.path.realpath("/usr/local/bin/clang") + os.environ["CC"] = old_path + + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write('CC="%s"\n' % new_path) + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual(result["vars"]["modified"], {}) + self.assertEqual(result["env"]["modified"], {"CC": (old_path, new_path)}) + + def test_read_unmodified_variables(self): + """Variables not modified by the mozconfig are detected.""" + cc_path = os.path.realpath("/usr/bin/gcc") + os.environ["CC"] = cc_path + + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual(result["vars"]["unmodified"], {}) + self.assertEqual(result["env"]["unmodified"], {"CC": cc_path}) + + def test_read_removed_variables(self): + """Variables unset by the mozconfig are detected.""" + cc_path = os.path.realpath("/usr/bin/clang") + os.environ["CC"] = cc_path + + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("unset CC\n") + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual(result["vars"]["removed"], {}) + self.assertEqual(result["env"]["removed"], {"CC": cc_path}) + + def test_read_multiline_variables(self): + """Ensure multi-line variables are captured properly.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write('multi="foo\nbar"\n') + mozconfig.write("single=1\n") + mozconfig.flush() + + 
result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual( + result["vars"]["added"], {"multi": "foo\nbar", "single": "1"} + ) + self.assertEqual(result["env"]["added"], {}) + + def test_read_topsrcdir_defined(self): + """Ensure $topsrcdir references work as expected.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("TEST=$topsrcdir") + mozconfig.flush() + + loader = self.get_loader() + result = loader.read_mozconfig(mozconfig.name) + + self.assertEqual( + result["vars"]["added"]["TEST"], loader.topsrcdir.replace(os.sep, "/") + ) + self.assertEqual(result["env"]["added"], {}) + + def test_read_empty_variable_value(self): + """Ensure empty variable values are parsed properly.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write("EMPTY=\n") + mozconfig.write("export EXPORT_EMPTY=\n") + mozconfig.flush() + + result = self.get_loader().read_mozconfig(mozconfig.name) + + self.assertEqual( + result["vars"]["added"], + { + "EMPTY": "", + }, + ) + self.assertEqual(result["env"]["added"], {"EXPORT_EMPTY": ""}) + + def test_read_load_exception(self): + """Ensure non-0 exit codes in mozconfigs are handled properly.""" + with NamedTemporaryFile(mode="w") as mozconfig: + mozconfig.write('echo "hello world"\n') + mozconfig.write("exit 1\n") + mozconfig.flush() + + with self.assertRaises(MozconfigLoadException) as e: + self.get_loader().read_mozconfig(mozconfig.name) + + self.assertIn( + "Evaluation of your mozconfig exited with an error", str(e.exception) + ) + self.assertEqual(e.exception.path, mozconfig.name.replace(os.sep, "/")) + self.assertEqual(e.exception.output, ["hello world"]) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_mozinfo.py b/python/mozbuild/mozbuild/test/test_mozinfo.py new file mode 100755 index 0000000000..0d966b3dcc --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_mozinfo.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import tempfile +import unittest + +import mozunit +import six +from mozfile.mozfile import NamedTemporaryFile +from six import StringIO + +from mozbuild.backend.configenvironment import ConfigEnvironment +from mozbuild.mozinfo import build_dict, write_mozinfo + + +class Base(object): + def _config(self, substs={}): + d = os.path.dirname(__file__) + return ConfigEnvironment(d, d, substs=substs) + + +class TestBuildDict(unittest.TestCase, Base): + def test_missing(self): + """ + Test that missing required values raises. 
+ """ + + with self.assertRaises(Exception): + build_dict(self._config(substs=dict(OS_TARGET="foo"))) + + with self.assertRaises(Exception): + build_dict(self._config(substs=dict(TARGET_CPU="foo"))) + + with self.assertRaises(Exception): + build_dict(self._config(substs=dict(MOZ_WIDGET_TOOLKIT="foo"))) + + def test_win(self): + d = build_dict( + self._config( + dict( + OS_TARGET="WINNT", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="windows", + ) + ) + ) + self.assertEqual("win", d["os"]) + self.assertEqual("x86", d["processor"]) + self.assertEqual("windows", d["toolkit"]) + self.assertEqual(32, d["bits"]) + + def test_linux(self): + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="gtk", + ) + ) + ) + self.assertEqual("linux", d["os"]) + self.assertEqual("x86", d["processor"]) + self.assertEqual("gtk", d["toolkit"]) + self.assertEqual(32, d["bits"]) + + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="x86_64", + MOZ_WIDGET_TOOLKIT="gtk", + ) + ) + ) + self.assertEqual("linux", d["os"]) + self.assertEqual("x86_64", d["processor"]) + self.assertEqual("gtk", d["toolkit"]) + self.assertEqual(64, d["bits"]) + + def test_mac(self): + d = build_dict( + self._config( + dict( + OS_TARGET="Darwin", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="cocoa", + ) + ) + ) + self.assertEqual("mac", d["os"]) + self.assertEqual("x86", d["processor"]) + self.assertEqual("cocoa", d["toolkit"]) + self.assertEqual(32, d["bits"]) + + d = build_dict( + self._config( + dict( + OS_TARGET="Darwin", + TARGET_CPU="x86_64", + MOZ_WIDGET_TOOLKIT="cocoa", + ) + ) + ) + self.assertEqual("mac", d["os"]) + self.assertEqual("x86_64", d["processor"]) + self.assertEqual("cocoa", d["toolkit"]) + self.assertEqual(64, d["bits"]) + + def test_android(self): + d = build_dict( + self._config( + dict( + OS_TARGET="Android", + TARGET_CPU="arm", + MOZ_WIDGET_TOOLKIT="android", + ) + ) + ) + self.assertEqual("android", d["os"]) + self.assertEqual("arm", d["processor"]) + self.assertEqual("android", d["toolkit"]) + self.assertEqual(32, d["bits"]) + + def test_x86(self): + """ + Test that various i?86 values => x86. + """ + d = build_dict( + self._config( + dict( + OS_TARGET="WINNT", + TARGET_CPU="i486", + MOZ_WIDGET_TOOLKIT="windows", + ) + ) + ) + self.assertEqual("x86", d["processor"]) + + d = build_dict( + self._config( + dict( + OS_TARGET="WINNT", + TARGET_CPU="i686", + MOZ_WIDGET_TOOLKIT="windows", + ) + ) + ) + self.assertEqual("x86", d["processor"]) + + def test_arm(self): + """ + Test that all arm CPU architectures => arm. + """ + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="arm", + MOZ_WIDGET_TOOLKIT="gtk", + ) + ) + ) + self.assertEqual("arm", d["processor"]) + + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="armv7", + MOZ_WIDGET_TOOLKIT="gtk", + ) + ) + ) + self.assertEqual("arm", d["processor"]) + + def test_unknown(self): + """ + Test that unknown values pass through okay. + """ + d = build_dict( + self._config( + dict( + OS_TARGET="RandOS", + TARGET_CPU="cptwo", + MOZ_WIDGET_TOOLKIT="foobar", + ) + ) + ) + self.assertEqual("randos", d["os"]) + self.assertEqual("cptwo", d["processor"]) + self.assertEqual("foobar", d["toolkit"]) + # unknown CPUs should not get a bits value + self.assertFalse("bits" in d) + + def test_debug(self): + """ + Test that debug values are properly detected. 
+ """ + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="gtk", + ) + ) + ) + self.assertEqual(False, d["debug"]) + + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="gtk", + MOZ_DEBUG="1", + ) + ) + ) + self.assertEqual(True, d["debug"]) + + def test_crashreporter(self): + """ + Test that crashreporter values are properly detected. + """ + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="gtk", + ) + ) + ) + self.assertEqual(False, d["crashreporter"]) + + d = build_dict( + self._config( + dict( + OS_TARGET="Linux", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="gtk", + MOZ_CRASHREPORTER="1", + ) + ) + ) + self.assertEqual(True, d["crashreporter"]) + + +class TestWriteMozinfo(unittest.TestCase, Base): + """ + Test the write_mozinfo function. + """ + + def setUp(self): + fd, f = tempfile.mkstemp() + self.f = six.ensure_text(f) + os.close(fd) + + def tearDown(self): + os.unlink(self.f) + + def test_basic(self): + """ + Test that writing to a file produces correct output. + """ + c = self._config( + dict( + OS_TARGET="WINNT", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="windows", + ) + ) + tempdir = tempfile.gettempdir() + c.topsrcdir = tempdir + with NamedTemporaryFile( + dir=os.path.normpath(c.topsrcdir), mode="wt" + ) as mozconfig: + mozconfig.write("unused contents") + mozconfig.flush() + c.mozconfig = mozconfig.name + write_mozinfo(self.f, c) + with open(self.f) as f: + d = json.load(f) + self.assertEqual("win", d["os"]) + self.assertEqual("x86", d["processor"]) + self.assertEqual("windows", d["toolkit"]) + self.assertEqual(tempdir, d["topsrcdir"]) + self.assertEqual(mozconfig.name, d["mozconfig"]) + self.assertEqual(32, d["bits"]) + + def test_fileobj(self): + """ + Test that writing to a file-like object produces correct output. + """ + s = StringIO() + c = self._config( + dict( + OS_TARGET="WINNT", + TARGET_CPU="i386", + MOZ_WIDGET_TOOLKIT="windows", + ) + ) + write_mozinfo(s, c) + d = json.loads(s.getvalue()) + self.assertEqual("win", d["os"]) + self.assertEqual("x86", d["processor"]) + self.assertEqual("windows", d["toolkit"]) + self.assertEqual(32, d["bits"]) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_preprocessor.py b/python/mozbuild/mozbuild/test/test_preprocessor.py new file mode 100644 index 0000000000..82039c2bd7 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_preprocessor.py @@ -0,0 +1,832 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import shutil +import unittest +from tempfile import mkdtemp + +from mozunit import MockedOpen, main +from six import StringIO + +from mozbuild.preprocessor import Preprocessor + + +class TestPreprocessor(unittest.TestCase): + """ + Unit tests for the Preprocessor class + """ + + def setUp(self): + self.pp = Preprocessor() + self.pp.out = StringIO() + + def do_include_compare(self, content_lines, expected_lines): + content = "\n".join(content_lines) + expected = "\n".join(expected_lines) + + with MockedOpen({"dummy": content}): + self.pp.do_include("dummy") + self.assertEqual(self.pp.out.getvalue().rstrip("\n"), expected) + + def do_include_pass(self, content_lines): + self.do_include_compare(content_lines, ["PASS"]) + + def test_conditional_if_0(self): + self.do_include_pass( + [ + "#if 0", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_no_marker(self): + lines = [ + "#if 0", + "PASS", + "#endif", + ] + self.pp.setMarker(None) + self.do_include_compare(lines, lines) + + def test_string_value(self): + self.do_include_compare( + [ + "#define FOO STRING", + "#if FOO", + "string value is true", + "#else", + "string value is false", + "#endif", + ], + ["string value is false"], + ) + + def test_number_value(self): + self.do_include_compare( + [ + "#define FOO 1", + "#if FOO", + "number value is true", + "#else", + "number value is false", + "#endif", + ], + ["number value is true"], + ) + + def test_conditional_if_0_elif_1(self): + self.do_include_pass( + [ + "#if 0", + "#elif 1", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_conditional_if_1(self): + self.do_include_pass( + [ + "#if 1", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_conditional_if_0_or_1(self): + self.do_include_pass( + [ + "#if 0 || 1", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_conditional_if_1_elif_1_else(self): + self.do_include_pass( + [ + "#if 1", + "PASS", + "#elif 1", + "FAIL", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_conditional_if_1_if_1(self): + self.do_include_pass( + [ + "#if 1", + "#if 1", + "PASS", + "#else", + "FAIL", + "#endif", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_conditional_not_0(self): + self.do_include_pass( + [ + "#if !0", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_conditional_not_0_and_1(self): + self.do_include_pass( + [ + "#if !0 && !1", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_conditional_not_1(self): + self.do_include_pass( + [ + "#if !1", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_conditional_not_emptyval(self): + self.do_include_compare( + [ + "#define EMPTYVAL", + "#ifndef EMPTYVAL", + "FAIL", + "#else", + "PASS", + "#endif", + "#ifdef EMPTYVAL", + "PASS", + "#else", + "FAIL", + "#endif", + ], + ["PASS", "PASS"], + ) + + def test_conditional_not_nullval(self): + self.do_include_pass( + [ + "#define NULLVAL 0", + "#if !NULLVAL", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_indentation(self): + self.do_include_pass( + [ + " #define NULLVAL 0", + " #if !NULLVAL", + "PASS", + " #else", + "FAIL", + " #endif", + ] + ) + + def test_expand(self): + self.do_include_pass( + [ + "#define ASVAR AS", + "#expand P__ASVAR__S", + ] + ) + + def test_undef_defined(self): + self.do_include_compare( + [ + "#define BAR", + "#undef BAR", + "BAR", + ], + ["BAR"], + ) + + def test_undef_undefined(self): + self.do_include_compare( + [ + "#undef BAR", + ], + [], + ) + + def 
test_filter_attemptSubstitution(self): + self.do_include_compare( + [ + "#filter attemptSubstitution", + "@PASS@", + "#unfilter attemptSubstitution", + ], + ["@PASS@"], + ) + + def test_filter_emptyLines(self): + self.do_include_compare( + [ + "lines with a", + "", + "blank line", + "#filter emptyLines", + "lines with", + "", + "no blank lines", + "#unfilter emptyLines", + "yet more lines with", + "", + "blank lines", + ], + [ + "lines with a", + "", + "blank line", + "lines with", + "no blank lines", + "yet more lines with", + "", + "blank lines", + ], + ) + + def test_filter_dumbComments(self): + self.do_include_compare( + [ + "#filter dumbComments", + "PASS//PASS // PASS", + " //FAIL", + "// FAIL", + "PASS //", + "PASS // FAIL", + "//", + "", + "#unfilter dumbComments", + "// PASS", + ], + [ + "PASS//PASS // PASS", + "", + "", + "PASS //", + "PASS // FAIL", + "", + "", + "// PASS", + ], + ) + + def test_filter_dumbComments_and_emptyLines(self): + self.do_include_compare( + [ + "#filter dumbComments emptyLines", + "PASS//PASS // PASS", + " //FAIL", + "// FAIL", + "PASS //", + "PASS // FAIL", + "//", + "", + "#unfilter dumbComments emptyLines", + "", + "// PASS", + ], + [ + "PASS//PASS // PASS", + "PASS //", + "PASS // FAIL", + "", + "// PASS", + ], + ) + + def test_filter_substitution(self): + self.do_include_pass( + [ + "#define VAR ASS", + "#filter substitution", + "P@VAR@", + "#unfilter substitution", + ] + ) + + def test_error(self): + with MockedOpen({"f": "#error spit this message out\n"}): + with self.assertRaises(Preprocessor.Error) as e: + self.pp.do_include("f") + self.assertEqual(e.exception.args[0][-1], "spit this message out") + + def test_ambiguous_command(self): + comment = "# if I tell you a joke\n" + with MockedOpen({"f": comment}): + with self.assertRaises(Preprocessor.Error) as e: + self.pp.do_include("f") + the_exception = e.exception + self.assertEqual(the_exception.args[0][-1], comment) + + def test_javascript_line(self): + # The preprocessor is reading the filename from somewhere not caught + # by MockedOpen. 
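+ # Work around this by writing a real file under a temporary directory; the finally block below cleans it up.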
+ tmpdir = mkdtemp() + try: + full = os.path.join(tmpdir, "javascript_line.js.in") + with open(full, "w") as fh: + fh.write( + "\n".join( + [ + "// Line 1", + "#if 0", + "// line 3", + "#endif", + "// line 5", + "# comment", + "// line 7", + "// line 8", + "// line 9", + "# another comment", + "// line 11", + "#define LINE 1", + "// line 13, given line number overwritten with 2", + "", + ] + ) + ) + + self.pp.do_include(full) + out = "\n".join( + [ + "// Line 1", + '//@line 5 "CWDjavascript_line.js.in"', + "// line 5", + '//@line 7 "CWDjavascript_line.js.in"', + "// line 7", + "// line 8", + "// line 9", + '//@line 11 "CWDjavascript_line.js.in"', + "// line 11", + '//@line 2 "CWDjavascript_line.js.in"', + "// line 13, given line number overwritten with 2", + "", + ] + ) + out = out.replace("CWD", tmpdir + os.path.sep) + self.assertEqual(self.pp.out.getvalue(), out) + finally: + shutil.rmtree(tmpdir) + + def test_literal(self): + self.do_include_pass( + [ + "#literal PASS", + ] + ) + + def test_var_directory(self): + self.do_include_pass( + [ + "#ifdef DIRECTORY", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_file(self): + self.do_include_pass( + [ + "#ifdef FILE", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_if_0(self): + self.do_include_pass( + [ + "#define VAR 0", + "#if VAR", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_var_if_0_elifdef(self): + self.do_include_pass( + [ + "#if 0", + "#elifdef FILE", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_if_0_elifndef(self): + self.do_include_pass( + [ + "#if 0", + "#elifndef VAR", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_ifdef_0(self): + self.do_include_pass( + [ + "#define VAR 0", + "#ifdef VAR", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_ifdef_1_or_undef(self): + self.do_include_pass( + [ + "#define FOO 1", + "#if defined(FOO) || defined(BAR)", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_ifdef_undef(self): + self.do_include_pass( + [ + "#define VAR 0", + "#undef VAR", + "#ifdef VAR", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_var_ifndef_0(self): + self.do_include_pass( + [ + "#define VAR 0", + "#ifndef VAR", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_var_ifndef_0_and_undef(self): + self.do_include_pass( + [ + "#define FOO 0", + "#if !defined(FOO) && !defined(BAR)", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_var_ifndef_undef(self): + self.do_include_pass( + [ + "#define VAR 0", + "#undef VAR", + "#ifndef VAR", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_var_line(self): + self.do_include_pass( + [ + "#ifdef LINE", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_filterDefine(self): + self.do_include_pass( + [ + "#filter substitution", + "#define VAR AS", + "#define VAR2 P@VAR@", + "@VAR2@S", + ] + ) + + def test_number_value_equals(self): + self.do_include_pass( + [ + "#define FOO 1000", + "#if FOO == 1000", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_default_defines(self): + self.pp.handleCommandLine(["-DFOO"]) + self.do_include_pass( + [ + "#if FOO == 1", + "PASS", + "#else", + "FAIL", + ] + ) + + def test_number_value_equals_defines(self): + self.pp.handleCommandLine(["-DFOO=1000"]) + self.do_include_pass( + [ + "#if FOO == 1000", + "PASS", + "#else", + "FAIL", + ] + ) + + def test_octal_value_equals(self): + self.do_include_pass( + [ + "#define FOO 0100", + "#if FOO 
== 0100", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_octal_value_equals_defines(self): + self.pp.handleCommandLine(["-DFOO=0100"]) + self.do_include_pass( + [ + "#if FOO == 0100", + "PASS", + "#else", + "FAIL", + "#endif", + ] + ) + + def test_value_quoted_expansion(self): + """ + Quoted values on the commandline don't currently have quotes stripped. + Pike says this is for compat reasons. + """ + self.pp.handleCommandLine(['-DFOO="ABCD"']) + self.do_include_compare( + [ + "#filter substitution", + "@FOO@", + ], + ['"ABCD"'], + ) + + def test_octal_value_quoted_expansion(self): + self.pp.handleCommandLine(['-DFOO="0100"']) + self.do_include_compare( + [ + "#filter substitution", + "@FOO@", + ], + ['"0100"'], + ) + + def test_number_value_not_equals_quoted_defines(self): + self.pp.handleCommandLine(['-DFOO="1000"']) + self.do_include_pass( + [ + "#if FOO == 1000", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_octal_value_not_equals_quoted_defines(self): + self.pp.handleCommandLine(['-DFOO="0100"']) + self.do_include_pass( + [ + "#if FOO == 0100", + "FAIL", + "#else", + "PASS", + "#endif", + ] + ) + + def test_undefined_variable(self): + with MockedOpen({"f": "#filter substitution\n@foo@"}): + with self.assertRaises(Preprocessor.Error) as e: + self.pp.do_include("f") + self.assertEqual(e.key, "UNDEFINED_VAR") + + def test_include(self): + files = { + "foo/test": "\n".join( + [ + "#define foo foobarbaz", + "#include @inc@", + "@bar@", + "", + ] + ), + "bar": "\n".join( + [ + "#define bar barfoobaz", + "@foo@", + "", + ] + ), + "f": "\n".join( + [ + "#filter substitution", + "#define inc ../bar", + "#include foo/test", + "", + ] + ), + } + + with MockedOpen(files): + self.pp.do_include("f") + self.assertEqual(self.pp.out.getvalue(), "foobarbaz\nbarfoobaz\n") + + def test_include_line(self): + files = { + "srcdir/test.js": "\n".join( + [ + "#define foo foobarbaz", + "#include @inc@", + "@bar@", + "", + ] + ), + "srcdir/bar.js": "\n".join( + [ + "#define bar barfoobaz", + "@foo@", + "", + ] + ), + "srcdir/foo.js": "\n".join( + [ + "bazfoobar", + "#include bar.js", + "bazbarfoo", + "", + ] + ), + "objdir/baz.js": "baz\n", + "srcdir/f.js": "\n".join( + [ + "#include foo.js", + "#filter substitution", + "#define inc bar.js", + "#include test.js", + "#include ../objdir/baz.js", + "fin", + "", + ] + ), + } + + preprocessed = ( + '//@line 1 "$SRCDIR/foo.js"\n' + "bazfoobar\n" + '//@line 2 "$SRCDIR/bar.js"\n' + "@foo@\n" + '//@line 3 "$SRCDIR/foo.js"\n' + "bazbarfoo\n" + '//@line 2 "$SRCDIR/bar.js"\n' + "foobarbaz\n" + '//@line 3 "$SRCDIR/test.js"\n' + "barfoobaz\n" + '//@line 1 "$OBJDIR/baz.js"\n' + "baz\n" + '//@line 6 "$SRCDIR/f.js"\n' + "fin\n" + ) + + # Try with separate srcdir/objdir + with MockedOpen(files): + self.pp.topsrcdir = os.path.abspath("srcdir") + self.pp.topobjdir = os.path.abspath("objdir") + self.pp.do_include("srcdir/f.js") + self.assertEqual(self.pp.out.getvalue(), preprocessed) + + # Try again with relative objdir + self.setUp() + files["srcdir/objdir/baz.js"] = files["objdir/baz.js"] + del files["objdir/baz.js"] + files["srcdir/f.js"] = files["srcdir/f.js"].replace("../", "") + with MockedOpen(files): + self.pp.topsrcdir = os.path.abspath("srcdir") + self.pp.topobjdir = os.path.abspath("srcdir/objdir") + self.pp.do_include("srcdir/f.js") + self.assertEqual(self.pp.out.getvalue(), preprocessed) + + def test_include_missing_file(self): + with MockedOpen({"f": "#include foo\n"}): + with self.assertRaises(Preprocessor.Error) as e: + 
self.pp.do_include("f") + self.assertEqual(e.exception.key, "FILE_NOT_FOUND") + + def test_include_undefined_variable(self): + with MockedOpen({"f": "#filter substitution\n#include @foo@\n"}): + with self.assertRaises(Preprocessor.Error) as e: + self.pp.do_include("f") + self.assertEqual(e.exception.key, "UNDEFINED_VAR") + + def test_include_literal_at(self): + files = { + "@foo@": "#define foo foobarbaz\n", + "f": "#include @foo@\n#filter substitution\n@foo@\n", + } + + with MockedOpen(files): + self.pp.do_include("f") + self.assertEqual(self.pp.out.getvalue(), "foobarbaz\n") + + def test_command_line_literal_at(self): + with MockedOpen({"@foo@.in": "@foo@\n"}): + self.pp.handleCommandLine(["-Fsubstitution", "-Dfoo=foobarbaz", "@foo@.in"]) + self.assertEqual(self.pp.out.getvalue(), "foobarbaz\n") + + def test_invalid_ifdef(self): + with MockedOpen({"dummy": "#ifdef FOO == BAR\nPASS\n#endif"}): + with self.assertRaises(Preprocessor.Error) as e: + self.pp.do_include("dummy") + self.assertEqual(e.exception.key, "INVALID_VAR") + + with MockedOpen({"dummy": "#ifndef FOO == BAR\nPASS\n#endif"}): + with self.assertRaises(Preprocessor.Error) as e: + self.pp.do_include("dummy") + self.assertEqual(e.exception.key, "INVALID_VAR") + + # Trailing whitespaces, while not nice, shouldn't be an error. + self.do_include_pass( + [ + "#ifndef FOO ", + "PASS", + "#endif", + ] + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_pythonutil.py b/python/mozbuild/mozbuild/test/test_pythonutil.py new file mode 100644 index 0000000000..6ebb5cc46e --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_pythonutil.py @@ -0,0 +1,24 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os + +from mozunit import main + +from mozbuild.pythonutil import iter_modules_in_path + + +def test_iter_modules_in_path(): + tests_path = os.path.normcase(os.path.dirname(__file__)) + paths = list(iter_modules_in_path(tests_path)) + assert set(paths) == set( + [ + os.path.join(os.path.abspath(tests_path), "__init__.py"), + os.path.join(os.path.abspath(tests_path), "test_pythonutil.py"), + ] + ) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py b/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py new file mode 100644 index 0000000000..467295c9e9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py @@ -0,0 +1,515 @@ +# coding: utf-8 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
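+
+# rewrite_mozbuild edits moz.build files when vendored sources are added or
+# removed. Path normalization is the core idea: a path is resolved relative
+# to the moz.build file that lists it. A sketch based on the assertions below:
+#
+#   normalize_filename("foo/bar/moz.build", "baz/a.c")  # -> "foo/bar/baz/a.c"
+#   normalize_filename("foo/bar/moz.build", "/a.c")     # -> "/a.c" (already topsrcdir-relative)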
+ +import os +import tempfile +import unittest + +from mozunit import main + +import mozbuild.vendor.rewrite_mozbuild as mu + +SAMPLE_PIXMAN_MOZBUILD = """ +if CONFIG['OS_ARCH'] != 'Darwin' and CONFIG['CC_TYPE'] in ('clang', 'gcc'): + if CONFIG['HAVE_ARM_NEON']: + SOURCES += [ + "pixman-arm-neon-asm-bilinear.S", + "pixman-arm-neon-asm.S", + ] + if CONFIG['HAVE_ARM_SIMD']: + SOURCES += [ + 'pixman-arm-simd-asm-scaled.S', + 'pixman-arm-simd-asm.S'] + +SOURCES += ['pixman-region32.c', + 'pixman-solid-fill.c', + 'pixman-trap.c', + 'pixman-utils.c', + 'pixman-x86.c', + 'pixman.c', +] + +if use_sse2: + DEFINES['USE_SSE'] = True + DEFINES['USE_SSE2'] = True + SOURCES += ['pixman-sse2.c'] + SOURCES['pixman-sse2.c'].flags += CONFIG['SSE_FLAGS'] + CONFIG['SSE2_FLAGS'] + if CONFIG['CC_TYPE'] in ('clang', 'gcc'): + SOURCES['pixman-sse2.c'].flags += ['-Winline'] +""" + +SAMPLE_DAV1D_MOZBUILD = """ +SOURCES += [ + '../../third_party/dav1d/src/cdf.c', + '../../third_party/dav1d/src/cpu.c', + ] +EXPORTS = [ + '../../third_party/dav1d/src/header1.h', + '../../third_party/dav1d/src/header2.h', + ] +""" + + +SAMPLE_JPEGXL_MOZBUILD = """ +SOURCES += [ + "/third_party/jpeg-xl/lib/jxl/ac_strategy.cc", + "/third_party/jpeg-xl/lib/jxl/alpha.cc", + "/third_party/jpeg-xl/lib/jxl/ans_common.cc", + "/third_party/jpeg-xl/lib/jxl/aux_out.cc", + ] +EXPORTS.bob.carol = [ + "/third_party/jpeg-xl/lib/jxl/header1.hpp", + "/third_party/jpeg-xl/lib/jxl/header2.h", +] +""" + + +def _make_mozbuild_directory_structure(mozbuild_path, contents): + d = tempfile.TemporaryDirectory() + os.makedirs(os.path.join(d.name, os.path.split(mozbuild_path)[0])) + + arcconfig = open(os.path.join(d.name, ".arcconfig"), mode="w") + arcconfig.close() + + mozbuild = open(os.path.join(d.name, mozbuild_path), mode="w") + mozbuild.write(contents) + mozbuild.close() + + return d + + +class TestUtils(unittest.TestCase): + def test_normalize_filename(self): + self.assertEqual(mu.normalize_filename("foo/bar/moz.build", "/"), "/") + self.assertEqual( + mu.normalize_filename("foo/bar/moz.build", "a.c"), "foo/bar/a.c" + ) + self.assertEqual( + mu.normalize_filename("foo/bar/moz.build", "baz/a.c"), "foo/bar/baz/a.c" + ) + self.assertEqual(mu.normalize_filename("foo/bar/moz.build", "/a.c"), "/a.c") + + def test_unnormalize_filename(self): + test_vectors = [ + ("foo/bar/moz.build", "/"), + ("foo/bar/moz.build", "a.c"), + ("foo/bar/moz.build", "baz/a.c"), + ("foo/bar/moz.build", "/a.c"), + ] + + for vector in test_vectors: + mozbuild, file = vector + self.assertEqual( + mu.unnormalize_filename( + mozbuild, mu.normalize_filename(mozbuild, file) + ), + file, + ) + + def test_find_all_posible_assignments_from_filename(self): + test_vectors = [ + # ( + # target_filename_normalized + # source_assignments + # expected + # ) + ( + "root/dir/asm/blah.S", + { + "> SOURCES": ["root/dir/main.c"], + "> if conditional > SOURCES": ["root/dir/asm/blah.S"], + }, + {"> if conditional > SOURCES": ["root/dir/asm/blah.S"]}, + ), + ( + "root/dir/dostuff.c", + { + "> SOURCES": ["root/dir/main.c"], + "> if conditional > SOURCES": ["root/dir/asm/blah.S"], + }, + {"> SOURCES": ["root/dir/main.c"]}, + ), + ] + + for vector in test_vectors: + target_filename_normalized, source_assignments, expected = vector + actual = mu.find_all_posible_assignments_from_filename( + source_assignments, target_filename_normalized + ) + self.assertEqual(actual, expected) + + def test_filenames_directory_is_in_filename_list(self): + test_vectors = [ + # ( + # normalized filename + # list of 
normalized_filenames + # expected + # ) + ("foo/bar/a.c", ["foo/b.c"], False), + ("foo/bar/a.c", ["foo/b.c", "foo/bar/c.c"], True), + ("foo/bar/a.c", ["foo/b.c", "foo/bar/baz/d.c"], False), + ] + for vector in test_vectors: + normalized_filename, list_of_normalized_filenames, expected = vector + actual = mu.filenames_directory_is_in_filename_list( + normalized_filename, list_of_normalized_filenames + ) + self.assertEqual(actual, expected) + + def test_guess_best_assignment(self): + test_vectors = [ + # ( + # filename_normalized + # source_assignments + # expected + # ) + ( + "foo/asm_arm.c", + { + "> SOURCES": ["foo/main.c", "foo/all_utility.c"], + "> if ASM > SOURCES": ["foo/asm_x86.c"], + }, + "> if ASM > SOURCES", + ) + ] + for vector in test_vectors: + normalized_filename, source_assignments, expected = vector + actual, _ = mu.guess_best_assignment( + source_assignments, normalized_filename + ) + self.assertEqual(actual, expected) + + def test_mozbuild_removing(self): + test_vectors = [ + ( + "media/dav1d/moz.build", + SAMPLE_DAV1D_MOZBUILD, + "third_party/dav1d/src/cdf.c", + "media/dav1d/", + "third-party/dav1d/", + " '../../third_party/dav1d/src/cdf.c',\n", + ), + ( + "media/dav1d/moz.build", + SAMPLE_DAV1D_MOZBUILD, + "third_party/dav1d/src/header1.h", + "media/dav1d/", + "third-party/dav1d/", + " '../../third_party/dav1d/src/header1.h',\n", + ), + ( + "media/jxl/moz.build", + SAMPLE_JPEGXL_MOZBUILD, + "third_party/jpeg-xl/lib/jxl/alpha.cc", + "media/jxl/", + "third-party/jpeg-xl/", + ' "/third_party/jpeg-xl/lib/jxl/alpha.cc",\n', + ), + ( + "media/jxl/moz.build", + SAMPLE_JPEGXL_MOZBUILD, + "third_party/jpeg-xl/lib/jxl/header1.hpp", + "media/jxl/", + "third-party/jpeg-xl/", + ' "/third_party/jpeg-xl/lib/jxl/header1.hpp",\n', + ), + ] + + for vector in test_vectors: + ( + mozbuild_path, + mozbuild_contents, + file_to_remove, + moz_yaml_dir, + vendoring_dir, + replace_str, + ) = vector + + startdir = os.getcwd() + try: + mozbuild_dir = _make_mozbuild_directory_structure( + mozbuild_path, mozbuild_contents + ) + os.chdir(mozbuild_dir.name) + + mu.remove_file_from_moz_build_file( + file_to_remove, + moz_yaml_dir=moz_yaml_dir, + vendoring_dir=vendoring_dir, + ) + + with open(os.path.join(mozbuild_dir.name, mozbuild_path)) as file: + contents = file.read() + + expected_output = mozbuild_contents.replace(replace_str, "") + if contents != expected_output: + print("File to remove:", file_to_remove) + print("Contents:") + print("-------------------") + print(contents) + print("-------------------") + print("Expected:") + print("-------------------") + print(expected_output) + print("-------------------") + self.assertEqual(contents, expected_output) + finally: + os.chdir(startdir) + + def test_mozbuild_adding(self): + test_vectors = [ + ( + "media/dav1d/moz.build", + SAMPLE_DAV1D_MOZBUILD, + "third_party/dav1d/src/cdf2.c", + "media/dav1d/", + "third-party/dav1d/", + "cdf.c',\n", + "cdf.c',\n '../../third_party/dav1d/src/cdf2.c',\n", + ), + ( + "media/dav1d/moz.build", + SAMPLE_DAV1D_MOZBUILD, + "third_party/dav1d/src/header3.h", + "media/dav1d/", + "third-party/dav1d/", + "header2.h',\n", + "header2.h',\n '../../third_party/dav1d/src/header3.h',\n", + ), + ( + "media/jxl/moz.build", + SAMPLE_JPEGXL_MOZBUILD, + "third_party/jpeg-xl/lib/jxl/alpha2.cc", + "media/jxl/", + "third-party/jpeg-xl/", + 'alpha.cc",\n', + 'alpha.cc",\n "/third_party/jpeg-xl/lib/jxl/alpha2.cc",\n', + ), + ( + "media/jxl/moz.build", + SAMPLE_JPEGXL_MOZBUILD, + "third_party/jpeg-xl/lib/jxl/header3.hpp", + "media/jxl/", 
+ "third-party/jpeg-xl/", + 'header2.h",\n', + 'header2.h",\n "/third_party/jpeg-xl/lib/jxl/header3.hpp",\n', + ), + ] + + for vector in test_vectors: + ( + mozbuild_path, + mozbuild_contents, + file_to_add, + moz_yaml_dir, + vendoring_dir, + search_str, + replace_str, + ) = vector + + startdir = os.getcwd() + try: + mozbuild_dir = _make_mozbuild_directory_structure( + mozbuild_path, mozbuild_contents + ) + os.chdir(mozbuild_dir.name) + + mu.add_file_to_moz_build_file( + file_to_add, moz_yaml_dir=moz_yaml_dir, vendoring_dir=vendoring_dir + ) + + with open(os.path.join(mozbuild_dir.name, mozbuild_path)) as file: + contents = file.read() + + expected_output = mozbuild_contents.replace(search_str, replace_str) + if contents != expected_output: + print("File to add:", file_to_add) + print("Contents:") + print("-------------------") + print(contents) + print("-------------------") + print("Expected:") + print("-------------------") + print(expected_output) + print("-------------------") + self.assertEqual(contents, expected_output) + finally: + os.chdir(startdir) + + # This test is legacy. I'm keeping it around, but new test vectors should be added to the + # non-internal test to exercise the public API. + def test_mozbuild_adding_internal(self): + test_vectors = [ + # ( + # mozbuild_contents + # unnormalized_filename_to_add, + # unnormalized_list_of_files + # expected_output + # ) + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-sse2-more.c", + ["pixman-sse2.c"], + SAMPLE_PIXMAN_MOZBUILD.replace( + "SOURCES += ['pixman-sse2.c']", + "SOURCES += ['pixman-sse2-more.c','pixman-sse2.c']", + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-trap-more.c", + [ + "pixman-region32.c", + "pixman-solid-fill.c", + "pixman-trap.c", + "pixman-utils.c", + "pixman-x86.c", + "pixman.c", + ], + SAMPLE_PIXMAN_MOZBUILD.replace( + "'pixman-trap.c',", "'pixman-trap-more.c',\n 'pixman-trap.c'," + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-arm-neon-asm-more.S", + ["pixman-arm-neon-asm-bilinear.S", "pixman-arm-neon-asm.S"], + SAMPLE_PIXMAN_MOZBUILD.replace( + '"pixman-arm-neon-asm.S"', + '"pixman-arm-neon-asm-more.S",\n "pixman-arm-neon-asm.S"', + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-arm-simd-asm-smore.S", + ["pixman-arm-simd-asm-scaled.S", "pixman-arm-simd-asm.S"], + SAMPLE_PIXMAN_MOZBUILD.replace( + "'pixman-arm-simd-asm.S'", + "'pixman-arm-simd-asm-smore.S',\n 'pixman-arm-simd-asm.S'", + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-arm-simd-asn.S", + ["pixman-arm-simd-asm-scaled.S", "pixman-arm-simd-asm.S"], + SAMPLE_PIXMAN_MOZBUILD.replace( + "'pixman-arm-simd-asm.S'", + "'pixman-arm-simd-asm.S',\n 'pixman-arm-simd-asn.S'", + ), + ), + ] + + for vector in test_vectors: + ( + mozbuild_contents, + unnormalized_filename_to_add, + unnormalized_list_of_files, + expected_output, + ) = vector + + fd, filename = tempfile.mkstemp(text=True) + os.close(fd) + file = open(filename, mode="w") + file.write(mozbuild_contents) + file.close() + + mu.edit_moz_build_file_to_add_file( + filename, unnormalized_filename_to_add, unnormalized_list_of_files + ) + + with open(filename) as file: + contents = file.read() + os.remove(filename) + + if contents != expected_output: + print("File to add:", unnormalized_filename_to_add) + print("Contents:") + print("-------------------") + print(contents) + print("-------------------") + print("Expected:") + print("-------------------") + print(expected_output) + print("-------------------") + self.assertEqual(contents, expected_output) + + # This test is legacy. 
I'm keeping it around, but new test vectors should be added to the + # non-internal test to exercise the public API. + def test_mozbuild_removing_internal(self): + test_vectors = [ + # ( + # mozbuild_contents + # unnormalized_filename_to_add + # expected_output + # ) + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-sse2.c", + SAMPLE_PIXMAN_MOZBUILD.replace( + "SOURCES += ['pixman-sse2.c']", "SOURCES += []" + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-trap.c", + SAMPLE_PIXMAN_MOZBUILD.replace(" 'pixman-trap.c',\n", ""), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-arm-neon-asm.S", + SAMPLE_PIXMAN_MOZBUILD.replace( + ' "pixman-arm-neon-asm.S",\n', "" + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-arm-simd-asm.S", + SAMPLE_PIXMAN_MOZBUILD.replace( + " 'pixman-arm-simd-asm.S'", " " + ), + ), + ( + SAMPLE_PIXMAN_MOZBUILD, + "pixman-region32.c", + SAMPLE_PIXMAN_MOZBUILD.replace("'pixman-region32.c',", ""), + ), + ] + + for vector in test_vectors: + ( + mozbuild_contents, + unnormalized_filename_to_remove, + expected_output, + ) = vector + + fd, filename = tempfile.mkstemp(text=True) + os.close(fd) + file = open(filename, mode="w") + file.write(mozbuild_contents) + file.close() + + mu.edit_moz_build_file_to_remove_file( + filename, unnormalized_filename_to_remove + ) + + with open(filename) as file: + contents = file.read() + os.remove(filename) + + if contents != expected_output: + print("File to remove:", unnormalized_filename_to_remove) + print("Contents:") + print("-------------------") + print(contents) + print("-------------------") + print("Expected:") + print("-------------------") + print(expected_output) + print("-------------------") + self.assertEqual(contents, expected_output) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_telemetry.py b/python/mozbuild/mozbuild/test/test_telemetry.py new file mode 100644 index 0000000000..894e32ee2d --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_telemetry.py @@ -0,0 +1,102 @@ +# Any copyright is dedicated to the Public Domain. +# http://creativecommons.org/publicdomain/zero/1.0/ + +import os + +import buildconfig +import mozunit + +from mozbuild.telemetry import filter_args + +TELEMETRY_LOAD_ERROR = """ +Error loading telemetry. 
mach output: +========================================================= +%s +========================================================= +""" + + +def test_path_filtering(): + srcdir_path = os.path.join(buildconfig.topsrcdir, "a") + srcdir_path_2 = os.path.join(buildconfig.topsrcdir, "a/b/c") + objdir_path = os.path.join(buildconfig.topobjdir, "x") + objdir_path_2 = os.path.join(buildconfig.topobjdir, "x/y/z") + home_path = os.path.join(os.path.expanduser("~"), "something_in_home") + other_path = "/other/path" + args = filter_args( + "pass", + [ + "python", + "-c", + "pass", + srcdir_path, + srcdir_path_2, + objdir_path, + objdir_path_2, + home_path, + other_path, + ], + buildconfig.topsrcdir, + buildconfig.topobjdir, + cwd=buildconfig.topsrcdir, + ) + + expected = [ + "a", + "a/b/c", + "$topobjdir/x", + "$topobjdir/x/y/z", + "$HOME/something_in_home", + "", + ] + assert args == expected + + +def test_path_filtering_in_objdir(): + srcdir_path = os.path.join(buildconfig.topsrcdir, "a") + srcdir_path_2 = os.path.join(buildconfig.topsrcdir, "a/b/c") + objdir_path = os.path.join(buildconfig.topobjdir, "x") + objdir_path_2 = os.path.join(buildconfig.topobjdir, "x/y/z") + other_path = "/other/path" + args = filter_args( + "pass", + [ + "python", + "-c", + "pass", + srcdir_path, + srcdir_path_2, + objdir_path, + objdir_path_2, + other_path, + ], + buildconfig.topsrcdir, + buildconfig.topobjdir, + cwd=buildconfig.topobjdir, + ) + expected = ["$topsrcdir/a", "$topsrcdir/a/b/c", "x", "x/y/z", ""] + assert args == expected + + +def test_path_filtering_other_cwd(tmpdir): + srcdir_path = os.path.join(buildconfig.topsrcdir, "a") + srcdir_path_2 = os.path.join(buildconfig.topsrcdir, "a/b/c") + other_path = str(tmpdir.join("other")) + args = filter_args( + "pass", + ["python", "-c", "pass", srcdir_path, srcdir_path_2, other_path], + buildconfig.topsrcdir, + buildconfig.topobjdir, + cwd=str(tmpdir), + ) + expected = [ + "$topsrcdir/a", + "$topsrcdir/a/b/c", + # cwd-relative paths should be relativized + "other", + ] + assert args == expected + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_telemetry_settings.py b/python/mozbuild/mozbuild/test/test_telemetry_settings.py new file mode 100644 index 0000000000..2d50141a15 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_telemetry_settings.py @@ -0,0 +1,174 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
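+
+# These tests exercise the machrc round-trip: record_telemetry_settings()
+# writes the telemetry choice into <state_dir>/machrc and load_file() reads it
+# back. The core pattern, with tmpdir standing in for the real state
+# directory (conventionally ~/.mozbuild):
+#
+#   record_telemetry_settings(settings, tmpdir, True)
+#   settings.load_file(os.path.join(tmpdir, "machrc"))
+#   settings.mach_telemetry.is_enabled  # -> True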
+ +import os +from unittest import mock +from unittest.mock import Mock + +import mozunit +import pytest +import requests +from mach.config import ConfigSettings +from mach.decorators import SettingsProvider +from mach.telemetry import ( + initialize_telemetry_setting, + record_telemetry_settings, + resolve_is_employee, +) + +from mozbuild.settings import TelemetrySettings + + +@SettingsProvider +class OtherSettings: + config_settings = [("foo.bar", "int", "", 1), ("build.abc", "string", "", "")] + + +def record_enabled_telemetry(mozbuild_path, settings): + record_telemetry_settings(settings, mozbuild_path, True) + + +@pytest.fixture +def settings(): + s = ConfigSettings() + s.register_provider(TelemetrySettings) + s.register_provider(OtherSettings) + return s + + +def load_settings_file(mozbuild_path, settings): + settings.load_file(os.path.join(mozbuild_path, "machrc")) + + +def write_config(mozbuild_path, contents): + with open(os.path.join(mozbuild_path, "machrc"), "w") as f: + f.write(contents) + + +def test_nonexistent(tmpdir, settings): + record_enabled_telemetry(tmpdir, settings) + load_settings_file(tmpdir, settings) + assert settings.mach_telemetry.is_enabled + + +def test_file_exists_no_build_section(tmpdir, settings): + write_config( + tmpdir, + """[foo] +bar = 2 +""", + ) + record_enabled_telemetry(tmpdir, settings) + load_settings_file(tmpdir, settings) + assert settings.mach_telemetry.is_enabled + assert settings.foo.bar == 2 + + +def test_existing_build_section(tmpdir, settings): + write_config( + tmpdir, + """[foo] +bar = 2 + +[build] +abc = xyz +""", + ) + record_enabled_telemetry(tmpdir, settings) + load_settings_file(tmpdir, settings) + assert settings.mach_telemetry.is_enabled + assert settings.build.abc == "xyz" + assert settings.foo.bar == 2 + + +def test_malformed_file(tmpdir, settings): + """Ensure that a malformed config file doesn't cause breakage.""" + write_config( + tmpdir, + """[foo +bar = 1 +""", + ) + record_enabled_telemetry(tmpdir, settings) + # Can't load_settings config, it will not have been written! 
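+
+# The helper below stubs out every external interaction (the employee lookup,
+# the contributor prompt, subprocess calls, and config writes) so that
+# initialize_telemetry_setting() runs deterministically; it returns True when
+# the contributor prompt was shown exactly once.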
+ + +def _initialize_telemetry(settings, is_employee, contributor_prompt_response=None): + with mock.patch( + "mach.telemetry.resolve_is_employee", return_value=is_employee + ), mock.patch( + "mach.telemetry.prompt_telemetry_message_contributor", + return_value=contributor_prompt_response, + ) as prompt_mock, mock.patch( + "subprocess.run", return_value=Mock(returncode=0) + ), mock.patch( + "mach.config.ConfigSettings" + ): + initialize_telemetry_setting(settings, "", "") + return prompt_mock.call_count == 1 + + +def test_initialize_new_contributor_deny_telemetry(settings): + did_prompt = _initialize_telemetry(settings, False, False) + assert did_prompt + assert not settings.mach_telemetry.is_enabled + assert settings.mach_telemetry.is_set_up + assert settings.mach_telemetry.is_done_first_time_setup + + +def test_initialize_new_contributor_allow_telemetry(settings): + did_prompt = _initialize_telemetry(settings, False, True) + assert did_prompt + assert settings.mach_telemetry.is_enabled + assert settings.mach_telemetry.is_set_up + assert settings.mach_telemetry.is_done_first_time_setup + + +def test_initialize_new_employee(settings): + did_prompt = _initialize_telemetry(settings, True) + assert not did_prompt + assert settings.mach_telemetry.is_enabled + assert settings.mach_telemetry.is_set_up + assert settings.mach_telemetry.is_done_first_time_setup + + +def test_initialize_noop_when_telemetry_disabled_env(monkeypatch): + monkeypatch.setenv("DISABLE_TELEMETRY", "1") + with mock.patch("mach.telemetry.record_telemetry_settings") as record_mock: + did_prompt = _initialize_telemetry(None, False) + assert record_mock.call_count == 0 + assert not did_prompt + + +def test_initialize_noop_when_request_error(settings): + with mock.patch( + "mach.telemetry.resolve_is_employee", + side_effect=requests.exceptions.RequestException("Unlucky"), + ), mock.patch("mach.telemetry.record_telemetry_settings") as record_mock: + initialize_telemetry_setting(None, None, None) + assert record_mock.call_count == 0 + + +def test_resolve_is_employee(): + def mock_and_run(is_employee_bugzilla, is_employee_vcs): + with mock.patch( + "mach.telemetry.resolve_is_employee_by_credentials", + return_value=is_employee_bugzilla, + ), mock.patch( + "mach.telemetry.resolve_is_employee_by_vcs", return_value=is_employee_vcs + ): + return resolve_is_employee(None) + + assert not mock_and_run(False, False) + assert not mock_and_run(False, True) + assert not mock_and_run(False, None) + assert mock_and_run(True, False) + assert mock_and_run(True, True) + assert mock_and_run(True, None) + assert not mock_and_run(None, False) + assert mock_and_run(None, True) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_util.py b/python/mozbuild/mozbuild/test/test_util.py new file mode 100644 index 0000000000..9931b338b9 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_util.py @@ -0,0 +1,889 @@ +# coding: utf-8 +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
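+
+# A sketch of HierarchicalStringList, the structure several tests below
+# exercise: attribute access creates nested levels, += appends (sorted) string
+# lists, and walk() yields (path, strings) pairs for levels that have strings:
+#
+#   exports = HierarchicalStringList()
+#   exports += ["foo.h"]
+#   exports.dom += ["dom.h"]
+#   list(exports.walk())  # -> (roughly) [("", ["foo.h"]), ("dom", ["dom.h"])]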
+ +import copy +import hashlib +import itertools +import os +import string +import sys +import unittest + +import pytest +import six +from mozfile.mozfile import NamedTemporaryFile +from mozunit import main + +from mozbuild.util import ( + EnumString, + EnumStringComparisonError, + HierarchicalStringList, + MozbuildDeletionError, + ReadOnlyDict, + StrictOrderingOnAppendList, + StrictOrderingOnAppendListWithAction, + StrictOrderingOnAppendListWithFlagsFactory, + TypedList, + TypedNamedTuple, + UnsortedError, + expand_variables, + group_unified_files, + hash_file, + hexdump, + memoize, + memoized_property, + pair, + resolve_target_to_make, +) + +if sys.version_info[0] == 3: + str_type = "str" +else: + str_type = "unicode" + +data_path = os.path.abspath(os.path.dirname(__file__)) +data_path = os.path.join(data_path, "data") + + +class TestHashing(unittest.TestCase): + def test_hash_file_known_hash(self): + """Ensure a known hash value is recreated.""" + data = b"The quick brown fox jumps over the lazy cog" + expected = "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3" + + temp = NamedTemporaryFile() + temp.write(data) + temp.flush() + + actual = hash_file(temp.name) + + self.assertEqual(actual, expected) + + def test_hash_file_large(self): + """Ensure that hash_file seems to work with a large file.""" + data = b"x" * 1048576 + + hasher = hashlib.sha1() + hasher.update(data) + expected = hasher.hexdigest() + + temp = NamedTemporaryFile() + temp.write(data) + temp.flush() + + actual = hash_file(temp.name) + + self.assertEqual(actual, expected) + + +class TestResolveTargetToMake(unittest.TestCase): + def setUp(self): + self.topobjdir = data_path + + def assertResolve(self, path, expected): + # Handle Windows path separators. + (reldir, target) = resolve_target_to_make(self.topobjdir, path) + if reldir is not None: + reldir = reldir.replace(os.sep, "/") + if target is not None: + target = target.replace(os.sep, "/") + self.assertEqual((reldir, target), expected) + + def test_root_path(self): + self.assertResolve("/test-dir", ("test-dir", None)) + self.assertResolve("/test-dir/with", ("test-dir/with", None)) + self.assertResolve("/test-dir/without", ("test-dir", None)) + self.assertResolve("/test-dir/without/with", ("test-dir/without/with", None)) + + def test_dir(self): + self.assertResolve("test-dir", ("test-dir", None)) + self.assertResolve("test-dir/with", ("test-dir/with", None)) + self.assertResolve("test-dir/with", ("test-dir/with", None)) + self.assertResolve("test-dir/without", ("test-dir", None)) + self.assertResolve("test-dir/without/with", ("test-dir/without/with", None)) + + def test_top_level(self): + self.assertResolve("package", (None, "package")) + # Makefile handling shouldn't affect top-level targets. 
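+ # (A bare "Makefile" is itself the make target; contrast with test_Makefile below, where a Makefile inside a directory is split into a parent directory and a relative target.)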
+ self.assertResolve("Makefile", (None, "Makefile")) + + def test_regular_file(self): + self.assertResolve("test-dir/with/file", ("test-dir/with", "file")) + self.assertResolve( + "test-dir/with/without/file", ("test-dir/with", "without/file") + ) + self.assertResolve( + "test-dir/with/without/with/file", ("test-dir/with/without/with", "file") + ) + + self.assertResolve("test-dir/without/file", ("test-dir", "without/file")) + self.assertResolve( + "test-dir/without/with/file", ("test-dir/without/with", "file") + ) + self.assertResolve( + "test-dir/without/with/without/file", + ("test-dir/without/with", "without/file"), + ) + + def test_Makefile(self): + self.assertResolve("test-dir/with/Makefile", ("test-dir", "with/Makefile")) + self.assertResolve( + "test-dir/with/without/Makefile", ("test-dir/with", "without/Makefile") + ) + self.assertResolve( + "test-dir/with/without/with/Makefile", + ("test-dir/with", "without/with/Makefile"), + ) + + self.assertResolve( + "test-dir/without/Makefile", ("test-dir", "without/Makefile") + ) + self.assertResolve( + "test-dir/without/with/Makefile", ("test-dir", "without/with/Makefile") + ) + self.assertResolve( + "test-dir/without/with/without/Makefile", + ("test-dir/without/with", "without/Makefile"), + ) + + +class TestHierarchicalStringList(unittest.TestCase): + def setUp(self): + self.EXPORTS = HierarchicalStringList() + + def test_exports_append(self): + self.assertEqual(self.EXPORTS._strings, []) + self.EXPORTS += ["foo.h"] + self.assertEqual(self.EXPORTS._strings, ["foo.h"]) + self.EXPORTS += ["bar.h"] + self.assertEqual(self.EXPORTS._strings, ["foo.h", "bar.h"]) + + def test_exports_subdir(self): + self.assertEqual(self.EXPORTS._children, {}) + self.EXPORTS.foo += ["foo.h"] + six.assertCountEqual(self, self.EXPORTS._children, {"foo": True}) + self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"]) + self.EXPORTS.bar += ["bar.h"] + six.assertCountEqual(self, self.EXPORTS._children, {"foo": True, "bar": True}) + self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"]) + self.assertEqual(self.EXPORTS.bar._strings, ["bar.h"]) + + def test_exports_multiple_subdir(self): + self.EXPORTS.foo.bar = ["foobar.h"] + six.assertCountEqual(self, self.EXPORTS._children, {"foo": True}) + six.assertCountEqual(self, self.EXPORTS.foo._children, {"bar": True}) + six.assertCountEqual(self, self.EXPORTS.foo.bar._children, {}) + self.assertEqual(self.EXPORTS._strings, []) + self.assertEqual(self.EXPORTS.foo._strings, []) + self.assertEqual(self.EXPORTS.foo.bar._strings, ["foobar.h"]) + + def test_invalid_exports_append(self): + with self.assertRaises(ValueError) as ve: + self.EXPORTS += "foo.h" + six.assertRegex( + self, + str(ve.exception), + "Expected a list of strings, not <(?:type|class) '%s'>" % str_type, + ) + + def test_invalid_exports_set(self): + with self.assertRaises(ValueError) as ve: + self.EXPORTS.foo = "foo.h" + + six.assertRegex( + self, + str(ve.exception), + "Expected a list of strings, not <(?:type|class) '%s'>" % str_type, + ) + + def test_invalid_exports_append_base(self): + with self.assertRaises(ValueError) as ve: + self.EXPORTS += "foo.h" + + six.assertRegex( + self, + str(ve.exception), + "Expected a list of strings, not <(?:type|class) '%s'>" % str_type, + ) + + def test_invalid_exports_bool(self): + with self.assertRaises(ValueError) as ve: + self.EXPORTS += [True] + + six.assertRegex( + self, + str(ve.exception), + "Expected a list of strings, not an element of " "<(?:type|class) 'bool'>", + ) + + def test_del_exports(self): + with 
self.assertRaises(MozbuildDeletionError): + self.EXPORTS.foo += ["bar.h"] + del self.EXPORTS.foo + + def test_unsorted(self): + with self.assertRaises(UnsortedError): + self.EXPORTS += ["foo.h", "bar.h"] + + with self.assertRaises(UnsortedError): + self.EXPORTS.foo = ["foo.h", "bar.h"] + + with self.assertRaises(UnsortedError): + self.EXPORTS.foo += ["foo.h", "bar.h"] + + def test_reassign(self): + self.EXPORTS.foo = ["foo.h"] + + with self.assertRaises(KeyError): + self.EXPORTS.foo = ["bar.h"] + + def test_walk(self): + l = HierarchicalStringList() + l += ["root1", "root2", "root3"] + l.child1 += ["child11", "child12", "child13"] + l.child1.grandchild1 += ["grandchild111", "grandchild112"] + l.child1.grandchild2 += ["grandchild121", "grandchild122"] + l.child2.grandchild1 += ["grandchild211", "grandchild212"] + l.child2.grandchild1 += ["grandchild213", "grandchild214"] + + els = list((path, list(seq)) for path, seq in l.walk()) + self.assertEqual( + els, + [ + ("", ["root1", "root2", "root3"]), + ("child1", ["child11", "child12", "child13"]), + ("child1/grandchild1", ["grandchild111", "grandchild112"]), + ("child1/grandchild2", ["grandchild121", "grandchild122"]), + ( + "child2/grandchild1", + [ + "grandchild211", + "grandchild212", + "grandchild213", + "grandchild214", + ], + ), + ], + ) + + def test_merge(self): + l1 = HierarchicalStringList() + l1 += ["root1", "root2", "root3"] + l1.child1 += ["child11", "child12", "child13"] + l1.child1.grandchild1 += ["grandchild111", "grandchild112"] + l1.child1.grandchild2 += ["grandchild121", "grandchild122"] + l1.child2.grandchild1 += ["grandchild211", "grandchild212"] + l1.child2.grandchild1 += ["grandchild213", "grandchild214"] + l2 = HierarchicalStringList() + l2.child1 += ["child14", "child15"] + l2.child1.grandchild2 += ["grandchild123"] + l2.child3 += ["child31", "child32"] + + l1 += l2 + els = list((path, list(seq)) for path, seq in l1.walk()) + self.assertEqual( + els, + [ + ("", ["root1", "root2", "root3"]), + ("child1", ["child11", "child12", "child13", "child14", "child15"]), + ("child1/grandchild1", ["grandchild111", "grandchild112"]), + ( + "child1/grandchild2", + ["grandchild121", "grandchild122", "grandchild123"], + ), + ( + "child2/grandchild1", + [ + "grandchild211", + "grandchild212", + "grandchild213", + "grandchild214", + ], + ), + ("child3", ["child31", "child32"]), + ], + ) + + +class TestStrictOrderingOnAppendList(unittest.TestCase): + def test_init(self): + l = StrictOrderingOnAppendList() + self.assertEqual(len(l), 0) + + l = StrictOrderingOnAppendList(["a", "b", "c"]) + self.assertEqual(len(l), 3) + + with self.assertRaises(UnsortedError): + StrictOrderingOnAppendList(["c", "b", "a"]) + + self.assertEqual(len(l), 3) + + def test_extend(self): + l = StrictOrderingOnAppendList() + l.extend(["a", "b"]) + self.assertEqual(len(l), 2) + self.assertIsInstance(l, StrictOrderingOnAppendList) + + with self.assertRaises(UnsortedError): + l.extend(["d", "c"]) + + self.assertEqual(len(l), 2) + + def test_slicing(self): + l = StrictOrderingOnAppendList() + l[:] = ["a", "b"] + self.assertEqual(len(l), 2) + self.assertIsInstance(l, StrictOrderingOnAppendList) + + with self.assertRaises(UnsortedError): + l[:] = ["b", "a"] + + self.assertEqual(len(l), 2) + + def test_add(self): + l = StrictOrderingOnAppendList() + l2 = l + ["a", "b"] + self.assertEqual(len(l), 0) + self.assertEqual(len(l2), 2) + self.assertIsInstance(l2, StrictOrderingOnAppendList) + + with self.assertRaises(UnsortedError): + l2 = l + ["b", "a"] + + 
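+ # Even though the addition above raised, l itself must be left untouched.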
self.assertEqual(len(l), 0) + + def test_iadd(self): + l = StrictOrderingOnAppendList() + l += ["a", "b"] + self.assertEqual(len(l), 2) + self.assertIsInstance(l, StrictOrderingOnAppendList) + + with self.assertRaises(UnsortedError): + l += ["b", "a"] + + self.assertEqual(len(l), 2) + + def test_add_after_iadd(self): + l = StrictOrderingOnAppendList(["b"]) + l += ["a"] + l2 = l + ["c", "d"] + self.assertEqual(len(l), 2) + self.assertEqual(len(l2), 4) + self.assertIsInstance(l2, StrictOrderingOnAppendList) + with self.assertRaises(UnsortedError): + l2 = l + ["d", "c"] + + self.assertEqual(len(l), 2) + + def test_add_StrictOrderingOnAppendList(self): + l = StrictOrderingOnAppendList() + l += ["c", "d"] + l += ["a", "b"] + l2 = StrictOrderingOnAppendList() + with self.assertRaises(UnsortedError): + l2 += list(l) + # Adding a StrictOrderingOnAppendList to another shouldn't throw + l2 += l + + +class TestStrictOrderingOnAppendListWithAction(unittest.TestCase): + def setUp(self): + self.action = lambda a: (a, id(a)) + + def assertSameList(self, expected, actual): + self.assertEqual(len(expected), len(actual)) + for idx, item in enumerate(actual): + self.assertEqual(item, expected[idx]) + + def test_init(self): + l = StrictOrderingOnAppendListWithAction(action=self.action) + self.assertEqual(len(l), 0) + original = ["a", "b", "c"] + l = StrictOrderingOnAppendListWithAction(["a", "b", "c"], action=self.action) + expected = [self.action(i) for i in original] + self.assertSameList(expected, l) + + with self.assertRaises(ValueError): + StrictOrderingOnAppendListWithAction("abc", action=self.action) + + with self.assertRaises(ValueError): + StrictOrderingOnAppendListWithAction() + + def test_extend(self): + l = StrictOrderingOnAppendListWithAction(action=self.action) + original = ["a", "b"] + l.extend(original) + expected = [self.action(i) for i in original] + self.assertSameList(expected, l) + + with self.assertRaises(ValueError): + l.extend("ab") + + def test_slicing(self): + l = StrictOrderingOnAppendListWithAction(action=self.action) + original = ["a", "b"] + l[:] = original + expected = [self.action(i) for i in original] + self.assertSameList(expected, l) + + with self.assertRaises(ValueError): + l[:] = "ab" + + def test_add(self): + l = StrictOrderingOnAppendListWithAction(action=self.action) + original = ["a", "b"] + l2 = l + original + expected = [self.action(i) for i in original] + self.assertSameList(expected, l2) + + with self.assertRaises(ValueError): + l + "abc" + + def test_iadd(self): + l = StrictOrderingOnAppendListWithAction(action=self.action) + original = ["a", "b"] + l += original + expected = [self.action(i) for i in original] + self.assertSameList(expected, l) + + with self.assertRaises(ValueError): + l += "abc" + + +class TestStrictOrderingOnAppendListWithFlagsFactory(unittest.TestCase): + def test_strict_ordering_on_append_list_with_flags_factory(self): + cls = StrictOrderingOnAppendListWithFlagsFactory( + { + "foo": bool, + "bar": int, + } + ) + + l = cls() + l += ["a", "b"] + + with self.assertRaises(Exception): + l["a"] = "foo" + + with self.assertRaises(Exception): + l["c"] + + self.assertEqual(l["a"].foo, False) + l["a"].foo = True + self.assertEqual(l["a"].foo, True) + + with self.assertRaises(TypeError): + l["a"].bar = "bar" + + self.assertEqual(l["a"].bar, 0) + l["a"].bar = 42 + self.assertEqual(l["a"].bar, 42) + + l["b"].foo = True + self.assertEqual(l["b"].foo, True) + + with self.assertRaises(AttributeError): + l["b"].baz = False + + l["b"].update(foo=False, bar=12) 
+ self.assertEqual(l["b"].foo, False) + self.assertEqual(l["b"].bar, 12) + + with self.assertRaises(AttributeError): + l["b"].update(xyz=1) + + def test_strict_ordering_on_append_list_with_flags_factory_extend(self): + FooList = StrictOrderingOnAppendListWithFlagsFactory( + {"foo": bool, "bar": six.text_type} + ) + foo = FooList(["a", "b", "c"]) + foo["a"].foo = True + foo["b"].bar = "bar" + + # Don't allow extending lists with different flag definitions. + BarList = StrictOrderingOnAppendListWithFlagsFactory( + {"foo": six.text_type, "baz": bool} + ) + bar = BarList(["d", "e", "f"]) + bar["d"].foo = "foo" + bar["e"].baz = True + with self.assertRaises(ValueError): + foo + bar + with self.assertRaises(ValueError): + bar + foo + + # It's not obvious what to do with duplicate list items with possibly + # different flag values, so don't allow that case. + with self.assertRaises(ValueError): + foo + foo + + def assertExtended(l): + self.assertEqual(len(l), 6) + self.assertEqual(l["a"].foo, True) + self.assertEqual(l["b"].bar, "bar") + self.assertTrue("c" in l) + self.assertEqual(l["d"].foo, True) + self.assertEqual(l["e"].bar, "bar") + self.assertTrue("f" in l) + + # Test extend. + zot = FooList(["d", "e", "f"]) + zot["d"].foo = True + zot["e"].bar = "bar" + zot.extend(foo) + assertExtended(zot) + + # Test __add__. + zot = FooList(["d", "e", "f"]) + zot["d"].foo = True + zot["e"].bar = "bar" + assertExtended(foo + zot) + assertExtended(zot + foo) + + # Test __iadd__. + foo += zot + assertExtended(foo) + + # Test __setitem__. + foo[3:] = [] + self.assertEqual(len(foo), 3) + foo[3:] = zot + assertExtended(foo) + + +class TestMemoize(unittest.TestCase): + def test_memoize(self): + self._count = 0 + + @memoize + def wrapped(a, b): + self._count += 1 + return a + b + + self.assertEqual(self._count, 0) + self.assertEqual(wrapped(1, 1), 2) + self.assertEqual(self._count, 1) + self.assertEqual(wrapped(1, 1), 2) + self.assertEqual(self._count, 1) + self.assertEqual(wrapped(2, 1), 3) + self.assertEqual(self._count, 2) + self.assertEqual(wrapped(1, 2), 3) + self.assertEqual(self._count, 3) + self.assertEqual(wrapped(1, 2), 3) + self.assertEqual(self._count, 3) + self.assertEqual(wrapped(1, 1), 2) + self.assertEqual(self._count, 3) + + def test_memoize_method(self): + class foo(object): + def __init__(self): + self._count = 0 + + @memoize + def wrapped(self, a, b): + self._count += 1 + return a + b + + instance = foo() + refcount = sys.getrefcount(instance) + self.assertEqual(instance._count, 0) + self.assertEqual(instance.wrapped(1, 1), 2) + self.assertEqual(instance._count, 1) + self.assertEqual(instance.wrapped(1, 1), 2) + self.assertEqual(instance._count, 1) + self.assertEqual(instance.wrapped(2, 1), 3) + self.assertEqual(instance._count, 2) + self.assertEqual(instance.wrapped(1, 2), 3) + self.assertEqual(instance._count, 3) + self.assertEqual(instance.wrapped(1, 2), 3) + self.assertEqual(instance._count, 3) + self.assertEqual(instance.wrapped(1, 1), 2) + self.assertEqual(instance._count, 3) + + # Memoization of methods is expected to not keep references to + # instances, so the refcount shouldn't have changed after executing the + # memoized method. 
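+ # (The implementation achieves this by stashing the per-instance cache on
+ # the instance itself rather than keying a shared cache by the instance,
+ # so no extra reference to the instance is held.)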
+ self.assertEqual(refcount, sys.getrefcount(instance)) + + def test_memoized_property(self): + class foo(object): + def __init__(self): + self._count = 0 + + @memoized_property + def wrapped(self): + self._count += 1 + return 42 + + instance = foo() + self.assertEqual(instance._count, 0) + self.assertEqual(instance.wrapped, 42) + self.assertEqual(instance._count, 1) + self.assertEqual(instance.wrapped, 42) + self.assertEqual(instance._count, 1) + + +class TestTypedList(unittest.TestCase): + def test_init(self): + cls = TypedList(int) + l = cls() + self.assertEqual(len(l), 0) + + l = cls([1, 2, 3]) + self.assertEqual(len(l), 3) + + with self.assertRaises(ValueError): + cls([1, 2, "c"]) + + def test_extend(self): + cls = TypedList(int) + l = cls() + l.extend([1, 2]) + self.assertEqual(len(l), 2) + self.assertIsInstance(l, cls) + + with self.assertRaises(ValueError): + l.extend([3, "c"]) + + self.assertEqual(len(l), 2) + + def test_slicing(self): + cls = TypedList(int) + l = cls() + l[:] = [1, 2] + self.assertEqual(len(l), 2) + self.assertIsInstance(l, cls) + + with self.assertRaises(ValueError): + l[:] = [3, "c"] + + self.assertEqual(len(l), 2) + + def test_add(self): + cls = TypedList(int) + l = cls() + l2 = l + [1, 2] + self.assertEqual(len(l), 0) + self.assertEqual(len(l2), 2) + self.assertIsInstance(l2, cls) + + with self.assertRaises(ValueError): + l2 = l + [3, "c"] + + self.assertEqual(len(l), 0) + + def test_iadd(self): + cls = TypedList(int) + l = cls() + l += [1, 2] + self.assertEqual(len(l), 2) + self.assertIsInstance(l, cls) + + with self.assertRaises(ValueError): + l += [3, "c"] + + self.assertEqual(len(l), 2) + + def test_add_coercion(self): + objs = [] + + class Foo(object): + def __init__(self, obj): + objs.append(obj) + + cls = TypedList(Foo) + l = cls() + l += [1, 2] + self.assertEqual(len(objs), 2) + self.assertEqual(type(l[0]), Foo) + self.assertEqual(type(l[1]), Foo) + + # Adding a TypedList to a TypedList shouldn't trigger coercion again + l2 = cls() + l2 += l + self.assertEqual(len(objs), 2) + self.assertEqual(type(l2[0]), Foo) + self.assertEqual(type(l2[1]), Foo) + + # Adding a TypedList to a TypedList shouldn't even trigger the code + # that does coercion at all. 
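+ # (The raw list.__setitem__ call below bypasses the coercion hook,
+ # planting plain ints in l; they must survive l2 += l unchanged.)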
+ l2 = cls() + list.__setitem__(l, slice(0, -1), [1, 2]) + l2 += l + self.assertEqual(len(objs), 2) + self.assertEqual(type(l2[0]), int) + self.assertEqual(type(l2[1]), int) + + def test_memoized(self): + cls = TypedList(int) + cls2 = TypedList(str) + self.assertEqual(TypedList(int), cls) + self.assertNotEqual(cls, cls2) + + +class TypedTestStrictOrderingOnAppendList(unittest.TestCase): + def test_init(self): + class Unicode(six.text_type): + def __new__(cls, other): + if not isinstance(other, six.text_type): + raise ValueError() + return six.text_type.__new__(cls, other) + + cls = TypedList(Unicode, StrictOrderingOnAppendList) + l = cls() + self.assertEqual(len(l), 0) + + l = cls(["a", "b", "c"]) + self.assertEqual(len(l), 3) + + with self.assertRaises(UnsortedError): + cls(["c", "b", "a"]) + + with self.assertRaises(ValueError): + cls(["a", "b", 3]) + + self.assertEqual(len(l), 3) + + +class TestTypedNamedTuple(unittest.TestCase): + def test_simple(self): + FooBar = TypedNamedTuple("FooBar", [("foo", six.text_type), ("bar", int)]) + + t = FooBar(foo="foo", bar=2) + self.assertEqual(type(t), FooBar) + self.assertEqual(t.foo, "foo") + self.assertEqual(t.bar, 2) + self.assertEqual(t[0], "foo") + self.assertEqual(t[1], 2) + + FooBar("foo", 2) + + with self.assertRaises(TypeError): + FooBar("foo", "not integer") + with self.assertRaises(TypeError): + FooBar(2, 4) + + # Passing a tuple as the first argument is the same as passing multiple + # arguments. + t1 = ("foo", 3) + t2 = FooBar(t1) + self.assertEqual(type(t2), FooBar) + self.assertEqual(FooBar(t1), FooBar("foo", 3)) + + +class TestGroupUnifiedFiles(unittest.TestCase): + FILES = ["%s.cpp" % letter for letter in string.ascii_lowercase] + + def test_multiple_files(self): + mapping = list(group_unified_files(self.FILES, "Unified", "cpp", 5)) + + def check_mapping(index, expected_num_source_files): + (unified_file, source_files) = mapping[index] + + self.assertEqual(unified_file, "Unified%d.cpp" % index) + self.assertEqual(len(source_files), expected_num_source_files) + + all_files = list(itertools.chain(*[files for (_, files) in mapping])) + self.assertEqual(len(all_files), len(self.FILES)) + self.assertEqual(set(all_files), set(self.FILES)) + + expected_amounts = [5, 5, 5, 5, 5, 1] + for i, amount in enumerate(expected_amounts): + check_mapping(i, amount) + + +class TestMisc(unittest.TestCase): + def test_pair(self): + self.assertEqual(list(pair([1, 2, 3, 4, 5, 6])), [(1, 2), (3, 4), (5, 6)]) + + self.assertEqual( + list(pair([1, 2, 3, 4, 5, 6, 7])), [(1, 2), (3, 4), (5, 6), (7, None)] + ) + + def test_expand_variables(self): + self.assertEqual(expand_variables("$(var)", {"var": "value"}), "value") + + self.assertEqual( + expand_variables("$(a) and $(b)", {"a": "1", "b": "2"}), "1 and 2" + ) + + self.assertEqual( + expand_variables("$(a) and $(undefined)", {"a": "1", "b": "2"}), "1 and " + ) + + self.assertEqual( + expand_variables( + "before $(string) between $(list) after", + {"string": "abc", "list": ["a", "b", "c"]}, + ), + "before abc between a b c after", + ) + + +class TestEnumString(unittest.TestCase): + def test_string(self): + CompilerType = EnumString.subclass("gcc", "clang", "clang-cl") + + type = CompilerType("gcc") + self.assertEqual(type, "gcc") + self.assertNotEqual(type, "clang") + self.assertNotEqual(type, "clang-cl") + self.assertIn(type, ("gcc", "clang-cl")) + self.assertNotIn(type, ("clang", "clang-cl")) + + with self.assertRaises(EnumStringComparisonError): + self.assertEqual(type, "foo") + + with 
self.assertRaises(EnumStringComparisonError): + self.assertNotEqual(type, "foo") + + with self.assertRaises(EnumStringComparisonError): + self.assertIn(type, ("foo", "gcc")) + + with self.assertRaises(ValueError): + type = CompilerType("foo") + + +class TestHexDump(unittest.TestCase): + @unittest.skipUnless(six.PY3, "requires Python 3") + def test_hexdump(self): + self.assertEqual( + hexdump("abcdef123💩ZYXWVU".encode("utf-8")), + [ + "00 61 62 63 64 65 66 31 32 33 f0 9f 92 a9 5a 59 58 |abcdef123....ZYX|\n", + "10 57 56 55 |WVU |\n", + ], + ) + + +def test_read_only_dict(): + d = ReadOnlyDict(foo="bar") + with pytest.raises(Exception): + d["foo"] = "baz" + + with pytest.raises(Exception): + d.update({"foo": "baz"}) + + with pytest.raises(Exception): + del d["foo"] + + # ensure copy still works + d_copy = d.copy() + assert d == d_copy + # TODO Returning a dict here feels like a bug, but there are places in-tree + # relying on this behaviour. + assert isinstance(d_copy, dict) + + d_copy = copy.copy(d) + assert d == d_copy + assert isinstance(d_copy, ReadOnlyDict) + + d_copy = copy.deepcopy(d) + assert d == d_copy + assert isinstance(d_copy, ReadOnlyDict) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py b/python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py new file mode 100644 index 0000000000..38c8941562 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py @@ -0,0 +1,110 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +"""Tests for the FileAvoidWrite object.""" + +import locale +import pathlib + +import pytest +from mozunit import main + +from mozbuild.util import FileAvoidWrite + + +@pytest.fixture +def tmp_path(tmpdir): + """Backport of the tmp_path fixture from pytest 3.9.1.""" + return pathlib.Path(str(tmpdir)) + + +def test_overwrite_contents(tmp_path): + file = tmp_path / "file.txt" + file.write_text("abc") + + faw = FileAvoidWrite(str(file)) + faw.write("bazqux") + + assert faw.close() == (True, True) + assert file.read_text() == "bazqux" + + +def test_store_new_contents(tmp_path): + file = tmp_path / "file.txt" + + faw = FileAvoidWrite(str(file)) + faw.write("content") + + assert faw.close() == (False, True) + assert file.read_text() == "content" + + +def test_change_binary_file_contents(tmp_path): + file = tmp_path / "file.dat" + file.write_bytes(b"\0") + + faw = FileAvoidWrite(str(file), readmode="rb") + faw.write(b"\0\0\0") + + assert faw.close() == (True, True) + assert file.read_bytes() == b"\0\0\0" + + +def test_obj_as_context_manager(tmp_path): + file = tmp_path / "file.txt" + + with FileAvoidWrite(str(file)) as fh: + fh.write("foobar") + + assert file.read_text() == "foobar" + + +def test_no_write_happens_if_file_contents_same(tmp_path): + file = tmp_path / "file.txt" + file.write_text("content") + original_write_time = file.stat().st_mtime + + faw = FileAvoidWrite(str(file)) + faw.write("content") + + assert faw.close() == (True, False) + assert file.stat().st_mtime == original_write_time + + +def test_diff_not_created_by_default(tmp_path): + file = tmp_path / "file.txt" + faw = FileAvoidWrite(str(file)) + faw.write("dummy") + faw.close() + assert faw.diff is None + + +def test_diff_update(tmp_path): + file = tmp_path / "diffable.txt" + file.write_text("old") + + faw = FileAvoidWrite(str(file), capture_diff=True) + 
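+ # With capture_diff=True, close() records a diff of the old and new
+ # contents, exposed as the .diff attribute inspected below.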
faw.write("new") + faw.close() + + diff = "\n".join(faw.diff) + assert "-old" in diff + assert "+new" in diff + + +@pytest.mark.skipif( + locale.getdefaultlocale()[1] == "cp1252", + reason="Fails on win32 terminals with cp1252 encoding", +) +def test_write_unicode(tmp_path): + # Unicode grinning face :D + binary_emoji = b"\xf0\x9f\x98\x80" + + file = tmp_path / "file.dat" + faw = FileAvoidWrite(str(file)) + faw.write(binary_emoji) + faw.close() + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozbuild/test/test_vendor.py b/python/mozbuild/mozbuild/test/test_vendor.py new file mode 100644 index 0000000000..07ba088337 --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_vendor.py @@ -0,0 +1,48 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import subprocess +import tempfile +from unittest.mock import Mock + +import mozunit +from buildconfig import topsrcdir + +from mozbuild.vendor.vendor_python import VendorPython + + +def test_up_to_date_vendor(): + with tempfile.TemporaryDirectory() as work_dir: + subprocess.check_call(["hg", "init", work_dir]) + os.makedirs(os.path.join(work_dir, "third_party")) + shutil.copytree( + os.path.join(topsrcdir, os.path.join("third_party", "python")), + os.path.join(work_dir, os.path.join("third_party", "python")), + ) + + # Run the vendoring process + vendor = VendorPython( + work_dir, None, Mock(), topobjdir=os.path.join(work_dir, "obj") + ) + vendor.vendor() + + # Verify that re-vendoring did not cause file changes. + # Note that we don't want hg-ignored generated files + # to bust the diff, so we exclude them (pycache, egg-info). + subprocess.check_call( + [ + "diff", + "-r", + os.path.join(topsrcdir, os.path.join("third_party", "python")), + os.path.join(work_dir, os.path.join("third_party", "python")), + "--exclude=__pycache__", + "--strip-trailing-cr", + ] + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozbuild/test/test_vendor_tools.py b/python/mozbuild/mozbuild/test/test_vendor_tools.py new file mode 100644 index 0000000000..271be6d7da --- /dev/null +++ b/python/mozbuild/mozbuild/test/test_vendor_tools.py @@ -0,0 +1,90 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+import mozunit
+
+from mozbuild.vendor.vendor_manifest import list_of_paths_to_readable_string
+
+
+def test_list_of_paths_to_readable_string():
+    paths = ["/tmp/a", "/tmp/b"]
+    s = list_of_paths_to_readable_string(paths)
+    assert not s.endswith(", ]")
+    assert s.endswith("]")
+    assert "/tmp/a" in s
+    assert "/tmp/b" in s
+
+    paths = ["/tmp/a", "/tmp/b", "/tmp/c", "/tmp/d"]
+    s = list_of_paths_to_readable_string(paths)
+    assert not s.endswith(", ")
+    assert s.endswith("]")
+    assert "/tmp/a" not in s
+    assert "/tmp/b" not in s
+    assert "4 items in /tmp" in s
+
+    paths = [
+        "/tmp/a",
+        "/tmp/b",
+        "/tmp/c",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+    ]
+    s = list_of_paths_to_readable_string(paths)
+    assert not s.endswith(", ")
+    assert s.endswith("]")
+    assert "/tmp/a" not in s
+    assert " a" not in s
+    assert "/tmp/b" not in s
+    assert "10 (omitted) items in /tmp" in s
+
+    paths = ["/tmp", "/foo"]
+    s = list_of_paths_to_readable_string(paths)
+    assert not s.endswith(", ")
+    assert s.endswith("]")
+    assert "/tmp" in s
+    assert "/foo" in s
+
+    paths = [
+        "/tmp/a",
+        "/tmp/b",
+        "/tmp/c",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+        "/tmp/d",
+    ]
+    paths.extend(["/foo/w", "/foo/x", "/foo/y", "/foo/z"])
+    paths.extend(["/bar/m", "/bar/n"])
+    paths.extend(["/etc"])
+    s = list_of_paths_to_readable_string(paths)
+    assert not s.endswith(", ")
+    assert s.endswith("]")
+    assert "/tmp/a" not in s
+    assert " d" not in s
+    assert "/tmp/b" not in s
+    assert "10 (omitted) items in /tmp" in s
+
+    assert "/foo/w" not in s
+    assert "/foo/x" not in s
+    assert "4 items in /foo" in s
+    assert " w" in s
+
+    assert "/bar/m" in s
+    assert "/bar/n" in s
+
+    assert "/etc" in s
+
+    assert len(s) < len(str(paths))
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/vendor_requirements.in b/python/mozbuild/mozbuild/test/vendor_requirements.in
new file mode 100644
index 0000000000..852826fc1a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/vendor_requirements.in
@@ -0,0 +1,5 @@
+# Until bug 1724273 lands, python-testing code cannot use a site. Work around
+# this by representing the "vendor" site's dependency as a separate "requirements.txt"
+# file, which can be used by python-test's "requirements" feature.
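+#
+# A hypothetical python.ini entry consuming the compiled lockfile through
+# that feature might look like this (paths illustrative):
+#   [test_vendor.py]
+#   requirements = python/mozbuild/mozbuild/test/vendor_requirements.txt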
+poetry==1.4 +poetry-core==1.5.1 diff --git a/python/mozbuild/mozbuild/test/vendor_requirements.txt b/python/mozbuild/mozbuild/test/vendor_requirements.txt new file mode 100644 index 0000000000..10a32a524c --- /dev/null +++ b/python/mozbuild/mozbuild/test/vendor_requirements.txt @@ -0,0 +1,416 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --generate-hashes --output-file=python/mozbuild/mozbuild/test/vendor_requirements.txt python/mozbuild/mozbuild/test/vendor_requirements.in +# +appdirs==1.4.4 \ + --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ + --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 + # via virtualenv +attrs==22.2.0 \ + --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \ + --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99 + # via jsonschema +build==0.10.0 \ + --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \ + --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269 + # via poetry +cachecontrol[filecache]==0.12.10 \ + --hash=sha256:b0d43d8f71948ef5ebdee5fe236b86c6ffc7799370453dccb0e894c20dfa487c \ + --hash=sha256:d8aca75b82eec92d84b5d6eb8c8f66ea16f09d2adb09dbca27fe2d5fc8d3732d + # via poetry +certifi==2021.10.8 \ + --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \ + --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 + # via requests +charset-normalizer==2.0.12 \ + --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ + --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df + # via requests +cleo==2.0.1 \ + --hash=sha256:6eb133670a3ed1f3b052d53789017b6e50fca66d1287e6e6696285f4cb8ea448 \ + --hash=sha256:eb4b2e1f3063c11085cebe489a6e9124163c226575a3c3be69b2e51af4a15ec5 + # via poetry +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via build +crashtest==0.4.1 \ + --hash=sha256:80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce \ + --hash=sha256:8d23eac5fa660409f57472e3851dab7ac18aba459a8d19cbbba86d3d5aecd2a5 + # via + # cleo + # poetry +distlib==0.3.4 \ + --hash=sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b \ + --hash=sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579 + # via virtualenv +dulwich==0.21.3 \ + --hash=sha256:026427b5ef0f1fe138ed22078e49b00175b58b11e5c18e2be00f06ee0782603b \ + --hash=sha256:03ed9448f2944166e28aa8d3f4c8feeceb5c6880e9ffe5ab274869d45abd9589 \ + --hash=sha256:058aaba18aefe18fcd84b216fd34d032ad453967dcf3dee263278951cd43e2d4 \ + --hash=sha256:075c8e9d2694ff16fc6e8a5ec0c771b7c33be12e4ebecc346fd74315d3d84605 \ + --hash=sha256:08ee426b609dab552839b5c7394ae9af2112c164bb727b7f85a69980eced9251 \ + --hash=sha256:092829f27a2c87cdf6b6523216822859ecf01d281ddfae0e58cad1f44adafff6 \ + --hash=sha256:0b541bd58426a30753ab12cc024ba29b6699d197d9d0d9f130b9768ab20e0e6a \ + --hash=sha256:0cd83f84e58aa59fb9d85cf15e74be83a5be876ac5876d5030f60fcce7ab36f1 \ + --hash=sha256:1799c04bd53ec404ebd2c82c1d66197a31e5f0549c95348bb7d3f57a28c94241 \ + --hash=sha256:1cf246530b8d574b33a9614da76881b96c190c0fe78f76ab016c88082c0da051 \ + --hash=sha256:208d01a9cda1bae16c92e8c54e806701a16969346aba44b8d6921c6c227277a9 \ + 
--hash=sha256:21ee962211839bb6e52d41f363ce9dbb0638d341a1c02263e163d69012f58b25 \ + --hash=sha256:250ec581682af846cb85844f8032b7642dd278006b1c3abd5e8e718eba0b1b00 \ + --hash=sha256:25376efc6ea2ee9daa868a120d4f9c905dcb7774f68931be921fba41a657f58a \ + --hash=sha256:2bf2be68fddfc0adfe43be99ab31f6b0f16b9ef1e40464679ba831ff615ad4a3 \ + --hash=sha256:33f73e8f902c6397cc73a727db1f6e75add8ce894bfbb1a15daa2f7a4138a744 \ + --hash=sha256:3b048f84c94c3284f29bf228f1094ccc48763d76ede5c35632153bd7f697b846 \ + --hash=sha256:40f8f461eba87ef2e8ce0005ca2c12f1b4fdbbafd3a717b8570060d7cd35ee0c \ + --hash=sha256:512bb4b04e403a38860f7eb22abeeaefba3c4a9c08bc7beec8885494c5828034 \ + --hash=sha256:5a1137177b62eec949c0f1564eef73920f842af5ebfc260c20d9cd47e8ecd519 \ + --hash=sha256:6618e35268d116bffddd6dbec360a40c54b3164f8af0513d95d8698f36e2eacc \ + --hash=sha256:67dbf4dd7586b2d437f539d5dc930ebceaf74a4150720644d6ea7e5ffc1cb2ff \ + --hash=sha256:6f8d45f5fcdb52c60c902a951f549faad9979314e7e069f4fa3d14eb409b16a0 \ + --hash=sha256:73f9feba3da1ae66f0b521d7c2727db7f5025a83facdc73f4f39abe2b6d4f00d \ + --hash=sha256:7aaf5c4528e83e3176e7dbb01dcec34fb41c93279a8f8527cf33e5df88bfb910 \ + --hash=sha256:7c69c95d5242171d07396761f759a8a4d566e9a01bf99612f9b9e309e70a80fc \ + --hash=sha256:7ca3b453d767eb83b3ec58f0cfcdc934875a341cdfdb0dc55c1431c96608cf83 \ + --hash=sha256:7f2cb11fe789b72feeae7cdf6e27375c33ed6915f8ca5ea7ce81b5e234c75a9e \ + --hash=sha256:89af4ee347f361338bad5c27b023f9d19e7aed17aa75cb519f28e6cf1658a0ba \ + --hash=sha256:8ad7de37c9ff817bc5d26f89100f87b7f1a5cc25e5eaaa54f11dc66cca9652e4 \ + --hash=sha256:8ba1fe3fb415fd34cae5ca090fb82030b6e8423d6eb2c4c9c4fbf50b15c7664c \ + --hash=sha256:9213a114dd19cfca19715088f12f143e918c5e1b4e26f7acf1a823d7da9e1413 \ + --hash=sha256:9f08e5cc10143d3da2a2cf735d8b932ef4e4e1d74b0c74ce66c52eab02068be8 \ + --hash=sha256:a275b3a579dfd923d6330f6e5c2886dbdb5da4e004c5abecb107eb347d301412 \ + --hash=sha256:a2e6270923bf5ec0e9f720d689579a904f401c62193222d000d8cb8e880684e9 \ + --hash=sha256:a98989ff1ed20825728495ffb859cd700a120850074184d2e1ec08a0b1ab8ab3 \ + --hash=sha256:ae38c6d24d7aff003a241c8f1dd268eb1c6f7625d91e3435836ff5a5eed05ce5 \ + --hash=sha256:af7a417e19068b1abeb9addd3c045a2d6e40d15365af6aa3cbe2d47305b5bb11 \ + --hash=sha256:b09b6166876d2cba8f331a548932b09e11c9386db0525c9ca15c399b666746fc \ + --hash=sha256:b9fc609a3d4009ee31212f435f5a75720ef24280f6d23edfd53f77b562a79c5b \ + --hash=sha256:ba3d42cd83d7f89b9c1b2f76df971e8ab58815f8060da4dc67b9ae9dba1b34cc \ + --hash=sha256:baf5b3b901272837bee2311ecbd28fdbe960d288a070dc72bdfdf48cfcbb8090 \ + --hash=sha256:bb54fe45deb55e4caae4ea2c1dba93ee79fb5c377287b14056d4c30fb156920e \ + --hash=sha256:be0801ae3f9017c6437bcd23a4bf2b2aa88e465f7efeed4b079944d07e3df994 \ + --hash=sha256:c349431f5c8aa99b8744550d0bb4615f63e73450584202ac5db0e5d7da4d82ff \ + --hash=sha256:c80ade5cdb0ea447e7f43b32abc2f4a628dcdfa64dc8ee5ab4262987e5e0814f \ + --hash=sha256:c8d1837c3d2d8e56aacc13a91ec7540b3baadc1b254fbdf225a2d15b72b654c3 \ + --hash=sha256:c97561c22fc05d0f6ba370d9bd67f86c313c38f31a1793e0ee9acb78ee28e4b8 \ + --hash=sha256:cf1f6edc968619a4355481c29d5571726723bc12924e2b25bd3348919f9bc992 \ + --hash=sha256:cf7af6458cf6343a2a0632ae2fc5f04821b2ffefc7b8a27f4eacb726ef89c682 \ + --hash=sha256:d0ac29adf468a838884e1507d81e872096238c76fe7da7f3325507e4390b6867 \ + --hash=sha256:d7ad871d044a96f794170f2434e832c6b42804d0b53721377d03f865245cd273 \ + --hash=sha256:ddb790f2fdc22984fba643866b21d04733c5cf7c3ace2a1e99e0c1c1d2336aab \ + 
--hash=sha256:e3b686b49adeb7fc45791dfae96ffcffeba1038e8b7603f369d6661f59e479fc \ + --hash=sha256:e7b8cb38a93de87b980f882f0dcd19f2e3ad43216f34e06916315cb3a03e6964 \ + --hash=sha256:f4f8ff776ca38ce272d9c164a7f77db8a54a8cad6d9468124317adf8732be07d + # via poetry +filelock==3.10.0 \ + --hash=sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce \ + --hash=sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182 + # via + # poetry + # virtualenv +html5lib==1.1 \ + --hash=sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d \ + --hash=sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f + # via poetry +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==6.1.0 \ + --hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \ + --hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09 + # via + # keyring + # poetry +installer==0.6.0 \ + --hash=sha256:ae7c62d1d6158b5c096419102ad0d01fdccebf857e784cee57f94165635fe038 \ + --hash=sha256:f3bd36cd261b440a88a1190b1becca0578fee90b4b62decc796932fdd5ae8839 + # via poetry +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a + # via keyring +jsonschema==4.17.3 \ + --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ + --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 + # via poetry +keyring==23.13.1 \ + --hash=sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd \ + --hash=sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678 + # via poetry +lockfile==0.12.2 \ + --hash=sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799 \ + --hash=sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa + # via + # cachecontrol + # poetry +more-itertools==9.1.0 \ + --hash=sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d \ + --hash=sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3 + # via jaraco-classes +msgpack==1.0.3 \ + --hash=sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc \ + --hash=sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147 \ + --hash=sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3 \ + --hash=sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba \ + --hash=sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39 \ + --hash=sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85 \ + --hash=sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9 \ + --hash=sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a \ + --hash=sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec \ + --hash=sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88 \ + --hash=sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e \ + --hash=sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a \ + --hash=sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b \ + --hash=sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1 \ + 
--hash=sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3 \ + --hash=sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef \ + --hash=sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079 \ + --hash=sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52 \ + --hash=sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a \ + --hash=sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a \ + --hash=sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4 \ + --hash=sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996 \ + --hash=sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73 \ + --hash=sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a \ + --hash=sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920 \ + --hash=sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7 \ + --hash=sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d \ + --hash=sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770 \ + --hash=sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50 \ + --hash=sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2 \ + --hash=sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2 \ + --hash=sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d \ + --hash=sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea \ + --hash=sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611 + # via cachecontrol +packaging==20.9 \ + --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ + --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a + # via + # build + # poetry +pexpect==4.8.0 \ + --hash=sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937 \ + --hash=sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c + # via poetry +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 + # via poetry +platformdirs==2.6.2 \ + --hash=sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490 \ + --hash=sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2 + # via poetry +poetry==1.4.0 \ + --hash=sha256:151ad741e163a329c8b13ea602dde979b7616fc350cfcff74b604e93263934a8 \ + --hash=sha256:f88a7a812a5d8c1f5a378e0924f898926b2ac10c3b5c03f7282f2182f90d8507 + # via + # -r python/mozbuild/mozbuild/test/vendor_requirements.in + # poetry-plugin-export +poetry-core==1.5.1 \ + --hash=sha256:41887261358863f25831fa0ad1fe7e451fc32d1c81fcf7710ba5174cc0047c6d \ + --hash=sha256:b1900dea81eb18feb7323d404e5f10430205541a4a683a912893f9d2b5807797 + # via + # -r python/mozbuild/mozbuild/test/vendor_requirements.in + # poetry + # poetry-plugin-export +poetry-plugin-export==1.3.0 \ + --hash=sha256:61ae5ec1db233aba947a48e1ce54c6ff66afd0e1c87195d6bce64c73a5ae658c \ + --hash=sha256:6e5919bf84afcb08cdd419a03f909f490d8671f00633a3c6df8ba09b0820dc2f + # via poetry +ptyprocess==0.7.0 \ + --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \ + --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220 + # via pexpect +pyparsing==3.0.8 \ + 
--hash=sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954 \ + --hash=sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06 + # via packaging +pyproject-hooks==1.0.0 \ + --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via + # build + # poetry +pyrsistent==0.19.3 \ + --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \ + --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \ + --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \ + --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \ + --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \ + --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \ + --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \ + --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \ + --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \ + --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \ + --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \ + --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \ + --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \ + --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \ + --hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \ + --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \ + --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \ + --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \ + --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \ + --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \ + --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \ + --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \ + --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \ + --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \ + --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \ + --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \ + --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c + # via jsonschema +pywin32-ctypes==0.2.0 \ + --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \ + --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 + # via keyring +rapidfuzz==2.13.7 \ + --hash=sha256:020858dd89b60ce38811cd6e37875c4c3c8d7fcd8bc20a0ad2ed1f464b34dc4e \ + --hash=sha256:042644133244bfa7b20de635d500eb9f46af7097f3d90b1724f94866f17cb55e \ + --hash=sha256:08590905a95ccfa43f4df353dcc5d28c15d70664299c64abcad8721d89adce4f \ + --hash=sha256:114810491efb25464016fd554fdf1e20d390309cecef62587494fc474d4b926f \ + --hash=sha256:1333fb3d603d6b1040e365dca4892ba72c7e896df77a54eae27dc07db90906e3 \ + --hash=sha256:16080c05a63d6042643ae9b6cfec1aefd3e61cef53d0abe0df3069b9d4b72077 \ + --hash=sha256:16ffad751f43ab61001187b3fb4a9447ec2d1aedeff7c5bac86d3b95f9980cc3 \ + 
--hash=sha256:1f50d1227e6e2a0e3ae1fb1c9a2e1c59577d3051af72c7cab2bcc430cb5e18da \ + --hash=sha256:1fbad8fb28d98980f5bff33c7842efef0315d42f0cd59082108482a7e6b61410 \ + --hash=sha256:23524635840500ce6f4d25005c9529a97621689c85d2f727c52eed1782839a6a \ + --hash=sha256:24d3fea10680d085fd0a4d76e581bfb2b1074e66e78fd5964d4559e1fcd2a2d4 \ + --hash=sha256:24eb6b843492bdc63c79ee4b2f104059b7a2201fef17f25177f585d3be03405a \ + --hash=sha256:25b4cedf2aa19fb7212894ce5f5219010cce611b60350e9a0a4d492122e7b351 \ + --hash=sha256:27be9c63215d302ede7d654142a2e21f0d34ea6acba512a4ae4cfd52bbaa5b59 \ + --hash=sha256:2c836f0f2d33d4614c3fbaf9a1eb5407c0fe23f8876f47fd15b90f78daa64c34 \ + --hash=sha256:3a9bd02e1679c0fd2ecf69b72d0652dbe2a9844eaf04a36ddf4adfbd70010e95 \ + --hash=sha256:3d8b081988d0a49c486e4e845a547565fee7c6e7ad8be57ff29c3d7c14c6894c \ + --hash=sha256:3dcffe1f3cbda0dc32133a2ae2255526561ca594f15f9644384549037b355245 \ + --hash=sha256:3f11a7eff7bc6301cd6a5d43f309e22a815af07e1f08eeb2182892fca04c86cb \ + --hash=sha256:42085d4b154a8232767de8296ac39c8af5bccee6b823b0507de35f51c9cbc2d7 \ + --hash=sha256:424f82c35dbe4f83bdc3b490d7d696a1dc6423b3d911460f5493b7ffae999fd2 \ + --hash=sha256:43fb8cb030f888c3f076d40d428ed5eb4331f5dd6cf1796cfa39c67bf0f0fc1e \ + --hash=sha256:460853983ab88f873173e27cc601c5276d469388e6ad6e08c4fd57b2a86f1064 \ + --hash=sha256:467c1505362823a5af12b10234cb1c4771ccf124c00e3fc9a43696512bd52293 \ + --hash=sha256:46b9b8aa09998bc48dd800854e8d9b74bc534d7922c1d6e1bbf783e7fa6ac29c \ + --hash=sha256:53dcae85956853b787c27c1cb06f18bb450e22cf57a4ad3444cf03b8ff31724a \ + --hash=sha256:585206112c294e335d84de5d5f179c0f932837752d7420e3de21db7fdc476278 \ + --hash=sha256:5ada0a14c67452358c1ee52ad14b80517a87b944897aaec3e875279371a9cb96 \ + --hash=sha256:5e2b3d020219baa75f82a4e24b7c8adcb598c62f0e54e763c39361a9e5bad510 \ + --hash=sha256:6120f2995f5154057454c5de99d86b4ef3b38397899b5da1265467e8980b2f60 \ + --hash=sha256:68a89bb06d5a331511961f4d3fa7606f8e21237467ba9997cae6f67a1c2c2b9e \ + --hash=sha256:7496e8779905b02abc0ab4ba2a848e802ab99a6e20756ffc967a0de4900bd3da \ + --hash=sha256:759a3361711586a29bc753d3d1bdb862983bd9b9f37fbd7f6216c24f7c972554 \ + --hash=sha256:75c45dcd595f8178412367e302fd022860ea025dc4a78b197b35428081ed33d5 \ + --hash=sha256:7d005e058d86f2a968a8d28ca6f2052fab1f124a39035aa0523261d6baf21e1f \ + --hash=sha256:7f7930adf84301797c3f09c94b9c5a9ed90a9e8b8ed19b41d2384937e0f9f5bd \ + --hash=sha256:8109e0324d21993d5b2d111742bf5958f3516bf8c59f297c5d1cc25a2342eb66 \ + --hash=sha256:81642a24798851b118f82884205fc1bd9ff70b655c04018c467824b6ecc1fabc \ + --hash=sha256:8450d15f7765482e86ef9be2ad1a05683cd826f59ad236ef7b9fb606464a56aa \ + --hash=sha256:875d51b3497439a72e2d76183e1cb5468f3f979ab2ddfc1d1f7dde3b1ecfb42f \ + --hash=sha256:8b477b43ced896301665183a5e0faec0f5aea2373005648da8bdcb3c4b73f280 \ + --hash=sha256:8d3e252d4127c79b4d7c2ae47271636cbaca905c8bb46d80c7930ab906cf4b5c \ + --hash=sha256:916bc2e6cf492c77ad6deb7bcd088f0ce9c607aaeabc543edeb703e1fbc43e31 \ + --hash=sha256:988f8f6abfba7ee79449f8b50687c174733b079521c3cc121d65ad2d38831846 \ + --hash=sha256:99a84ab9ac9a823e7e93b4414f86344052a5f3e23b23aa365cda01393ad895bd \ + --hash=sha256:9be02162af0376d64b840f2fc8ee3366794fc149f1e06d095a6a1d42447d97c5 \ + --hash=sha256:a5585189b3d90d81ccd62d4f18530d5ac8972021f0aaaa1ffc6af387ff1dce75 \ + --hash=sha256:ae33a72336059213996fe4baca4e0e4860913905c2efb7c991eab33b95a98a0a \ + --hash=sha256:af4f7c3c904ca709493eb66ca9080b44190c38e9ecb3b48b96d38825d5672559 \ + 
--hash=sha256:b20141fa6cee041917801de0bab503447196d372d4c7ee9a03721b0a8edf5337 \ + --hash=sha256:b3210869161a864f3831635bb13d24f4708c0aa7208ef5baac1ac4d46e9b4208 \ + --hash=sha256:b34e8c0e492949ecdd5da46a1cfc856a342e2f0389b379b1a45a3cdcd3176a6e \ + --hash=sha256:b52ac2626945cd21a2487aeefed794c14ee31514c8ae69b7599170418211e6f6 \ + --hash=sha256:b5dd713a1734574c2850c566ac4286594bacbc2d60b9170b795bee4b68656625 \ + --hash=sha256:b5f705652360d520c2de52bee11100c92f59b3e3daca308ebb150cbc58aecdad \ + --hash=sha256:b6389c50d8d214c9cd11a77f6d501529cb23279a9c9cafe519a3a4b503b5f72a \ + --hash=sha256:b6bad92de071cbffa2acd4239c1779f66851b60ffbbda0e4f4e8a2e9b17e7eef \ + --hash=sha256:b75dd0928ce8e216f88660ab3d5c5ffe990f4dd682fd1709dba29d5dafdde6de \ + --hash=sha256:c2523f8180ebd9796c18d809e9a19075a1060b1a170fde3799e83db940c1b6d5 \ + --hash=sha256:c31022d9970177f6affc6d5dd757ed22e44a10890212032fabab903fdee3bfe7 \ + --hash=sha256:c36fd260084bb636b9400bb92016c6bd81fd80e59ed47f2466f85eda1fc9f782 \ + --hash=sha256:c3741cb0bf9794783028e8b0cf23dab917fa5e37a6093b94c4c2f805f8e36b9f \ + --hash=sha256:c3fbe449d869ea4d0909fc9d862007fb39a584fb0b73349a6aab336f0d90eaed \ + --hash=sha256:c66546e30addb04a16cd864f10f5821272a1bfe6462ee5605613b4f1cb6f7b48 \ + --hash=sha256:c71d9d512b76f05fa00282227c2ae884abb60e09f08b5ca3132b7e7431ac7f0d \ + --hash=sha256:c8601a66fbfc0052bb7860d2eacd303fcde3c14e87fdde409eceff516d659e77 \ + --hash=sha256:c88adbcb933f6b8612f6c593384bf824e562bb35fc8a0f55fac690ab5b3486e5 \ + --hash=sha256:ca00fafd2756bc9649bf80f1cf72c647dce38635f0695d7ce804bc0f759aa756 \ + --hash=sha256:ca8a23097c1f50e0fdb4de9e427537ca122a18df2eead06ed39c3a0bef6d9d3a \ + --hash=sha256:cda1e2f66bb4ba7261a0f4c2d052d5d909798fca557cbff68f8a79a87d66a18f \ + --hash=sha256:cdfc04f7647c29fb48da7a04082c34cdb16f878d3c6d098d62d5715c0ad3000c \ + --hash=sha256:cf62dacb3f9234f3fddd74e178e6d25c68f2067fde765f1d95f87b1381248f58 \ + --hash=sha256:d00df2e4a81ffa56a6b1ec4d2bc29afdcb7f565e0b8cd3092fece2290c4c7a79 \ + --hash=sha256:d248a109699ce9992304e79c1f8735c82cc4c1386cd8e27027329c0549f248a2 \ + --hash=sha256:d63def9bbc6b35aef4d76dc740301a4185867e8870cbb8719ec9de672212fca8 \ + --hash=sha256:d82f20c0060ffdaadaf642b88ab0aa52365b56dffae812e188e5bdb998043588 \ + --hash=sha256:dbcf5371ea704759fcce772c66a07647751d1f5dbdec7818331c9b31ae996c77 \ + --hash=sha256:e8914dad106dacb0775718e54bf15e528055c4e92fb2677842996f2d52da5069 \ + --hash=sha256:ebe303cd9839af69dd1f7942acaa80b1ba90bacef2e7ded9347fbed4f1654672 \ + --hash=sha256:ec55a81ac2b0f41b8d6fb29aad16e55417036c7563bad5568686931aa4ff08f7 \ + --hash=sha256:effe182767d102cb65dfbbf74192237dbd22d4191928d59415aa7d7c861d8c88 \ + --hash=sha256:f42b82f268689f429def9ecfb86fa65ceea0eaf3fed408b570fe113311bf5ce7 \ + --hash=sha256:f6fe570e20e293eb50491ae14ddeef71a6a7e5f59d7e791393ffa99b13f1f8c2 \ + --hash=sha256:f799d1d6c33d81e983d3682571cc7d993ae7ff772c19b3aabb767039c33f6d1e \ + --hash=sha256:f891b98f8bc6c9d521785816085e9657212621e93f223917fb8e32f318b2957e \ + --hash=sha256:fa263135b892686e11d5b84f6a1892523123a00b7e5882eff4fbdabb38667347 \ + --hash=sha256:fa4c598ed77f74ec973247ca776341200b0f93ec3883e34c222907ce72cb92a4 \ + --hash=sha256:fe56659ccadbee97908132135de4b875543353351e0c92e736b7c57aee298b5a \ + --hash=sha256:fe59a0c21a032024edb0c8e43f5dee5623fef0b65a1e3c1281836d9ce199af3b + # via cleo +requests==2.27.1 \ + --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \ + --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d + # via + # cachecontrol + # poetry + # 
requests-toolbelt +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via poetry +shellingham==1.5.0.post1 \ + --hash=sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744 \ + --hash=sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28 + # via poetry +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # html5lib + # virtualenv +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via + # build + # poetry + # pyproject-hooks +tomlkit==0.11.6 \ + --hash=sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b \ + --hash=sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73 + # via poetry +trove-classifiers==2023.3.9 \ + --hash=sha256:06fd10c95d285e7ddebd59e6a4ba299f03d7417d38d369248a4a40c9754a68fa \ + --hash=sha256:ee42f2f8c1d4bcfe35f746e472f07633570d485fab45407effc0379270a3bb03 + # via poetry +urllib3==1.26.9 \ + --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 \ + --hash=sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e + # via + # dulwich + # poetry + # requests +virtualenv==20.4.4 \ + --hash=sha256:09c61377ef072f43568207dc8e46ddeac6bcdcaf288d49011bda0e7f4d38c4a2 \ + --hash=sha256:a935126db63128861987a7d5d30e23e8ec045a73840eeccb467c148514e29535 + # via poetry +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via html5lib +zipp==3.6.0 \ + --hash=sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832 \ + --hash=sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc + # via importlib-metadata diff --git a/python/mozbuild/mozbuild/testing.py b/python/mozbuild/mozbuild/testing.py new file mode 100644 index 0000000000..f951434f97 --- /dev/null +++ b/python/mozbuild/mozbuild/testing.py @@ -0,0 +1,266 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import sys + +import manifestparser +import mozpack.path as mozpath +from mozpack.copier import FileCopier +from mozpack.manifests import InstallManifest + +# These definitions provide a single source of truth for modules attempting +# to get a view of all tests for a build. Used by the emitter to figure out +# how to read/install manifests and by test dependency annotations in Files() +# entries to enumerate test flavors. + +# While there are multiple test manifests, the behavior is very similar +# across them. We enforce this by having common handling of all +# manifests and outputting a single class type with the differences +# described inside the instance. +# +# Keys are variable prefixes and values are tuples describing how these +# manifests should be handled: +# +# (flavor, install_root, install_subdir, package_tests) +# +# flavor identifies the flavor of this test. 
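+# (For example, the XPCSHELL_TESTS entry below carries flavor "xpcshell";
+# all_test_flavors() aggregates these with the reftest, puppeteer, and
+# web-platform-tests flavors.)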
+# install_root is the path prefix to install the files starting from the root
+# directory and not as specified by the manifest location. (bug 972168)
+# install_subdir is the path within the tests directory where the files are
+# installed.
+# package_tests indicates whether to package test files into the test
+# package; suites that compile the test files should not install
+# them into the test package.
+#
+TEST_MANIFESTS = dict(
+    A11Y=("a11y", "testing/mochitest", "a11y", True),
+    BROWSER_CHROME=("browser-chrome", "testing/mochitest", "browser", True),
+    ANDROID_INSTRUMENTATION=("instrumentation", "instrumentation", ".", False),
+    FIREFOX_UI_FUNCTIONAL=("firefox-ui-functional", "firefox-ui", ".", False),
+    FIREFOX_UI_UPDATE=("firefox-ui-update", "firefox-ui", ".", False),
+    PYTHON_UNITTEST=("python", "python", ".", False),
+    CRAMTEST=("cram", "cram", ".", False),
+    TELEMETRY_TESTS_CLIENT=(
+        "telemetry-tests-client",
+        "toolkit/components/telemetry/tests/marionette/",
+        ".",
+        False,
+    ),
+    # marionette tests are run from the srcdir
+    # TODO(ato): make packaging work as for other test suites
+    MARIONETTE=("marionette", "marionette", ".", False),
+    MARIONETTE_UNIT=("marionette", "marionette", ".", False),
+    MARIONETTE_WEBAPI=("marionette", "marionette", ".", False),
+    MOCHITEST=("mochitest", "testing/mochitest", "tests", True),
+    MOCHITEST_CHROME=("chrome", "testing/mochitest", "chrome", True),
+    WEBRTC_SIGNALLING_TEST=("steeplechase", "steeplechase", ".", True),
+    XPCSHELL_TESTS=("xpcshell", "xpcshell", ".", True),
+    PERFTESTS=("perftest", "testing/perf", "perf", True),
+)
+
+# reftests, wpt, and puppeteer all have their own manifest formats
+# and are processed separately
+REFTEST_FLAVORS = ("crashtest", "reftest")
+PUPPETEER_FLAVORS = ("puppeteer",)
+WEB_PLATFORM_TESTS_FLAVORS = ("web-platform-tests",)
+
+
+def all_test_flavors():
+    return (
+        [v[0] for v in TEST_MANIFESTS.values()]
+        + list(REFTEST_FLAVORS)
+        + list(PUPPETEER_FLAVORS)
+        + list(WEB_PLATFORM_TESTS_FLAVORS)
+    )
+
+
+class TestInstallInfo(object):
+    def __init__(self):
+        self.seen = set()
+        self.pattern_installs = []
+        self.installs = []
+        self.external_installs = set()
+        self.deferred_installs = set()
+
+    def __ior__(self, other):
+        self.pattern_installs.extend(other.pattern_installs)
+        self.installs.extend(other.installs)
+        self.external_installs |= other.external_installs
+        self.deferred_installs |= other.deferred_installs
+        return self
+
+
+class SupportFilesConverter(object):
+    """Processes a "support-files" entry from a test object, either from
+    a parsed object from a test manifest or its representation in
+    moz.build, and returns the installs to perform for this test object.
+
+    Processing the same support files multiple times will not have any further
+    effect, and the structure of the parsed objects from manifests will have a
+    lot of repeated entries, so this class takes care of memoizing.
+    """
+
+    def __init__(self):
+        self._fields = (
+            ("head", set()),
+            ("support-files", set()),
+            ("generated-files", set()),
+        )
+
+    def convert_support_files(self, test, install_root, manifest_dir, out_dir):
+        # Arguments:
+        #  test - The test object to process.
+        #  install_root - The directory under $objdir/_tests that will contain
+        #    the tests for this harness (examples are "testing/mochitest",
+        #    "xpcshell").
+ # manifest_dir - Absoulute path to the (srcdir) directory containing the + # manifest that included this test + # out_dir - The path relative to $objdir/_tests used as the destination for the + # test, based on the relative path to the manifest in the srcdir and + # the install_root. + info = TestInstallInfo() + for field, seen in self._fields: + value = test.get(field, "") + for pattern in value.split(): + + # We track uniqueness locally (per test) where duplicates are forbidden, + # and globally, where they are permitted. If a support file appears multiple + # times for a single test, there are unnecessary entries in the manifest. But + # many entries will be shared across tests that share defaults. + key = field, pattern, out_dir + if key in info.seen: + raise ValueError( + "%s appears multiple times in a test manifest under a %s field," + " please omit the duplicate entry." % (pattern, field) + ) + info.seen.add(key) + if key in seen: + continue + seen.add(key) + + if field == "generated-files": + info.external_installs.add( + mozpath.normpath(mozpath.join(out_dir, pattern)) + ) + # '!' indicates our syntax for inter-directory support file + # dependencies. These receive special handling in the backend. + elif pattern[0] == "!": + info.deferred_installs.add(pattern) + # We only support globbing on support-files because + # the harness doesn't support * for head. + elif "*" in pattern and field == "support-files": + info.pattern_installs.append((manifest_dir, pattern, out_dir)) + # "absolute" paths identify files that are to be + # placed in the install_root directory (no globs) + elif pattern[0] == "/": + full = mozpath.normpath( + mozpath.join(manifest_dir, mozpath.basename(pattern)) + ) + info.installs.append( + (full, mozpath.join(install_root, pattern[1:])) + ) + else: + full = mozpath.normpath(mozpath.join(manifest_dir, pattern)) + dest_path = mozpath.join(out_dir, pattern) + + # If the path resolves to a different directory + # tree, we take special behavior depending on the + # entry type. + if not full.startswith(manifest_dir): + # If it's a support file, we install the file + # into the current destination directory. + # This implementation makes installing things + # with custom prefixes impossible. If this is + # needed, we can add support for that via a + # special syntax later. + if field == "support-files": + dest_path = mozpath.join(out_dir, os.path.basename(pattern)) + # If it's not a support file, we ignore it. + # This preserves old behavior so things like + # head files doesn't get installed multiple + # times. + else: + continue + info.installs.append((full, mozpath.normpath(dest_path))) + return info + + +def install_test_files(topsrcdir, topobjdir, tests_root): + """Installs the requested test files to the objdir. This is invoked by + test runners to avoid installing tens of thousands of test files when + only a few tests need to be run. + """ + + manifest = InstallManifest( + mozpath.join(topobjdir, "_build_manifests", "install", "_test_files") + ) + + harness_files_manifest = mozpath.join( + topobjdir, "_build_manifests", "install", tests_root + ) + + if os.path.isfile(harness_files_manifest): + # If the backend has generated an install manifest for test harness + # files they are treated as a monolith and installed each time we + # run tests. Fortunately there are not very many. 
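+        # Editor's note (illustrative, not part of the upstream patch): a test
+        # runner would typically invoke this helper as
+        #
+        #     install_test_files(topsrcdir, topobjdir, "_tests")
+        #
+        # where "_tests" names both the harness-files install manifest and the
+        # destination directory under the objdir.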
+ manifest |= InstallManifest(harness_files_manifest) + + copier = FileCopier() + manifest.populate_registry(copier) + copier.copy(mozpath.join(topobjdir, tests_root), remove_unaccounted=False) + + +# Convenience methods for test manifest reading. +def read_manifestparser_manifest(context, manifest_path): + path = manifest_path.full_path + return manifestparser.TestManifest( + manifests=[path], + strict=True, + rootdir=context.config.topsrcdir, + finder=context._finder, + handle_defaults=False, + ) + + +def read_reftest_manifest(context, manifest_path): + import reftest + + path = manifest_path.full_path + manifest = reftest.ReftestManifest(finder=context._finder) + manifest.load(path) + return manifest + + +def read_wpt_manifest(context, paths): + manifest_path, tests_root = paths + full_path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path)) + old_path = sys.path[:] + try: + # Setup sys.path to include all the dependencies required to import + # the web-platform-tests manifest parser. web-platform-tests provides + # a the localpaths.py to do the path manipulation, which we load, + # providing the __file__ variable so it can resolve the relative + # paths correctly. + paths_file = os.path.join( + context.config.topsrcdir, + "testing", + "web-platform", + "tests", + "tools", + "localpaths.py", + ) + _globals = {"__file__": paths_file} + execfile(paths_file, _globals) + import manifest as wptmanifest + finally: + sys.path = old_path + f = context._finder.get(full_path) + try: + rv = wptmanifest.manifest.load(tests_root, f) + except wptmanifest.manifest.ManifestVersionMismatch: + # If we accidentially end up with a committed manifest that's the wrong + # version, then return an empty manifest here just to not break the build + rv = wptmanifest.manifest.Manifest() + return rv diff --git a/python/mozbuild/mozbuild/toolchains.py b/python/mozbuild/mozbuild/toolchains.py new file mode 100644 index 0000000000..c5418089bb --- /dev/null +++ b/python/mozbuild/mozbuild/toolchains.py @@ -0,0 +1,32 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os + +import six + + +def toolchain_task_definitions(): + import gecko_taskgraph # noqa: triggers override of the `graph_config_schema` + from taskgraph.generator import load_tasks_for_kind + + # Don't import globally to allow this module being imported without + # the taskgraph module being available (e.g. standalone js) + params = {"level": os.environ.get("MOZ_SCM_LEVEL", "3")} + root_dir = os.path.join( + os.path.dirname(__file__), "..", "..", "..", "taskcluster", "ci" + ) + toolchains = load_tasks_for_kind(params, "toolchain", root_dir=root_dir) + aliased = {} + for t in toolchains.values(): + aliases = t.attributes.get("toolchain-alias") + if not aliases: + aliases = [] + if isinstance(aliases, six.text_type): + aliases = [aliases] + for alias in aliases: + aliased["toolchain-{}".format(alias)] = t + toolchains.update(aliased) + + return toolchains diff --git a/python/mozbuild/mozbuild/util.py b/python/mozbuild/mozbuild/util.py new file mode 100644 index 0000000000..c1f24445ea --- /dev/null +++ b/python/mozbuild/mozbuild/util.py @@ -0,0 +1,1407 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +# This file contains miscellaneous utility functions that don't belong anywhere +# in particular. + +import argparse +import collections +import collections.abc +import copy +import ctypes +import difflib +import errno +import functools +import hashlib +import io +import itertools +import os +import re +import stat +import sys +import time +from collections import OrderedDict +from io import BytesIO, StringIO +from pathlib import Path + +import six +from packaging.version import Version + +MOZBUILD_METRICS_PATH = os.path.abspath( + os.path.join(__file__, "..", "..", "metrics.yaml") +) + +if sys.platform == "win32": + _kernel32 = ctypes.windll.kernel32 + _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000 + system_encoding = "mbcs" +else: + system_encoding = "utf-8" + + +def exec_(object, globals=None, locals=None): + """Wrapper around the exec statement to avoid bogus errors like: + + SyntaxError: unqualified exec is not allowed in function ... + it is a nested function. + + or + + SyntaxError: unqualified exec is not allowed in function ... + it contains a nested function with free variable + + which happen with older versions of python 2.7. + """ + exec(object, globals, locals) + + +def _open(path, mode): + if "b" in mode: + return io.open(path, mode) + return io.open(path, mode, encoding="utf-8", newline="\n") + + +def hash_file(path, hasher=None): + """Hashes a file specified by the path given and returns the hex digest.""" + + # If the default hashing function changes, this may invalidate + # lots of cached data. Don't change it lightly. + h = hasher or hashlib.sha1() + + with open(path, "rb") as fh: + while True: + data = fh.read(8192) + + if not len(data): + break + + h.update(data) + + return h.hexdigest() + + +class EmptyValue(six.text_type): + """A dummy type that behaves like an empty string and sequence. + + This type exists in order to support + :py:class:`mozbuild.frontend.reader.EmptyConfig`. It should likely not be + used elsewhere. 
+ """ + + def __init__(self): + super(EmptyValue, self).__init__() + + +class ReadOnlyNamespace(object): + """A class for objects with immutable attributes set at initialization.""" + + def __init__(self, **kwargs): + for k, v in six.iteritems(kwargs): + super(ReadOnlyNamespace, self).__setattr__(k, v) + + def __delattr__(self, key): + raise Exception("Object does not support deletion.") + + def __setattr__(self, key, value): + raise Exception("Object does not support assignment.") + + def __ne__(self, other): + return not (self == other) + + def __eq__(self, other): + return self is other or ( + hasattr(other, "__dict__") and self.__dict__ == other.__dict__ + ) + + def __repr__(self): + return "<%s %r>" % (self.__class__.__name__, self.__dict__) + + +class ReadOnlyDict(dict): + """A read-only dictionary.""" + + def __init__(self, *args, **kwargs): + dict.__init__(self, *args, **kwargs) + + def __delitem__(self, key): + raise Exception("Object does not support deletion.") + + def __setitem__(self, key, value): + raise Exception("Object does not support assignment.") + + def update(self, *args, **kwargs): + raise Exception("Object does not support update.") + + def __copy__(self, *args, **kwargs): + return ReadOnlyDict(**dict.copy(self, *args, **kwargs)) + + def __deepcopy__(self, memo): + result = {} + for k, v in self.items(): + result[k] = copy.deepcopy(v, memo) + + return ReadOnlyDict(**result) + + +class undefined_default(object): + """Represents an undefined argument value that isn't None.""" + + +undefined = undefined_default() + + +class ReadOnlyDefaultDict(ReadOnlyDict): + """A read-only dictionary that supports default values on retrieval.""" + + def __init__(self, default_factory, *args, **kwargs): + ReadOnlyDict.__init__(self, *args, **kwargs) + self._default_factory = default_factory + + def __missing__(self, key): + value = self._default_factory() + dict.__setitem__(self, key, value) + return value + + +def ensureParentDir(path): + """Ensures the directory parent to the given file exists.""" + d = os.path.dirname(path) + if d and not os.path.exists(path): + try: + os.makedirs(d) + except OSError as error: + if error.errno != errno.EEXIST: + raise + + +def mkdir(path, not_indexed=False): + """Ensure a directory exists. + + If ``not_indexed`` is True, an attribute is set that disables content + indexing on the directory. + """ + try: + os.makedirs(path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + if not_indexed: + if sys.platform == "win32": + if isinstance(path, six.string_types): + fn = _kernel32.SetFileAttributesW + else: + fn = _kernel32.SetFileAttributesA + + fn(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED) + elif sys.platform == "darwin": + with open(os.path.join(path, ".metadata_never_index"), "a"): + pass + + +def simple_diff(filename, old_lines, new_lines): + """Returns the diff between old_lines and new_lines, in unified diff form, + as a list of lines. + + old_lines and new_lines are lists of non-newline terminated lines to + compare. + old_lines can be None, indicating a file creation. + new_lines can be None, indicating a file deletion. + """ + + old_name = "/dev/null" if old_lines is None else filename + new_name = "/dev/null" if new_lines is None else filename + + return difflib.unified_diff( + old_lines or [], new_lines or [], old_name, new_name, n=4, lineterm="" + ) + + +class FileAvoidWrite(BytesIO): + """File-like object that buffers output and only writes if content changed. + + We create an instance from an existing filename. 
New content is written to + it. When we close the file object, if the content in the in-memory buffer + differs from what is on disk, then we write out the new content. Otherwise, + the original file is untouched. + + Instances can optionally capture diffs of file changes. This feature is not + enabled by default because it a) doesn't make sense for binary files b) + could add unwanted overhead to calls. + + Additionally, there is dry run mode where the file is not actually written + out, but reports whether the file was existing and would have been updated + still occur, as well as diff capture if requested. + """ + + def __init__(self, filename, capture_diff=False, dry_run=False, readmode="r"): + BytesIO.__init__(self) + self.name = filename + assert type(capture_diff) == bool + assert type(dry_run) == bool + assert "r" in readmode + self._capture_diff = capture_diff + self._write_to_file = not dry_run + self.diff = None + self.mode = readmode + self._binary_mode = "b" in readmode + + def write(self, buf): + BytesIO.write(self, six.ensure_binary(buf)) + + def avoid_writing_to_file(self): + self._write_to_file = False + + def close(self): + """Stop accepting writes, compare file contents, and rewrite if needed. + + Returns a tuple of bools indicating what action was performed: + + (file existed, file updated) + + If ``capture_diff`` was specified at construction time and the + underlying file was changed, ``.diff`` will be populated with the diff + of the result. + """ + # Use binary data if the caller explicitly asked for it. + ensure = six.ensure_binary if self._binary_mode else six.ensure_text + buf = ensure(self.getvalue()) + + BytesIO.close(self) + existed = False + old_content = None + + try: + existing = _open(self.name, self.mode) + existed = True + except IOError: + pass + else: + try: + old_content = existing.read() + if old_content == buf: + return True, False + except IOError: + pass + finally: + existing.close() + + if self._write_to_file: + ensureParentDir(self.name) + # Maintain 'b' if specified. 'U' only applies to modes starting with + # 'r', so it is dropped. + writemode = "w" + if self._binary_mode: + writemode += "b" + buf = six.ensure_binary(buf) + else: + buf = six.ensure_text(buf) + with _open(self.name, writemode) as file: + file.write(buf) + + self._generate_diff(buf, old_content) + + return existed, True + + def _generate_diff(self, new_content, old_content): + """Generate a diff for the changed contents if `capture_diff` is True. + + If the changed contents could not be decoded as utf-8 then generate a + placeholder message instead of a diff. + + Args: + new_content: Str or bytes holding the new file contents. + old_content: Str or bytes holding the original file contents. Should be + None if no old content is being overwritten. + """ + if not self._capture_diff: + return + + try: + if old_content is None: + old_lines = None + else: + if self._binary_mode: + # difflib doesn't work with bytes. + old_content = old_content.decode("utf-8") + + old_lines = old_content.splitlines() + + if self._binary_mode: + # difflib doesn't work with bytes. + new_content = new_content.decode("utf-8") + + new_lines = new_content.splitlines() + + self.diff = simple_diff(self.name, old_lines, new_lines) + # FileAvoidWrite isn't unicode/bytes safe. So, files with non-ascii + # content or opened and written in different modes may involve + # implicit conversion and this will make Python unhappy. Since + # diffing isn't a critical feature, we just ignore the failure. 
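+            # (Editor's illustration, not upstream: a FileAvoidWrite opened
+            # with readmode="rb" whose new content is e.g. b"\xff\xfe" cannot
+            # be decoded as utf-8, so the except clause below records the
+            # "Binary or non-ascii file changed" placeholder instead of a diff.)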
+ # This can go away once FileAvoidWrite uses io.BytesIO and + # io.StringIO. But that will require a lot of work. + except (UnicodeDecodeError, UnicodeEncodeError): + self.diff = ["Binary or non-ascii file changed: %s" % self.name] + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + if not self.closed: + self.close() + + +def resolve_target_to_make(topobjdir, target): + r""" + Resolve `target` (a target, directory, or file) to a make target. + + `topobjdir` is the object directory; all make targets will be + rooted at or below the top-level Makefile in this directory. + + Returns a pair `(reldir, target)` where `reldir` is a directory + relative to `topobjdir` containing a Makefile and `target` is a + make target (possibly `None`). + + A directory resolves to the nearest directory at or above + containing a Makefile, and target `None`. + + A regular (non-Makefile) file resolves to the nearest directory at + or above the file containing a Makefile, and an appropriate + target. + + A Makefile resolves to the nearest parent strictly above the + Makefile containing a different Makefile, and an appropriate + target. + """ + + target = target.replace(os.sep, "/").lstrip("/") + abs_target = os.path.join(topobjdir, target) + + # For directories, run |make -C dir|. If the directory does not + # contain a Makefile, check parents until we find one. At worst, + # this will terminate at the root. + if os.path.isdir(abs_target): + current = abs_target + + while True: + make_path = os.path.join(current, "Makefile") + if os.path.exists(make_path): + return (current[len(topobjdir) + 1 :], None) + + current = os.path.dirname(current) + + # If it's not in a directory, this is probably a top-level make + # target. Treat it as such. + if "/" not in target: + return (None, target) + + # We have a relative path within the tree. We look for a Makefile + # as far into the path as possible. Then, we compute the make + # target as relative to that directory. + reldir = os.path.dirname(target) + target = os.path.basename(target) + + while True: + make_path = os.path.join(topobjdir, reldir, "Makefile") + + # We append to target every iteration, so the check below + # happens exactly once. + if target != "Makefile" and os.path.exists(make_path): + return (reldir, target) + + target = os.path.join(os.path.basename(reldir), target) + reldir = os.path.dirname(reldir) + + +class List(list): + """A list specialized for moz.build environments. + + We overload the assignment and append operations to require that the + appended thing is a list. This avoids bad surprises coming from appending + a string to a list, which would just add each letter of the string. + """ + + def __init__(self, iterable=None, **kwargs): + if iterable is None: + iterable = [] + if not isinstance(iterable, list): + raise ValueError("List can only be created from other list instances.") + + self._kwargs = kwargs + super(List, self).__init__(iterable) + + def extend(self, l): + if not isinstance(l, list): + raise ValueError("List can only be extended with other list instances.") + + return super(List, self).extend(l) + + def __setitem__(self, key, val): + if isinstance(key, slice): + if not isinstance(val, list): + raise ValueError( + "List can only be sliced with other list " "instances." + ) + if key.step: + raise ValueError("List cannot be sliced with a nonzero step " "value") + # Python 2 and Python 3 do this differently for some reason. 
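+            # Editor's note (illustrative): the slice check above means
+            #
+            #     l = List(["a", "b"]); l[0:2] = ["x", "y"]   # fine
+            #     l[0:2] = "xy"                               # raises ValueError
+            #
+            # so a string can never be spliced in character-by-character.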
+ if six.PY2: + return super(List, self).__setslice__(key.start, key.stop, val) + else: + return super(List, self).__setitem__(key, val) + return super(List, self).__setitem__(key, val) + + def __setslice__(self, i, j, sequence): + return self.__setitem__(slice(i, j), sequence) + + def __add__(self, other): + # Allow None and EmptyValue is a special case because it makes undefined + # variable references in moz.build behave better. + other = [] if isinstance(other, (type(None), EmptyValue)) else other + if not isinstance(other, list): + raise ValueError("Only lists can be appended to lists.") + + new_list = self.__class__(self, **self._kwargs) + new_list.extend(other) + return new_list + + def __iadd__(self, other): + other = [] if isinstance(other, (type(None), EmptyValue)) else other + if not isinstance(other, list): + raise ValueError("Only lists can be appended to lists.") + + return super(List, self).__iadd__(other) + + +class UnsortedError(Exception): + def __init__(self, srtd, original): + assert len(srtd) == len(original) + + self.sorted = srtd + self.original = original + + for i, orig in enumerate(original): + s = srtd[i] + + if orig != s: + self.i = i + break + + def __str__(self): + s = StringIO() + + s.write("An attempt was made to add an unsorted sequence to a list. ") + s.write("The incoming list is unsorted starting at element %d. " % self.i) + s.write( + 'We expected "%s" but got "%s"' + % (self.sorted[self.i], self.original[self.i]) + ) + + return s.getvalue() + + +class StrictOrderingOnAppendList(List): + """A list specialized for moz.build environments. + + We overload the assignment and append operations to require that incoming + elements be ordered. This enforces cleaner style in moz.build files. + """ + + @staticmethod + def ensure_sorted(l): + if isinstance(l, StrictOrderingOnAppendList): + return + + def _first_element(e): + # If the list entry is a tuple, we sort based on the first element + # in the tuple. 
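+            # Editor's note (illustrative): this is why
+            #     StrictOrderingOnAppendList(["b", "a"])
+            # raises UnsortedError, while tuple entries such as
+            #     [("a", "out-a"), ("b", "out-b")]
+            # are ordered by their first elements only.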
+ return e[0] if isinstance(e, tuple) else e + + srtd = sorted(l, key=lambda x: _first_element(x).lower()) + + if srtd != l: + raise UnsortedError(srtd, l) + + def __init__(self, iterable=None, **kwargs): + if iterable is None: + iterable = [] + + StrictOrderingOnAppendList.ensure_sorted(iterable) + + super(StrictOrderingOnAppendList, self).__init__(iterable, **kwargs) + + def extend(self, l): + StrictOrderingOnAppendList.ensure_sorted(l) + + return super(StrictOrderingOnAppendList, self).extend(l) + + def __setitem__(self, key, val): + if isinstance(key, slice): + StrictOrderingOnAppendList.ensure_sorted(val) + return super(StrictOrderingOnAppendList, self).__setitem__(key, val) + + def __add__(self, other): + StrictOrderingOnAppendList.ensure_sorted(other) + + return super(StrictOrderingOnAppendList, self).__add__(other) + + def __iadd__(self, other): + StrictOrderingOnAppendList.ensure_sorted(other) + + return super(StrictOrderingOnAppendList, self).__iadd__(other) + + +class ImmutableStrictOrderingOnAppendList(StrictOrderingOnAppendList): + """Like StrictOrderingOnAppendList, but not allowing mutations of the value.""" + + def append(self, elt): + raise Exception("cannot use append on this type") + + def extend(self, iterable): + raise Exception("cannot use extend on this type") + + def __setslice__(self, i, j, iterable): + raise Exception("cannot assign to slices on this type") + + def __setitem__(self, i, elt): + raise Exception("cannot assign to indexes on this type") + + def __iadd__(self, other): + raise Exception("cannot use += on this type") + + +class StrictOrderingOnAppendListWithAction(StrictOrderingOnAppendList): + """An ordered list that accepts a callable to be applied to each item. + + A callable (action) passed to the constructor is run on each item of input. + The result of running the callable on each item will be stored in place of + the original input, but the original item must be used to enforce sortedness. 
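+
+    Example (editor's illustration; actions commonly return a tuple whose
+    first element is the original string, so ordering is checked against it):
+
+        l = StrictOrderingOnAppendListWithAction(
+            ["a", "b"], action=lambda x: (x, x.upper()))
+        # l == [("a", "A"), ("b", "B")]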
+ """ + + def __init__(self, iterable=(), action=None): + if not callable(action): + raise ValueError( + "A callable action is required to construct " + "a StrictOrderingOnAppendListWithAction" + ) + + self._action = action + if not isinstance(iterable, (tuple, list)): + raise ValueError( + "StrictOrderingOnAppendListWithAction can only be initialized " + "with another list" + ) + iterable = [self._action(i) for i in iterable] + super(StrictOrderingOnAppendListWithAction, self).__init__( + iterable, action=action + ) + + def extend(self, l): + if not isinstance(l, list): + raise ValueError( + "StrictOrderingOnAppendListWithAction can only be extended " + "with another list" + ) + l = [self._action(i) for i in l] + return super(StrictOrderingOnAppendListWithAction, self).extend(l) + + def __setitem__(self, key, val): + if isinstance(key, slice): + if not isinstance(val, list): + raise ValueError( + "StrictOrderingOnAppendListWithAction can only be sliced " + "with another list" + ) + val = [self._action(item) for item in val] + return super(StrictOrderingOnAppendListWithAction, self).__setitem__(key, val) + + def __add__(self, other): + if not isinstance(other, list): + raise ValueError( + "StrictOrderingOnAppendListWithAction can only be added with " + "another list" + ) + return super(StrictOrderingOnAppendListWithAction, self).__add__(other) + + def __iadd__(self, other): + if not isinstance(other, list): + raise ValueError( + "StrictOrderingOnAppendListWithAction can only be added with " + "another list" + ) + other = [self._action(i) for i in other] + return super(StrictOrderingOnAppendListWithAction, self).__iadd__(other) + + +class MozbuildDeletionError(Exception): + pass + + +def FlagsFactory(flags): + """Returns a class which holds optional flags for an item in a list. + + The flags are defined in the dict given as argument, where keys are + the flag names, and values the type used for the value of that flag. + + The resulting class is used by the various WithFlagsFactory + functions below. + """ + assert isinstance(flags, dict) + assert all(isinstance(v, type) for v in flags.values()) + + class Flags(object): + __slots__ = flags.keys() + _flags = flags + + def update(self, **kwargs): + for k, v in six.iteritems(kwargs): + setattr(self, k, v) + + def __getattr__(self, name): + if name not in self.__slots__: + raise AttributeError( + "'%s' object has no attribute '%s'" + % (self.__class__.__name__, name) + ) + try: + return object.__getattr__(self, name) + except AttributeError: + value = self._flags[name]() + self.__setattr__(name, value) + return value + + def __setattr__(self, name, value): + if name not in self.__slots__: + raise AttributeError( + "'%s' object has no attribute '%s'" + % (self.__class__.__name__, name) + ) + if not isinstance(value, self._flags[name]): + raise TypeError( + "'%s' attribute of class '%s' must be '%s'" + % (name, self.__class__.__name__, self._flags[name].__name__) + ) + return object.__setattr__(self, name, value) + + def __delattr__(self, name): + raise MozbuildDeletionError("Unable to delete attributes for this object") + + return Flags + + +class StrictOrderingOnAppendListWithFlags(StrictOrderingOnAppendList): + """A list with flags specialized for moz.build environments. + + Each subclass has a set of typed flags; this class lets us use `isinstance` + for natural testing. + """ + + +def StrictOrderingOnAppendListWithFlagsFactory(flags): + """Returns a StrictOrderingOnAppendList-like object, with optional + flags on each item. 
+ + The flags are defined in the dict given as argument, where keys are + the flag names, and values the type used for the value of that flag. + + Example: + + .. code-block:: python + + FooList = StrictOrderingOnAppendListWithFlagsFactory({ + 'foo': bool, 'bar': unicode + }) + foo = FooList(['a', 'b', 'c']) + foo['a'].foo = True + foo['b'].bar = 'bar' + """ + + class StrictOrderingOnAppendListWithFlagsSpecialization( + StrictOrderingOnAppendListWithFlags + ): + def __init__(self, iterable=None): + if iterable is None: + iterable = [] + StrictOrderingOnAppendListWithFlags.__init__(self, iterable) + self._flags_type = FlagsFactory(flags) + self._flags = dict() + + def __getitem__(self, name): + if name not in self._flags: + if name not in self: + raise KeyError("'%s'" % name) + self._flags[name] = self._flags_type() + return self._flags[name] + + def __setitem__(self, name, value): + if not isinstance(name, slice): + raise TypeError( + "'%s' object does not support item assignment" + % self.__class__.__name__ + ) + result = super( + StrictOrderingOnAppendListWithFlagsSpecialization, self + ).__setitem__(name, value) + # We may have removed items. + for k in set(self._flags.keys()) - set(self): + del self._flags[k] + if isinstance(value, StrictOrderingOnAppendListWithFlags): + self._update_flags(value) + return result + + def _update_flags(self, other): + if self._flags_type._flags != other._flags_type._flags: + raise ValueError( + "Expected a list of strings with flags like %s, not like %s" + % (self._flags_type._flags, other._flags_type._flags) + ) + intersection = set(self._flags.keys()) & set(other._flags.keys()) + if intersection: + raise ValueError( + "Cannot update flags: both lists of strings with flags configure %s" + % intersection + ) + self._flags.update(other._flags) + + def extend(self, l): + result = super( + StrictOrderingOnAppendListWithFlagsSpecialization, self + ).extend(l) + if isinstance(l, StrictOrderingOnAppendListWithFlags): + self._update_flags(l) + return result + + def __add__(self, other): + result = super( + StrictOrderingOnAppendListWithFlagsSpecialization, self + ).__add__(other) + if isinstance(other, StrictOrderingOnAppendListWithFlags): + # Result has flags from other but not from self, since + # internally we duplicate self and then extend with other, and + # only extend knows about flags. Since we don't allow updating + # when the set of flag keys intersect, which we instance we pass + # to _update_flags here matters. This needs to be correct but + # is an implementation detail. + result._update_flags(self) + return result + + def __iadd__(self, other): + result = super( + StrictOrderingOnAppendListWithFlagsSpecialization, self + ).__iadd__(other) + if isinstance(other, StrictOrderingOnAppendListWithFlags): + self._update_flags(other) + return result + + return StrictOrderingOnAppendListWithFlagsSpecialization + + +class HierarchicalStringList(object): + """A hierarchy of lists of strings. + + Each instance of this object contains a list of strings, which can be set or + appended to. A sub-level of the hierarchy is also an instance of this class, + can be added by appending to an attribute instead. + + For example, the moz.build variable EXPORTS is an instance of this class. We + can do: + + EXPORTS += ['foo.h'] + EXPORTS.mozilla.dom += ['bar.h'] + + In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and + EXPORTS.mozilla.dom), and the first and last each have one element in their + list. 
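+
+    Walking the example above (editor's illustration) yields each level's
+    strings along with a '/'-delimited path:
+
+        for path, strings in EXPORTS.walk():
+            ...  # ('', ['foo.h']), then ('mozilla/dom', ['bar.h'])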
+ """ + + __slots__ = ("_strings", "_children") + + def __init__(self): + # Please change ContextDerivedTypedHierarchicalStringList in context.py + # if you make changes here. + self._strings = StrictOrderingOnAppendList() + self._children = {} + + class StringListAdaptor(collections.abc.Sequence): + def __init__(self, hsl): + self._hsl = hsl + + def __getitem__(self, index): + return self._hsl._strings[index] + + def __len__(self): + return len(self._hsl._strings) + + def walk(self): + """Walk over all HierarchicalStringLists in the hierarchy. + + This is a generator of (path, sequence). + + The path is '' for the root level and '/'-delimited strings for + any descendants. The sequence is a read-only sequence of the + strings contained at that level. + """ + + if self._strings: + path_to_here = "" + yield path_to_here, self.StringListAdaptor(self) + + for k, l in sorted(self._children.items()): + for p, v in l.walk(): + path_to_there = "%s/%s" % (k, p) + yield path_to_there.strip("/"), v + + def __setattr__(self, name, value): + if name in self.__slots__: + return object.__setattr__(self, name, value) + + # __setattr__ can be called with a list when a simple assignment is + # used: + # + # EXPORTS.foo = ['file.h'] + # + # In this case, we need to overwrite foo's current list of strings. + # + # However, __setattr__ is also called with a HierarchicalStringList + # to try to actually set the attribute. We want to ignore this case, + # since we don't actually create an attribute called 'foo', but just add + # it to our list of children (using _get_exportvariable()). + self._set_exportvariable(name, value) + + def __getattr__(self, name): + if name.startswith("__"): + return object.__getattr__(self, name) + return self._get_exportvariable(name) + + def __delattr__(self, name): + raise MozbuildDeletionError("Unable to delete attributes for this object") + + def __iadd__(self, other): + if isinstance(other, HierarchicalStringList): + self._strings += other._strings + for c in other._children: + self[c] += other[c] + else: + self._check_list(other) + self._strings += other + return self + + def __getitem__(self, name): + return self._get_exportvariable(name) + + def __setitem__(self, name, value): + self._set_exportvariable(name, value) + + def _get_exportvariable(self, name): + # Please change ContextDerivedTypedHierarchicalStringList in context.py + # if you make changes here. + child = self._children.get(name) + if not child: + child = self._children[name] = HierarchicalStringList() + return child + + def _set_exportvariable(self, name, value): + if name in self._children: + if value is self._get_exportvariable(name): + return + raise KeyError("global_ns", "reassign", ".%s" % name) + + exports = self._get_exportvariable(name) + exports._check_list(value) + exports._strings += value + + def _check_list(self, value): + if not isinstance(value, list): + raise ValueError("Expected a list of strings, not %s" % type(value)) + for v in value: + if not isinstance(v, six.string_types): + raise ValueError( + "Expected a list of strings, not an element of %s" % type(v) + ) + + +class LockFile(object): + """LockFile is used by the lock_file method to hold the lock. + + This object should not be used directly, but only through + the lock_file method below. 
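+
+    Typical use (editor's sketch):
+
+        lock = lock_file("configure.lock")
+        try:
+            ...  # do the exclusive work
+        finally:
+            del lock  # __del__ removes the lock file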
+ """ + + def __init__(self, lockfile): + self.lockfile = lockfile + + def __del__(self): + while True: + try: + os.remove(self.lockfile) + break + except OSError as e: + if e.errno == errno.EACCES: + # Another process probably has the file open, we'll retry. + # Just a short sleep since we want to drop the lock ASAP + # (but we need to let some other process close the file + # first). + time.sleep(0.1) + else: + # Re-raise unknown errors + raise + + +def lock_file(lockfile, max_wait=600): + """Create and hold a lockfile of the given name, with the given timeout. + + To release the lock, delete the returned object. + """ + + # FUTURE This function and object could be written as a context manager. + + while True: + try: + fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT) + # We created the lockfile, so we're the owner + break + except OSError as e: + if e.errno == errno.EEXIST or ( + sys.platform == "win32" and e.errno == errno.EACCES + ): + pass + else: + # Should not occur + raise + + try: + # The lock file exists, try to stat it to get its age + # and read its contents to report the owner PID + f = open(lockfile, "r") + s = os.stat(lockfile) + except EnvironmentError as e: + if e.errno == errno.ENOENT or e.errno == errno.EACCES: + # We didn't create the lockfile, so it did exist, but it's + # gone now. Just try again + continue + + raise Exception( + "{0} exists but stat() failed: {1}".format(lockfile, e.strerror) + ) + + # We didn't create the lockfile and it's still there, check + # its age + now = int(time.time()) + if now - s[stat.ST_MTIME] > max_wait: + pid = f.readline().rstrip() + raise Exception( + "{0} has been locked for more than " + "{1} seconds (PID {2})".format(lockfile, max_wait, pid) + ) + + # It's not been locked too long, wait a while and retry + f.close() + time.sleep(1) + + # if we get here. we have the lockfile. Convert the os.open file + # descriptor into a Python file object and record our PID in it + f = os.fdopen(fd, "w") + f.write("{0}\n".format(os.getpid())) + f.close() + + return LockFile(lockfile) + + +class OrderedDefaultDict(OrderedDict): + """A combination of OrderedDict and defaultdict.""" + + def __init__(self, default_factory, *args, **kwargs): + OrderedDict.__init__(self, *args, **kwargs) + self._default_factory = default_factory + + def __missing__(self, key): + value = self[key] = self._default_factory() + return value + + +class KeyedDefaultDict(dict): + """Like a defaultdict, but the default_factory function takes the key as + argument""" + + def __init__(self, default_factory, *args, **kwargs): + dict.__init__(self, *args, **kwargs) + self._default_factory = default_factory + + def __missing__(self, key): + value = self._default_factory(key) + dict.__setitem__(self, key, value) + return value + + +class ReadOnlyKeyedDefaultDict(KeyedDefaultDict, ReadOnlyDict): + """Like KeyedDefaultDict, but read-only.""" + + +class memoize(dict): + """A decorator to memoize the results of function calls depending + on its arguments. + Both functions and instance methods are handled, although in the + instance method case, the results are cache in the instance itself. 
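+
+    Example (editor's illustration):
+
+        @memoize
+        def square(x):
+            return x * x
+
+        square(4)  # computed once...
+        square(4)  # ...then served from the cache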
+ """ + + def __init__(self, func): + self.func = func + functools.update_wrapper(self, func) + + def __call__(self, *args): + if args not in self: + self[args] = self.func(*args) + return self[args] + + def method_call(self, instance, *args): + name = "_%s" % self.func.__name__ + if not hasattr(instance, name): + setattr(instance, name, {}) + cache = getattr(instance, name) + if args not in cache: + cache[args] = self.func(instance, *args) + return cache[args] + + def __get__(self, instance, cls): + return functools.update_wrapper( + functools.partial(self.method_call, instance), self.func + ) + + +class memoized_property(object): + """A specialized version of the memoize decorator that works for + class instance properties. + """ + + def __init__(self, func): + self.func = func + + def __get__(self, instance, cls): + name = "_%s" % self.func.__name__ + if not hasattr(instance, name): + setattr(instance, name, self.func(instance)) + return getattr(instance, name) + + +def TypedNamedTuple(name, fields): + """Factory for named tuple types with strong typing. + + Arguments are an iterable of 2-tuples. The first member is the + the field name. The second member is a type the field will be validated + to be. + + Construction of instances varies from ``collections.namedtuple``. + + First, if a single tuple argument is given to the constructor, this is + treated as the equivalent of passing each tuple value as a separate + argument into __init__. e.g.:: + + t = (1, 2) + TypedTuple(t) == TypedTuple(1, 2) + + This behavior is meant for moz.build files, so vanilla tuples are + automatically cast to typed tuple instances. + + Second, fields in the tuple are validated to be instances of the specified + type. This is done via an ``isinstance()`` check. To allow multiple types, + pass a tuple as the allowed types field. + """ + cls = collections.namedtuple(name, (name for name, typ in fields)) + + class TypedTuple(cls): + __slots__ = () + + def __new__(klass, *args, **kwargs): + if len(args) == 1 and not kwargs and isinstance(args[0], tuple): + args = args[0] + + return super(TypedTuple, klass).__new__(klass, *args, **kwargs) + + def __init__(self, *args, **kwargs): + for i, (fname, ftype) in enumerate(self._fields): + value = self[i] + + if not isinstance(value, ftype): + raise TypeError( + "field in tuple not of proper type: %s; " + "got %s, expected %s" % (fname, type(value), ftype) + ) + + TypedTuple._fields = fields + + return TypedTuple + + +@memoize +def TypedList(type, base_class=List): + """A list with type coercion. + + The given ``type`` is what list elements are being coerced to. It may do + strict validation, throwing ValueError exceptions. + + A ``base_class`` type can be given for more specific uses than a List. 
For + example, a Typed StrictOrderingOnAppendList can be created with: + + TypedList(unicode, StrictOrderingOnAppendList) + """ + + class _TypedList(base_class): + @staticmethod + def normalize(e): + if not isinstance(e, type): + e = type(e) + return e + + def _ensure_type(self, l): + if isinstance(l, self.__class__): + return l + + return [self.normalize(e) for e in l] + + def __init__(self, iterable=None, **kwargs): + if iterable is None: + iterable = [] + iterable = self._ensure_type(iterable) + + super(_TypedList, self).__init__(iterable, **kwargs) + + def extend(self, l): + l = self._ensure_type(l) + + return super(_TypedList, self).extend(l) + + def __setitem__(self, key, val): + val = self._ensure_type(val) + + return super(_TypedList, self).__setitem__(key, val) + + def __add__(self, other): + other = self._ensure_type(other) + + return super(_TypedList, self).__add__(other) + + def __iadd__(self, other): + other = self._ensure_type(other) + + return super(_TypedList, self).__iadd__(other) + + def append(self, other): + self += [other] + + return _TypedList + + +def group_unified_files(files, unified_prefix, unified_suffix, files_per_unified_file): + """Return an iterator of (unified_filename, source_filenames) tuples. + + We compile most C and C++ files in "unified mode"; instead of compiling + ``a.cpp``, ``b.cpp``, and ``c.cpp`` separately, we compile a single file + that looks approximately like:: + + #include "a.cpp" + #include "b.cpp" + #include "c.cpp" + + This function handles the details of generating names for the unified + files, and determining which original source files go in which unified + file.""" + + # Our last returned list of source filenames may be short, and we + # don't want the fill value inserted by zip_longest to be an + # issue. So we do a little dance to filter it out ourselves. + dummy_fill_value = ("dummy",) + + def filter_out_dummy(iterable): + return six.moves.filter(lambda x: x != dummy_fill_value, iterable) + + # From the itertools documentation, slightly modified: + def grouper(n, iterable): + "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx" + args = [iter(iterable)] * n + return six.moves.zip_longest(fillvalue=dummy_fill_value, *args) + + for i, unified_group in enumerate(grouper(files_per_unified_file, files)): + just_the_filenames = list(filter_out_dummy(unified_group)) + yield "%s%d.%s" % (unified_prefix, i, unified_suffix), just_the_filenames + + +def pair(iterable): + """Given an iterable, returns an iterable pairing its items. + + For example, + list(pair([1,2,3,4,5,6])) + returns + [(1,2), (3,4), (5,6)] + """ + i = iter(iterable) + return six.moves.zip_longest(i, i) + + +def pairwise(iterable): + """Given an iterable, returns an iterable of overlapped pairs of + its items. Based on the Python itertools documentation. + + For example, + list(pairwise([1,2,3,4,5,6])) + returns + [(1,2), (2,3), (3,4), (4,5), (5,6)] + """ + a, b = itertools.tee(iterable) + next(b, None) + return zip(a, b) + + +VARIABLES_RE = re.compile("\$\((\w+)\)") + + +def expand_variables(s, variables): + """Given a string with $(var) variable references, replace those references + with the corresponding entries from the given `variables` dict. 
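+
+    Example (editor's illustration):
+
+        expand_variables("$(CC) -o $(OUT)", {"CC": "clang", "OUT": "a.out"})
+        # -> "clang -o a.out"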
+ + If a variable value is not a string, it is iterated and its items are + joined with a whitespace.""" + result = "" + for s, name in pair(VARIABLES_RE.split(s)): + result += s + value = variables.get(name) + if not value: + continue + if not isinstance(value, six.string_types): + value = " ".join(value) + result += value + return result + + +class DefinesAction(argparse.Action): + """An ArgumentParser action to handle -Dvar[=value] type of arguments.""" + + def __call__(self, parser, namespace, values, option_string): + defines = getattr(namespace, self.dest) + if defines is None: + defines = {} + values = values.split("=", 1) + if len(values) == 1: + name, value = values[0], 1 + else: + name, value = values + if value.isdigit(): + value = int(value) + defines[name] = value + setattr(namespace, self.dest, defines) + + +class EnumStringComparisonError(Exception): + pass + + +class EnumString(six.text_type): + """A string type that only can have a limited set of values, similarly to + an Enum, and can only be compared against that set of values. + + The class is meant to be subclassed, where the subclass defines + POSSIBLE_VALUES. The `subclass` method is a helper to create such + subclasses. + """ + + POSSIBLE_VALUES = () + + def __init__(self, value): + if value not in self.POSSIBLE_VALUES: + raise ValueError( + "'%s' is not a valid value for %s" % (value, self.__class__.__name__) + ) + + def __eq__(self, other): + if other not in self.POSSIBLE_VALUES: + raise EnumStringComparisonError( + "Can only compare with %s" + % ", ".join("'%s'" % v for v in self.POSSIBLE_VALUES) + ) + return super(EnumString, self).__eq__(other) + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return super(EnumString, self).__hash__() + + @staticmethod + def subclass(*possible_values): + class EnumStringSubclass(EnumString): + POSSIBLE_VALUES = possible_values + + return EnumStringSubclass + + +def _escape_char(c): + # str.encode('unicode_espace') doesn't escape quotes, presumably because + # quoting could be done with either ' or ". + if c == "'": + return "\\'" + return six.text_type(c.encode("unicode_escape")) + + +def ensure_bytes(value, encoding="utf-8"): + if isinstance(value, six.text_type): + return value.encode(encoding) + return value + + +def ensure_unicode(value, encoding="utf-8"): + if isinstance(value, six.binary_type): + return value.decode(encoding) + return value + + +def process_time(): + if six.PY2: + return time.clock() + else: + return time.process_time() + + +def hexdump(buf): + """ + Returns a list of hexdump-like lines corresponding to the given input buffer. + """ + assert six.PY3 + off_format = "%0{}x ".format(len(str(len(buf)))) + lines = [] + for off in range(0, len(buf), 16): + line = off_format % off + chunk = buf[off : min(off + 16, len(buf))] + for n, byte in enumerate(chunk): + line += " %02x" % byte + if n == 7: + line += " " + for n in range(len(chunk), 16): + line += " " + if n == 7: + line += " " + line += " |" + for byte in chunk: + if byte < 127 and byte >= 32: + line += chr(byte) + else: + line += "." 
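+        # Editor's note (illustrative): hexdump(b"mozilla") produces one line
+        # of the form "<offset>  <hex bytes, space-padded>  |mozilla         |",
+        # i.e. offset, sixteen hex columns, then a printable-ASCII gutter in
+        # which non-printable bytes are shown as '.'.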
+ for n in range(len(chunk), 16): + line += " " + line += "|\n" + lines.append(line) + return lines + + +def mozilla_build_version(): + mozilla_build = os.environ.get("MOZILLABUILD") + + version_file = Path(mozilla_build) / "VERSION" + + assert version_file.exists(), ( + f'The MozillaBuild VERSION file was not found at "{version_file}".\n' + "Please check if MozillaBuild is installed correctly and that the" + "`MOZILLABUILD` environment variable is to the correct path." + ) + + with version_file.open() as file: + return Version(file.readline().rstrip("\n")) diff --git a/python/mozbuild/mozbuild/vendor/__init__.py b/python/mozbuild/mozbuild/vendor/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozbuild/vendor/host_angle.py b/python/mozbuild/mozbuild/vendor/host_angle.py new file mode 100644 index 0000000000..9716c76a24 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/host_angle.py @@ -0,0 +1,37 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +import requests + +from mozbuild.vendor.host_base import BaseHost + + +class AngleHost(BaseHost): + def upstream_commit(self, revision): + raise Exception("Should not be called") + + def upstream_tag(self, revision): + data = requests.get("https://omahaproxy.appspot.com/all.json").json() + + for row in data: + if row["os"] == "win64": + for version in row["versions"]: + if version["channel"] == "beta": + branch = "chromium/" + version["true_branch"] + + if revision != "HEAD" and revision != branch: + raise Exception( + "Passing a --revision for Angle that is not HEAD " + + "or the true branch is not supported." + ) + + return ( + branch, + version["current_reldate"], + ) + + raise Exception("Could not find win64 beta version in the JSON response") + + def upstream_snapshot(self, revision): + raise Exception("Not supported for Angle") diff --git a/python/mozbuild/mozbuild/vendor/host_base.py b/python/mozbuild/mozbuild/vendor/host_base.py new file mode 100644 index 0000000000..2484d82e09 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/host_base.py @@ -0,0 +1,77 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. 
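+
+# Editor's note (illustrative, not upstream): concrete hosts subclass BaseHost
+# and answer the upstream_* queries. Given a parsed moz.yaml manifest, typical
+# use looks like:
+#
+#     host = BaseHost(manifest)            # or GitHubHost, GitLabHost, ...
+#     tag, timestamp = host.upstream_tag("HEAD")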
+ +import os +import subprocess +import tempfile +import urllib + + +class BaseHost: + def __init__(self, manifest): + self.manifest = manifest + self.repo_url = urllib.parse.urlparse(self.manifest["vendoring"]["url"]) + + def upstream_tag(self, revision): + """Temporarily clone the repo to get the latest tag and timestamp""" + with tempfile.TemporaryDirectory() as temp_repo_clone: + starting_directory = os.getcwd() + os.chdir(temp_repo_clone) + subprocess.run( + [ + "git", + "clone", + "-c", + "core.autocrlf=input", + self.manifest["vendoring"]["url"], + self.manifest["origin"]["name"], + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + check=True, + ) + os.chdir("/".join([temp_repo_clone, self.manifest["origin"]["name"]])) + if revision == "HEAD": + tag = subprocess.run( + ["git", "--no-pager", "tag", "--sort=creatordate"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + check=True, + ).stdout.splitlines()[-1] + else: + try: + tag = subprocess.run( + ["git", "--no-pager", "tag", "-l", revision], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + check=True, + ).stdout.splitlines()[-1] + except IndexError: # 0 lines of output, the tag does not exist + raise Exception(f"Requested tag {revision} not found in source.") + + tag_timestamp = subprocess.run( + [ + "git", + "log", + "-1", + "--date=iso8601-strict", + "--format=%ad", + tag, + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + check=True, + ).stdout.splitlines()[-1] + os.chdir(starting_directory) + return tag, tag_timestamp + + def upstream_snapshot(self, revision): + raise Exception("Unimplemented for this subclass...") + + def upstream_path_to_file(self, revision, filepath): + raise Exception("Unimplemented for this subclass...") diff --git a/python/mozbuild/mozbuild/vendor/host_codeberg.py b/python/mozbuild/mozbuild/vendor/host_codeberg.py new file mode 100644 index 0000000000..158dd0472d --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/host_codeberg.py @@ -0,0 +1,28 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +import requests + +from mozbuild.vendor.host_base import BaseHost + + +class CodebergHost(BaseHost): + def upstream_commit(self, revision): + """Query the codeberg api for a git commit id and timestamp.""" + codeberg_api = ( + self.repo_url.scheme + "://" + self.repo_url.netloc + "/api/v1/repos/" + ) + codeberg_api += self.repo_url.path[1:] + codeberg_api += "/git/commits" + req = requests.get("/".join([codeberg_api, revision])) + req.raise_for_status() + info = req.json() + return (info["sha"], info["created"]) + + def upstream_snapshot(self, revision): + codeberg_api = ( + self.repo_url.scheme + "://" + self.repo_url.netloc + "/api/v1/repos/" + ) + codeberg_api += self.repo_url.path[1:] + return "/".join([codeberg_api, "archive", revision + ".tar.gz"]) diff --git a/python/mozbuild/mozbuild/vendor/host_github.py b/python/mozbuild/mozbuild/vendor/host_github.py new file mode 100644 index 0000000000..eeaa4b9eaf --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/host_github.py @@ -0,0 +1,27 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. 
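+
+# Editor's sketch (illustrative): for a vendoring URL such as
+# https://github.com/kinetiknz/nestegg, upstream_commit("HEAD") issues
+#
+#     GET https://api.github.com/repos/kinetiknz/nestegg/commits/HEAD
+#
+# and returns the commit sha together with its committer date.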
+ +import requests + +from mozbuild.vendor.host_base import BaseHost + + +class GitHubHost(BaseHost): + def upstream_commit(self, revision): + """Query the github api for a git commit id and timestamp.""" + github_api = "https://api.github.com" + repo = self.repo_url.path[1:].strip("/") + req = requests.get("/".join([github_api, "repos", repo, "commits", revision])) + req.raise_for_status() + info = req.json() + return (info["sha"], info["commit"]["committer"]["date"]) + + def upstream_snapshot(self, revision): + return "/".join( + [self.manifest["vendoring"]["url"], "archive", revision + ".tar.gz"] + ) + + def upstream_path_to_file(self, revision, filepath): + repo = self.repo_url.path[1:] + return "/".join(["https://raw.githubusercontent.com", repo, revision, filepath]) diff --git a/python/mozbuild/mozbuild/vendor/host_gitlab.py b/python/mozbuild/mozbuild/vendor/host_gitlab.py new file mode 100644 index 0000000000..8bfc3ddc79 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/host_gitlab.py @@ -0,0 +1,26 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +import requests + +from mozbuild.vendor.host_base import BaseHost + + +class GitLabHost(BaseHost): + def upstream_commit(self, revision): + """Query the gitlab api for a git commit id and timestamp.""" + gitlab_api = ( + self.repo_url.scheme + "://" + self.repo_url.netloc + "/api/v4/projects/" + ) + gitlab_api += self.repo_url.path[1:].replace("/", "%2F") + gitlab_api += "/repository/commits" + req = requests.get("/".join([gitlab_api, revision])) + req.raise_for_status() + info = req.json() + return (info["id"], info["committed_date"]) + + def upstream_snapshot(self, revision): + return "/".join( + [self.manifest["vendoring"]["url"], "-", "archive", revision + ".tar.gz"] + ) diff --git a/python/mozbuild/mozbuild/vendor/host_googlesource.py b/python/mozbuild/mozbuild/vendor/host_googlesource.py new file mode 100644 index 0000000000..c903bd99b5 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/host_googlesource.py @@ -0,0 +1,32 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +import requests + +from mozbuild.vendor.host_base import BaseHost + + +class GoogleSourceHost(BaseHost): + def upstream_commit(self, revision): + """Query for a git commit and timestamp.""" + url = "/".join( + [self.manifest["vendoring"]["url"], "+", revision + "?format=JSON"] + ) + req = requests.get(url) + req.raise_for_status() + try: + info = req.json() + except ValueError: + # As of 2017 May, googlesource sends 4 garbage characters + # at the beginning of the json response. Work around this. + # https://bugs.chromium.org/p/chromium/issues/detail?id=718550 + import json + + info = json.loads(req.text[4:]) + return (info["commit"], info["committer"]["time"]) + + def upstream_snapshot(self, revision): + return "/".join( + [self.manifest["vendoring"]["url"], "+archive", revision + ".tar.gz"] + ) diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py new file mode 100644 index 0000000000..30fb0e16a5 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/mach_commands.py @@ -0,0 +1,232 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, # You can obtain one at http://mozilla.org/MPL/2.0/. + +import logging +import sys + +from mach.decorators import Command, CommandArgument, SubCommand + +from mozbuild.vendor.moz_yaml import MozYamlVerifyError, load_moz_yaml + + +# Fun quirk of ./mach - you can specify a default argument as well as subcommands. +# If the default argument matches a subcommand, the subcommand gets called. If it +# doesn't, we wind up in the default command. +@Command( + "vendor", + category="misc", + description="Vendor third-party dependencies into the source repository.", +) +@CommandArgument( + "--check-for-update", + action="store_true", + help="For scripted use, prints the new commit to update to, or nothing if up to date.", + default=False, +) +@CommandArgument( + "--add-to-exports", + action="store_true", + help="Will attempt to add new header files into any relevant EXPORTS block.", + default=False, +) +@CommandArgument( + "--ignore-modified", + action="store_true", + help="Ignore modified files in current checkout.", + default=False, +) +@CommandArgument("-r", "--revision", help="Repository tag or commit to update to.") +@CommandArgument( + "-f", + "--force", + action="store_true", + help="Force a re-vendor even if we're up to date", +) +@CommandArgument( + "--verify", "-v", action="store_true", help="(Only) verify the manifest." +) +@CommandArgument( + "--patch-mode", + help="Select how vendored patches will be imported. 'none' skips patch import, and" + "'only' imports patches and skips library vendoring.", + default="", +) +@CommandArgument("library", nargs=1, help="The moz.yaml file of the library to vendor.") +def vendor( + command_context, + library, + revision, + ignore_modified=False, + check_for_update=False, + add_to_exports=False, + force=False, + verify=False, + patch_mode="", +): + """ + Vendor third-party dependencies into the source repository. + + Vendoring rust and python can be done with ./mach vendor [rust/python]. + Vendoring other libraries can be done with ./mach vendor [arguments] path/to/file.yaml + """ + library = library[0] + assert library not in ["rust", "python"] + + command_context.populate_logger() + command_context.log_manager.enable_unstructured() + if check_for_update: + logging.disable(level=logging.CRITICAL) + + try: + manifest = load_moz_yaml(library) + if verify: + print("%s: OK" % library) + sys.exit(0) + except MozYamlVerifyError as e: + print(e) + sys.exit(1) + + if "vendoring" not in manifest: + raise Exception( + "Cannot perform update actions if we don't have a 'vendoring' section in the moz.yaml" + ) + + if patch_mode and patch_mode not in ["none", "only"]: + print( + "Unknown patch mode given '%s'. Please use one of: 'none' or 'only'." + % patch_mode + ) + sys.exit(1) + if ( + manifest["vendoring"].get("patches", []) + and not patch_mode + and not check_for_update + ): + print( + "Patch mode was not given when required. Please use one of: 'none' or 'only'" + ) + sys.exit(1) + if patch_mode == "only" and not manifest["vendoring"].get("patches", []): + print( + "Patch import was specified for %s but there are no vendored patches defined." + % library + ) + sys.exit(1) + + if not ignore_modified and not check_for_update: + check_modified_files(command_context) + elif ignore_modified and not check_for_update: + print( + "Because you passed --ignore-modified we will not be " + + "able to detect spurious upstream updates." 
+ ) + + if not revision: + revision = "HEAD" + + from mozbuild.vendor.vendor_manifest import VendorManifest + + vendor_command = command_context._spawn(VendorManifest) + vendor_command.vendor( + command_context, + library, + manifest, + revision, + ignore_modified, + check_for_update, + force, + add_to_exports, + patch_mode, + ) + + sys.exit(0) + + +def check_modified_files(command_context): + """ + Ensure that there aren't any uncommitted changes to files + in the working copy, since we're going to change some state + on the user. + """ + modified = command_context.repository.get_changed_files("M") + if modified: + command_context.log( + logging.ERROR, + "modified_files", + {}, + """You have uncommitted changes to the following files: + +{files} + +Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`. +""".format( + files="\n".join(sorted(modified)) + ), + ) + sys.exit(1) + + +# ===================================================================== + + +@SubCommand( + "vendor", + "rust", + description="Vendor rust crates from crates.io into third_party/rust", +) +@CommandArgument( + "--ignore-modified", + action="store_true", + help="Ignore modified files in current checkout", + default=False, +) +@CommandArgument( + "--build-peers-said-large-imports-were-ok", + action="store_true", + help=( + "Permit overly-large files to be added to the repository. " + "To get permission to set this, raise a question in the #build " + "channel at https://chat.mozilla.org." + ), + default=False, +) +@CommandArgument( + "--issues-json", + help="Path to a code-review issues.json file to write out", +) +def vendor_rust(command_context, **kwargs): + from mozbuild.vendor.vendor_rust import VendorRust + + vendor_command = command_context._spawn(VendorRust) + issues_json = kwargs.pop("issues_json", None) + ok = vendor_command.vendor(**kwargs) + if issues_json: + with open(issues_json, "w") as fh: + fh.write(vendor_command.serialize_issues_json()) + sys.exit(0 if ok else 1) + + +# ===================================================================== + + +@SubCommand( + "vendor", + "python", + description="Vendor Python packages from pypi.org into third_party/python. " + "Some extra files like docs and tests will automatically be excluded. " + "Installs the packages listed in third_party/python/requirements.in and " + "their dependencies.", + virtualenv_name="vendor", +) +@CommandArgument( + "--keep-extra-files", + action="store_true", + default=False, + help="Keep all files, including tests and documentation.", +) +def vendor_python(command_context, keep_extra_files): + from mozbuild.vendor.vendor_python import VendorPython + + vendor_command = command_context._spawn(VendorPython) + vendor_command.vendor(keep_extra_files) diff --git a/python/mozbuild/mozbuild/vendor/moz.build b/python/mozbuild/mozbuild/vendor/moz.build new file mode 100644 index 0000000000..315dc32600 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/moz.build @@ -0,0 +1,8 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +with Files("**"): + BUG_COMPONENT = ("Developer Infrastructure", "Mach Vendor & Updatebot") diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py new file mode 100644 index 0000000000..51210e19b2 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py @@ -0,0 +1,770 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Utility package for working with moz.yaml files. +# +# Requires `pyyaml` and `voluptuous` +# (both are in-tree under third_party/python) + +import errno +import os +import re + +import voluptuous +import yaml +from voluptuous import ( + All, + Boolean, + FqdnUrl, + In, + Invalid, + Length, + Match, + Msg, + Required, + Schema, + Unique, +) +from yaml.error import MarkedYAMLError + +# TODO ensure this matches the approved list of licenses +VALID_LICENSES = [ + # Standard Licenses (as per https://spdx.org/licenses/) + "Apache-2.0", + "BSD-2-Clause", + "BSD-3-Clause", + "BSD-3-Clause-Clear", + "BSL-1.0", + "CC0-1.0", + "ISC", + "ICU", + "LGPL-2.1", + "LGPL-3.0", + "MIT", + "MPL-1.1", + "MPL-2.0", + "Unlicense", + "WTFPL", + "Zlib", + # Unique Licenses + "ACE", # http://www.cs.wustl.edu/~schmidt/ACE-copying.html + "Anti-Grain-Geometry", # http://www.antigrain.com/license/index.html + "JPNIC", # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html + "Khronos", # https://www.khronos.org/openmaxdl + "libpng", # http://www.libpng.org/pub/png/src/libpng-LICENSE.txt + "Unicode", # http://www.unicode.org/copyright.html +] + +VALID_SOURCE_HOSTS = ["gitlab", "googlesource", "github", "angle", "codeberg"] + +""" +--- +# Third-Party Library Template +# All fields are mandatory unless otherwise noted + +# Version of this schema +schema: 1 + +bugzilla: + # Bugzilla product and component for this directory and subdirectories + product: product name + component: component name + +# Document the source of externally hosted code +origin: + + # Short name of the package/library + name: name of the package + + description: short (one line) description + + # Full URL for the package's homepage/etc + # Usually different from repository url + url: package's homepage url + + # Human-readable identifier for this version/release + # Generally "version NNN", "tag SSS", "bookmark SSS" + release: identifier + + # Revision to pull in + # Must be a long or short commit SHA (long preferred) + revision: sha + + # The package's license, where possible using the mnemonic from + # https://spdx.org/licenses/ + # Multiple licenses can be specified (as a YAML list) + # A "LICENSE" file must exist containing the full license text + license: MPL-2.0 + + # If the package's license is specified in a particular file, + # this is the name of the file. + # optional + license-file: COPYING + + # If there are any mozilla-specific notes you want to put + # about a library, they can be put here. + notes: Notes about the library + +# Configuration for the automated vendoring system. +# optional +vendoring: + + # Repository URL to vendor from + # eg. https://github.com/kinetiknz/nestegg + # Any repository host can be specified here, however initially we'll only + # support automated vendoring from selected sources.
+ url: source url (generally repository clone url) + + # Type of hosting for the upstream repository + # Valid values are 'gitlab', 'github', 'googlesource' + source-hosting: gitlab + + # Type of Vendoring + # This is either 'regular', 'individual-files', or 'rust' + # If omitted, will default to 'regular' + flavor: rust + + # Type of git reference (commit, tag) to track updates from. + # You cannot use tag tracking with the individual-files flavor + # If omitted, will default to tracking commits. + tracking: commit + + # Base directory of the location where the source files will live in-tree. + # If omitted, will default to the location the moz.yaml file is in. + vendor-directory: third_party/directory + + # Allows skipping certain steps of the vendoring process. + # Most useful if e.g. vendoring upstream is complicated and should be done by a script + # The valid steps that can be skipped are listed below + skip-vendoring-steps: + - fetch + - keep + - include + - exclude + - move-contents + - hg-add + - spurious-check + - update-moz-yaml + - update-moz-build + + # List of patch files to apply after vendoring. Applied in the order + # specified, and alphabetically if globbing is used. Patches must apply + # cleanly before changes are pushed. + # Patch files should be relative to the vendor-directory rather than the gecko + # root directory. + # All patch files are implicitly added to the keep file list. + # optional + patches: + - file + - path/to/file + - path/*.patch + - path/** # Captures all files and subdirectories below path + - path/* # Captures all files but _not_ subdirectories below path. Equivalent to `path/` + + # List of files that are not removed from the destination directory while vendoring + # in a new version of the library. Intended for mozilla files not present in upstream. + # Implicitly contains "moz.yaml", "moz.build", and any files referenced in + # "patches" + # optional + keep: + - file + - path/to/file + - another/path + - *.mozilla + + # Files/paths that will not be vendored from the upstream repository + # Implicitly contains ".git", and ".gitignore" + # optional + exclude: + - file + - path/to/file + - another/path + - docs + - src/*.test + + # Files/paths that will always be vendored from source repository, even if + # they would otherwise be excluded by "exclude". + # optional + include: + - file + - path/to/file + - another/path + - docs/LICENSE.* + + # Files that are modified as part of the update process. + # To avoid creating updates that don't update anything, ./mach vendor will detect + # if any in-tree files have changed. If there are files that are always changed + # during an update process (e.g. version numbers or source revisions), list them + # here to avoid having them counted as substantive changes. + # This field does NOT support directories or globbing + # optional + generated: + - '{yaml_dir}/vcs_version.h' + + # If neither "exclude" nor "include" is set, all files will be vendored + # Files/paths in "include" will always be vendored, even if excluded + # eg. excluding "docs/" then including "docs/LICENSE" will vendor just the + # LICENSE file from the docs directory + + # All three file/path parameters ("keep", "exclude", and "include") support + # filenames, directory names, and globs/wildcards. + + # Actions to take after updating. Applied in order. + # The action subfield is required.
It must be one of: + # - copy-file + # - move-file + # - move-dir + # - replace-in-file + # - replace-in-file-regex + # - delete-path + # - run-script + # Unless otherwise noted, all subfields of action are required. + # + # If the action is copy-file, move-file, or move-dir: + # from is the source file + # to is the destination + # + # If the action is replace-in-file or replace-in-file-regex: + # pattern is what to search for in the file. It is an exact string match. + # with is the string to replace it with. Accepts the special keyword + # '{revision}' for the commit we are updating to. + # file is the file to replace it in. + # + # If the action is delete-path: + # path is the file or directory to recursively delete + # + # If the action is run-script: + # script is the script to run + # cwd is the directory the script should run with as its cwd + # args is a list of arguments to pass to the script + # + # If the action is run-command: + # command is the command to run + # Unlike run-script, `command` is _not_ processed to be relative + # to the vendor directory, and is passed directly to python's + # execution code without any path substitution or manipulation + # cwd is the directory the command should run with as its cwd + # args is a list of arguments to pass to the command + # + # + # Unless specified otherwise, all files/directories are relative to the + # vendor-directory. If the vendor-directory is different from the + # directory of the yaml file, the keyword '{yaml_dir}' may be used + # to make the path relative to that directory. + # 'run-script' supports the additional keyword {cwd} which, if used, + # must only be used at the beginning of the path. + # + # optional + update-actions: + - action: copy-file + from: include/vcs_version.h.in + to: '{yaml_dir}/vcs_version.h' + + - action: replace-in-file + pattern: '@VCS_TAG@' + with: '{revision}' + file: '{yaml_dir}/vcs_version.h' + + - action: delete-path + path: '{yaml_dir}/config' + + - action: run-script + script: '{cwd}/generate_sources.sh' + cwd: '{yaml_dir}' + + +# Configuration for automatic updating system. +# optional +updatebot: + + # TODO: allow multiple users to be specified + # Phabricator username for a maintainer of the library, used for assigning + # reviewers. For a review group, preface with #, such as "#build" + maintainer-phab: tjr + + # Bugzilla email address for a maintainer of the library, used for needinfos + maintainer-bz: tom@mozilla.com + + # Optional: A preset for ./mach try to use. If present, fuzzy-query and fuzzy-paths will + # be ignored. If it, fuzzy-query, and fuzzy-paths are omitted, ./mach try auto will be used + try-preset: media + + # Optional: A query string for ./mach try fuzzy. If try-preset, it and fuzzy-paths are omitted + # then ./mach try auto will be used + fuzzy-query: media + + # Optional: An array of test paths for ./mach try fuzzy. If try-preset, it and fuzzy-query are + # omitted then ./mach try auto will be used + fuzzy-paths: ['media'] + + # The tasks that Updatebot can run.
Only one of each task is currently permitted + # optional + tasks: + - type: commit-alert + branch: upstream-branch-name + cc: ["bugzilla@email.address", "another@example.com"] + needinfo: ["bugzilla@email.address", "another@example.com"] + enabled: True + filter: security + frequency: every + platform: windows + blocking: 1234 + - type: vendoring + branch: master + enabled: False + + # frequency can be 'every', 'release', 'N weeks', 'N commits' + # or 'N weeks, M commits', which requires satisfying both constraints. + frequency: 2 weeks +""" + +RE_SECTION = re.compile(r"^(\S[^:]*):").search +RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search + + +class MozYamlVerifyError(Exception): + def __init__(self, filename, error): + self.filename = filename + self.error = error + + def __str__(self): + return "%s: %s" % (self.filename, self.error) + + +def load_moz_yaml(filename, verify=True, require_license_file=True): + """Loads and verifies the specified manifest.""" + + # Load and parse YAML. + try: + with open(filename, "r") as f: + manifest = yaml.load(f, Loader=yaml.BaseLoader) + except IOError as e: + if e.errno == errno.ENOENT: + raise MozYamlVerifyError(filename, "Failed to find manifest: %s" % filename) + raise + except MarkedYAMLError as e: + raise MozYamlVerifyError(filename, e) + + if not verify: + return manifest + + # Verify schema. + if "schema" not in manifest: + raise MozYamlVerifyError(filename, 'Missing manifest "schema"') + if manifest["schema"] == "1": + schema = _schema_1() + schema_additional = _schema_1_additional + schema_transform = _schema_1_transform + else: + raise MozYamlVerifyError(filename, "Unsupported manifest schema") + + try: + schema(manifest) + schema_additional(filename, manifest, require_license_file=require_license_file) + manifest = schema_transform(manifest) + except (voluptuous.Error, ValueError) as e: + raise MozYamlVerifyError(filename, e) + + return manifest + + +def _schema_1(): + """Returns Voluptuous Schema object.""" + return Schema( + { + Required("schema"): "1", + Required("bugzilla"): { + Required("product"): All(str, Length(min=1)), + Required("component"): All(str, Length(min=1)), + }, + "origin": { + Required("name"): All(str, Length(min=1)), + Required("description"): All(str, Length(min=1)), + "notes": All(str, Length(min=1)), + Required("url"): FqdnUrl(), + Required("license"): Msg(License(), msg="Unsupported License"), + "license-file": All(str, Length(min=1)), + Required("release"): All(str, Length(min=1)), + # The following regex defines a valid git reference + # The first group [^ ~^:?*[\]] matches 0 or more times anything + # that isn't a Space, ~, ^, :, ?, *, [, or ] + # The second group [^ ~^:?*[\]\.]+ matches 1 or more times + # anything that isn't a Space, ~, ^, :, ?, *, [, ], or .
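+            # A few illustrative examples of this pattern (added for clarity;
+            # they are not part of the schema itself):
+            #   "0f1e2d3c"  -> accepted (commit sha)
+            #   "v1.2.3"    -> accepted (tag; ends with a non-dot)
+            #   "release."  -> rejected (may not end with a '.')
+            #   "a b"       -> rejected (may not contain a space)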
+ "revision": Match(r"^[^ ~^:?*[\]]*[^ ~^:?*[\]\.]+$"), + }, + "updatebot": { + Required("maintainer-phab"): All(str, Length(min=1)), + Required("maintainer-bz"): All(str, Length(min=1)), + "try-preset": All(str, Length(min=1)), + "fuzzy-query": All(str, Length(min=1)), + "fuzzy-paths": All([str], Length(min=1)), + "tasks": All( + UpdatebotTasks(), + [ + { + Required("type"): In( + ["vendoring", "commit-alert"], + msg="Invalid type specified in tasks", + ), + "branch": All(str, Length(min=1)), + "enabled": Boolean(), + "cc": Unique([str]), + "needinfo": Unique([str]), + "filter": In( + ["none", "security", "source-extensions"], + msg="Invalid filter value specified in tasks", + ), + "source-extensions": Unique([str]), + "blocking": Match(r"^[0-9]+$"), + "frequency": Match( + r"^(every|release|[1-9][0-9]* weeks?|[1-9][0-9]* commits?|" + + r"[1-9][0-9]* weeks?, ?[1-9][0-9]* commits?)$" + ), + "platform": Match(r"^(windows|linux)$"), + } + ], + ), + }, + "vendoring": { + Required("url"): FqdnUrl(), + Required("source-hosting"): All( + str, + Length(min=1), + In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"), + ), + "tracking": Match(r"^(commit|tag)$"), + "flavor": Match(r"^(regular|rust|individual-files)$"), + "skip-vendoring-steps": Unique([str]), + "vendor-directory": All(str, Length(min=1)), + "patches": Unique([str]), + "keep": Unique([str]), + "exclude": Unique([str]), + "include": Unique([str]), + "generated": Unique([str]), + "individual-files": [ + { + Required("upstream"): All(str, Length(min=1)), + Required("destination"): All(str, Length(min=1)), + } + ], + "individual-files-default-upstream": All(str, Length(min=1)), + "individual-files-default-destination": All(str, Length(min=1)), + "individual-files-list": Unique([str]), + "update-actions": All( + UpdateActions(), + [ + { + Required("action"): In( + [ + "copy-file", + "move-file", + "move-dir", + "replace-in-file", + "replace-in-file-regex", + "run-script", + "run-command", + "delete-path", + ], + msg="Invalid action specified in update-actions", + ), + "from": All(str, Length(min=1)), + "to": All(str, Length(min=1)), + "pattern": All(str, Length(min=1)), + "with": All(str, Length(min=1)), + "file": All(str, Length(min=1)), + "script": All(str, Length(min=1)), + "command": All(str, Length(min=1)), + "args": All([All(str, Length(min=1))]), + "cwd": All(str, Length(min=1)), + "path": All(str, Length(min=1)), + } + ], + ), + }, + } + ) + + +def _schema_1_additional(filename, manifest, require_license_file=True): + """Additional schema/validity checks""" + + vendor_directory = os.path.dirname(filename) + if "vendoring" in manifest and "vendor-directory" in manifest["vendoring"]: + vendor_directory = manifest["vendoring"]["vendor-directory"] + + # LICENSE file must exist, except for Rust crates which are exempted + # because the license is required to be specified in the Cargo.toml file + if require_license_file and "origin" in manifest: + files = [f.lower() for f in os.listdir(vendor_directory)] + if ( + not ( + "license-file" in manifest["origin"] + and manifest["origin"]["license-file"].lower() in files + ) + and not ( + "license" in files + or "license.txt" in files + or "license.rst" in files + or "license.html" in files + or "license.md" in files + ) + and not ( + "vendoring" in manifest + and manifest["vendoring"].get("flavor", "regular") == "rust" + ) + ): + license = manifest["origin"]["license"] + if isinstance(license, list): + license = "/".join(license) + raise ValueError("Failed to find %s LICENSE file" % 
license) + + # Cannot vendor without an origin. + if "vendoring" in manifest and "origin" not in manifest: + raise ValueError('"vendoring" requires an "origin"') + + # Cannot vendor without a computer-readable revision. + if "vendoring" in manifest and "revision" not in manifest["origin"]: + raise ValueError( + 'If "vendoring" is present, "revision" must be present in "origin"' + ) + + # The Rust and Individual Flavor type precludes a lot of options + # individual-files could, in theory, use several of these, but until we have a use case let's + # disallow them so we're not worrying about whether they work. When we need them we can make + # sure they do. + if ( + "vendoring" in manifest + and manifest["vendoring"].get("flavor", "regular") != "regular" + ): + for i in [ + "skip-vendoring-steps", + "keep", + "exclude", + "include", + "generated", + ]: + if i in manifest["vendoring"]: + raise ValueError("A non-regular flavor of update cannot use '%s'" % i) + + if manifest["vendoring"].get("flavor", "regular") == "rust": + for i in [ + "update-actions", + ]: + if i in manifest["vendoring"]: + raise ValueError("A rust flavor of update cannot use '%s'" % i) + + # Ensure that only individual-files flavor uses those options + if ( + "vendoring" in manifest + and manifest["vendoring"].get("flavor", "regular") != "individual-files" + ): + if ( + "individual-files" in manifest["vendoring"] + or "individual-files-list" in manifest["vendoring"] + ): + raise ValueError( + "Only individual-files flavor of update can use 'individual-files'" + ) + + # Ensure that the individual-files flavor has all the correct options + if ( + "vendoring" in manifest + and manifest["vendoring"].get("flavor", "regular") == "individual-files" + ): + # Because the only way we can determine the latest tag is by doing a local clone, + # we don't want to do that for individual-files flavors because those flavors are + # usually on gigantic repos we don't want to clone for such a simple thing. + if manifest["vendoring"].get("tracking", "commit") == "tag": + raise ValueError( + "You cannot use tag tracking with the individual-files flavor. (Sorry.)" + ) + + # We need either individual-files or individual-files-list + if ( + "individual-files" not in manifest["vendoring"] + and "individual-files-list" not in manifest["vendoring"] + ): + raise ValueError( + "The individual-files flavor must include either " + + "'individual-files' or 'individual-files-list'" + ) + # For whichever we have, make sure we don't have the other and we don't have + # options we shouldn't or lack ones we should. 
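+        # Two illustrative (hypothetical) manifest fragments, added for
+        # clarity; exactly one of these shapes is accepted:
+        #
+        #   individual-files:
+        #     - upstream: src/a.h
+        #       destination: include/a.h
+        #
+        #   individual-files-list:
+        #     - a.h
+        #   individual-files-default-upstream: src/
+        #   individual-files-default-destination: include/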
+ if "individual-files" in manifest["vendoring"]: + if "individual-files-list" in manifest["vendoring"]: + raise ValueError( + "individual-files-list is mutually exclusive with individual-files" + ) + if "individual-files-default-upstream" in manifest["vendoring"]: + raise ValueError( + "individual-files-default-upstream can only be used with individual-files-list" + ) + if "individual-files-default-destination" in manifest["vendoring"]: + raise ValueError( + "individual-files-default-destination can only be used " + + "with individual-files-list" + ) + if "individual-files-list" in manifest["vendoring"]: + if "individual-files" in manifest["vendoring"]: + raise ValueError( + "individual-files is mutually exclusive with individual-files-list" + ) + if "individual-files-default-upstream" not in manifest["vendoring"]: + raise ValueError( + "individual-files-default-upstream must be used with individual-files-list" + ) + if "individual-files-default-destination" not in manifest["vendoring"]: + raise ValueError( + "individual-files-default-destination must be used with individual-files-list" + ) + + if "updatebot" in manifest: + # If there are Updatebot tasks, then certain fields must be present and + # defaults need to be set. + if "tasks" in manifest["updatebot"]: + if "vendoring" not in manifest or "url" not in manifest["vendoring"]: + raise ValueError( + "If Updatebot tasks are specified, a vendoring url must be included." + ) + + if "try-preset" in manifest["updatebot"]: + for f in ["fuzzy-query", "fuzzy-paths"]: + if f in manifest["updatebot"]: + raise ValueError( + "If 'try-preset' is specified, then %s cannot be" % f + ) + + # Check for a simple YAML file + with open(filename, "r") as f: + has_schema = False + for line in f.readlines(): + m = RE_SECTION(line) + if m: + if m.group(1) == "schema": + has_schema = True + break + if not has_schema: + raise ValueError("Not simple YAML") + + +# Do type conversion for the few things that need it. 
+# Everything is parsed as a string to (a) not cause problems with revisions that +# are only numerals and (b) not strip leading zeros from the numbers if we just +# converted them to strings +def _schema_1_transform(manifest): + if "updatebot" in manifest: + if "tasks" in manifest["updatebot"]: + for i in range(len(manifest["updatebot"]["tasks"])): + if "enabled" in manifest["updatebot"]["tasks"][i]: + val = manifest["updatebot"]["tasks"][i]["enabled"] + manifest["updatebot"]["tasks"][i]["enabled"] = ( + val.lower() == "true" or val.lower() == "yes" + ) + return manifest + + +class UpdateActions(object): + """Voluptuous validator which verifies the update action(s) are valid.""" + + def __call__(self, values): + for v in values: + if "action" not in v: + raise Invalid("All file-update entries must specify a valid action") + if v["action"] in ["copy-file", "move-file", "move-dir"]: + if "from" not in v or "to" not in v or len(v.keys()) != 3: + raise Invalid( + "%s action must (only) specify 'from' and 'to' keys" + % v["action"] + ) + elif v["action"] in ["replace-in-file", "replace-in-file-regex"]: + if ( + "pattern" not in v + or "with" not in v + or "file" not in v + or len(v.keys()) != 4 + ): + raise Invalid( + "replace-in-file action must (only) specify " + + "'pattern', 'with', and 'file' keys" + ) + elif v["action"] == "delete-path": + if "path" not in v or len(v.keys()) != 2: + raise Invalid( + "delete-path action must (only) specify the 'path' key" + ) + elif v["action"] == "run-script": + if "script" not in v or "cwd" not in v: + raise Invalid( + "run-script action must specify 'script' and 'cwd' keys" + ) + if set(v.keys()) - set(["args", "cwd", "script", "action"]) != set(): + raise Invalid( + "run-script action may only specify 'script', 'cwd', and 'args' keys" + ) + elif v["action"] == "run-command": + if "command" not in v or "cwd" not in v: + raise Invalid( + "run-command action must specify 'command' and 'cwd' keys" + ) + if set(v.keys()) - set(["args", "cwd", "command", "action"]) != set(): + raise Invalid( + "run-command action may only specify 'command', 'cwd', and 'args' keys" + ) + else: + # This check occurs before the validator above, so the above is + # redundant but we leave it to be verbose. + raise Invalid("Supplied action " + v["action"] + " is invalid.") + return values + + def __repr__(self): + return "UpdateActions" + + +class UpdatebotTasks(object): + """Voluptuous validator which verifies the updatebot task(s) are valid.""" + + def __call__(self, values): + seenTaskTypes = set() + for v in values: + if "type" not in v: + raise Invalid("All updatebot tasks must specify a valid type") + + if v["type"] in seenTaskTypes: + raise Invalid("Only one type of each task is currently supported") + seenTaskTypes.add(v["type"]) + + if v["type"] == "vendoring": + for i in ["filter", "branch", "source-extensions"]: + if i in v: + raise Invalid( + "'%s' is only valid for commit-alert task types" % i + ) + elif v["type"] == "commit-alert": + pass + else: + # This check occurs before the validator above, so the above is + # redundant but we leave it to be verbose.
+ raise Invalid("Supplied type " + v["type"] + " is invalid.") + return values + + def __repr__(self): + return "UpdatebotTasks" + + +class License(object): + """Voluptuous validator which verifies the license(s) are valid as per our + allow list.""" + + def __call__(self, values): + if isinstance(values, str): + values = [values] + elif not isinstance(values, list): + raise Invalid("Must be string or list") + for v in values: + if v not in VALID_LICENSES: + raise Invalid("Bad License") + return values + + def __repr__(self): + return "License" diff --git a/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py b/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py new file mode 100644 index 0000000000..8163c05dc3 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py @@ -0,0 +1,1286 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Utility package for working with moz.build files. +# +# Requires `pyyaml` and `voluptuous` +# (both are in-tree under third_party/python) + +""" +Problem: + ./mach vendor needs to be able to add or remove files from moz.build files automatically to + be able to effectively update a library automatically and send useful try runs in. + + So far, it has been difficult to do that. + + Why: + - Some files need to go into UNIFIED_SOURCES vs SOURCES + - Some files are os-specific, and need to go into per-OS conditionals + - Some files are both UNIFIED_SOURCES/SOURCES sensitive and OS-specific. + +Proposal: + Design an algorithm that maps a third party library file to a suspected moz.build location. + Run the algorithm on all files specified in all third party libraries' moz.build files. + See if the proposed place in the moz.build file matches the actual place. + +Initial Algorithm + Given a file, which includes the filename and the path from gecko root, we want to find the + correct moz.build file and location within that file. + Take the path of the file, and iterate up the directory tree, looking for moz.build files as + we go. + Consider each of these moz.build files, starting with the one closest to the file. + Within a moz.build file, identify the SOURCES or UNIFIED_SOURCES block(s) that contains a file + in the same directory path as the file to be added. + If there is only one such block, use that one. + If there are multiple blocks, look at the files within each block and note the longest length + of a common prefix (including partial filenames - if we just did full directories the + result would be the same as the prior step and we would not narrow the results down). Use + the block containing the longest prefix. (We call this 'guessing'.) + +Result of the proposal: + The initial implementation works on 1675 of 1977 eligible files. + The files it does not work on include: + - general failures. Such as when we find that avutil.cpp wants to be next to adler32.cpp + but avutil.cpp is in SOURCES and adler32.cpp is in UNIFIED_SOURCES. (And many similar + cases.) + - per-cpu-feature files, where only a single file is added under a conditional + - When guessing, because of a len(...) > longest_so_far comparison, we would prefer the + first block we found.
+ - Changing this to prefer UNIFIED_SOURCES in the event of a tie + yielded 17 additional correct assignments (about a 1% improvement) + - As a result of the change immediately above, when guessing, because given equal + prefixes, we would prefer a UNIFIED_SOURCES block over other blocks, even if the other + blocks are longer + - Changing this (again) to prefer the block containing more files yielded 49 additional + correct assignments (about a 2.5% improvement) + + The files that are ineligible for consideration are: + - Those in libwebrtc + - Those specified in source assignments composed of generators (e.g. [f for f in '%.c']) + - Those specified in source assignments to subscripted variables + (e.g. SOURCES += foo['x86_files']) + + We needed to iterate up the directory and look at a different moz.build file _zero_ times. + This indicates this code is probably not needed, and therefore we will remove it from the + algorithm. + We needed to guess based on the longest prefix 944 times, indicating that this code is + absolutely crucial and should be double-checked. (And indeed, upon double-checking it, + bugs were identified.) + + After some initial testing, it was determined that this code completely fell down when the + vendoring directory differed from the moz.yaml directory (definitions below). The code was + slightly refactored to handle this case, primarily by (a) re-inserting the logic to check + multiple moz.build files instead of the first and (b) handling some complicated normalization + notions (details in comments). + +Slightly Improved Algorithm Changes: + Don't bother iterating up the directory tree looking for moz.build files, just take the first. + When guessing, in the event of a common-prefix tie, prefer the block containing more files + + With these changes, we now successfully matched 1724 of 1977 files + +CODE CONCEPTS + +source-assignment + An assignment of files to a SOURCES or UNIFIED_SOURCES variable, such as + SOURCES += ['ffpvx.cpp'] + + We specifically look only for these two variable names to avoid identifying things + such as CXX_FLAGS. + + Sometimes, however, there is an intermediary variable, such as `SOURCES += celt_filenames` + In this situation we find the celt_filenames assignment, and treat it as a 'source-assignment' + +source-assignment-location + source-assignment-location is a human-readable string that identifies where in the moz.build + file the source-assignment is. It can be used to visually match the location upon manual + inspection, and given a source-assignment-location, re-identify it when iterating over all + source-assignments in a file. + + The actual string consists of the path from the root of the moz.build file to the + source-assignment, plus a suffix number. + + We suffix the final value with an incrementing counter. This is to support moz.build files + that, for whatever reason, use multiple SOURCES += [] lists in the same basic block. This index + is per-file, so no two assignments in the same file (even if they have separate locations) + should have the same suffix. + + For example: + + When `SOURCES += ['ffpvx.cpp']` appears as the first line of the file (or any other + unindented-location) its source-assignment-location will be `> SOURCES 1`.
+ + When `SOURCES += ['ffpvx.cpp']` appears inside a conditional such as + `CONFIG['OS_TARGET'] == 'WINNT'` then its source-assignment-location will be + `> if CONFIG['OS_TARGET'] == 'WINNT' > SOURCES 1` + + When SOURCES += ['ffpvx.cpp'] appears as the second line of the file, and a different + SOURCES += [] was the first line, then its source-assignment-location will be "> SOURCES 2". + + No two source-assignments may have the same source-assignment-location. If they do, we raise + an assert. + +file vs filename + a 'filename' is a string specifying the name and sometimes the path of a file. + a 'file' is an object you get from open()-ing a filename + + A variable that is a string should always use 'filename' + +vendoring directory vs moz.yaml directory + In many cases, a library's moz.yaml file, moz.build file(s), and source files will all live + under a single directory. e.g. libjpeg + + In other cases, a library's source files are in one directory (we call this the 'vendoring + directory') and the moz.yaml file and moz.build file(s) are in another directory (we call this + the moz.yaml directory). e.g. libdav1d + +normalized-filename + A filename is 'normalized' if it has been expanded to the full path from the gecko root. This + requires a moz.build file. + + For example a filename `lib/opus.c` may be specified inside the `media/libopus/moz.build` + file. The filename is normalized by os.path.join()-ing the dirname of the moz.build file + (i.e. `media/libopus`) to the filename, resulting in `media/libopus/lib/opus.c` + + A filename that begins with '/' is presumed to already be specified relative to the gecko + root, and therefore is not modified. + + Normalization gets more complicated when dealing with separate vendoring and moz.yaml + directories. This is because a file can be considered normalized when it looks like + third_party/libdav1d/src/a.cpp + _or_ when it looks like + media/libdav1d/../../third_party/libdav1d/src/a.cpp + This is because in the moz.build file, it will be specified as + `../../third_party/libdav1d/src/a.cpp` and we 'normalize' it by prepending the path to the + moz.build file. + + Normalization is not just about having an 'absolute' path from gecko_root to file. In fact + it's not really about that at all - it's about matching filenames. Therefore when we are + dealing with separate vendoring and moz.yaml directories we will very quickly 're-normalize' + a normalized filename to get it into one of those foo/bar/../../third_party/... paths that + will make sense for the moz.build file we are interested in. + + Whenever a filename is normalized, it should be specified as such in the variable name, + either as a prefix (normalized_filename) or a suffix (target_filename_normalized) + +statistic + Using some hacky stuff, we report statistics about how many times we hit certain branches of + the code. + e.g. + - "How many times did we refine a guess based on prefix length" + - "How many times did we refine a guess based on the number of files in the block" + - "What is the histogram of guess candidates" + + We do this to identify how frequently certain code paths were taken, allowing us to identify + strange behavior and investigate outliers. This process led to identifying bugs and small + improvements.
+""" + +import ast +import copy +import os +import re +import shutil +import subprocess +import sys +from pprint import pprint + +try: + from mozbuild.frontend.sandbox import alphabetical_sorted +except Exception: + + def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False): + return sorted(iterable, key=key, reverse=reverse) + + +# This can be edited to enable better Python 3.8 behavior, but is set so that +# everything is consistent by default so errors can be detected more easily. +FORCE_DOWNGRADE_BEHAVIOR = True + +statistics = { + "guess_candidates": {}, + "number_refinements": {}, + "needed_to_guess": 0, + "length_logic": {}, +} + + +def log(*args, **kwargs): + # It is helpful to keep some logging statements around, but we don't want to print them + # unless we are debugging + # print(*args, **kwargs) + pass + + +############################################## + +import inspect + + +def node_to_name(code, node): + if ( + not FORCE_DOWNGRADE_BEHAVIOR + and sys.version_info[0] >= 3 + and sys.version_info[1] >= 8 + ): + return ast.get_source_segment(code, node) + + return node.__class__.__name__ + + +def get_attribute_label(node): + assert isinstance(node, ast.Attribute) + + label = "" + subtarget = node + while isinstance(subtarget, ast.Attribute): + label = subtarget.attr + ("." if label else "") + label + subtarget = subtarget.value + + if isinstance(subtarget, ast.Name): + label = subtarget.id + "." + label + elif isinstance(subtarget, ast.Subscript) and isinstance(subtarget.value, ast.Name): + label = subtarget.value.id + "." + label + else: + raise Exception( + "Unexpected subtarget of type %s found in get_attribute_label. label=%s" + % (subtarget, label) + ) + + return label + + +def ast_get_source_segment(code, node): + caller = inspect.stack()[1] + + if "sphinx" in caller.filename or ( + not FORCE_DOWNGRADE_BEHAVIOR + and sys.version_info[0] >= 3 + and sys.version_info[1] >= 8 + ): + return ast.original_get_source_segment(code, node) + + if caller.function == "assignment_node_to_source_filename_list": + return "" + + raise Exception( + "ast_get_source_segment is not available with this Python version. (ver=%s.%s, caller=%s)" + % (sys.version_info.major, sys.version_info.minor, caller.function) + ) + + +# Overwrite it so we don't accidentally use it +if sys.version_info[0] >= 3 and sys.version_info[1] >= 8: + ast.original_get_source_segment = ast.get_source_segment + ast.get_source_segment = ast_get_source_segment + + +############################################## + + +def node_to_readable_file_location(code, node, child_node=None): + location = "" + + if isinstance(node.parent, ast.Module): + # The next node up is the root, don't go higher. + pass + else: + location += node_to_readable_file_location(code, node.parent, node) + + location += " > " + if isinstance(node, ast.Module): + raise Exception("We shouldn't see a Module") + elif isinstance(node, ast.If): + assert child_node + if child_node in node.body: + location += "if " + node_to_name(code, node.test) + else: + location += "else-of-if " + node_to_name(code, node.test) + elif isinstance(node, ast.For): + location += ( + "for " + + node_to_name(code, node.target) + + " in " + + node_to_name(code, node.iter) + ) + elif isinstance(node, ast.AugAssign): + if isinstance(node.target, ast.Name): + location += node.target.id + else: + location += node_to_name(code, node.target) + elif isinstance(node, ast.Assign): + # This assert would fire if we did e.g. some_sources = all_sources = [ ...
] + assert len(node.targets) == 1, "Assignment node contains more than one target" + if isinstance(node.targets[0], ast.Name): + location += node.targets[0].id + else: + location += node_to_name(code, node.targets[0]) + else: + raise Exception("Got a node type I don't know how to handle: " + str(node)) + + return location + + +def assignment_node_to_source_filename_list(code, node): + """ + If the list of filenames is not a list of constants (e.g. it's a generated list) + it's (probably) infeasible to try and figure it out. At least we're not going to try + right now. Maybe in the future? + + If this happens, we'll return an empty list. The consequence of this is that we + won't be able to match a file against this list, so we may not be able to add it. + + (But if the file matches a generated list, perhaps it will be included in the + Sources list automatically?) + """ + if isinstance(node.value, ast.List) and "elts" in node.value._fields: + for f in node.value.elts: + if not isinstance(f, ast.Constant) and not isinstance(f, ast.Str): + log( + "Found non-constant source file name in list: ", + ast_get_source_segment(code, f), + ) + return [] + return [ + f.value if isinstance(f, ast.Constant) else f.s for f in node.value.elts + ] + elif isinstance(node.value, ast.ListComp): + # SOURCES += [f for f in foo if blah] + log("Could not find the files for " + ast_get_source_segment(code, node.value)) + elif isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript): + # SOURCES += other_var + # SOURCES += files['X64_SOURCES'] + log("Could not find the files for " + ast_get_source_segment(code, node)) + elif isinstance(node.value, ast.Call): + # SOURCES += sorted(...) + log("Could not find the files for " + ast_get_source_segment(code, node)) + else: + raise Exception( + "Unexpected node received in assignment_node_to_source_filename_list: " + + str(node) + ) + return [] + + +def mozbuild_file_to_source_assignments(normalized_mozbuild_filename, assignment_type): + """ + Returns a dictionary of 'source-assignment-location' -> 'normalized source filename list' + contained in the moz.build file specified + + normalized_mozbuild_filename: the moz.build file to read + """ + source_assignments = {} + + if assignment_type == "source-files": + targets = ["SOURCES", "UNIFIED_SOURCES"] + else: + targets = ["EXPORTS"] + + # Parse the AST of the moz.build file + code = open(normalized_mozbuild_filename).read() + root = ast.parse(code) + + # Populate node parents. This allows us to walk up from a node to the root. 
+ # (Really I think python's ast class should do this, but it doesn't, so we monkey-patch it) + for node in ast.walk(root): + for child in ast.iter_child_nodes(node): + child.parent = node + + # Find all the assignments of SOURCES or UNIFIED_SOURCES + if assignment_type == "source-files": + source_assignment_nodes = [ + node + for node in ast.walk(root) + if isinstance(node, ast.AugAssign) + and isinstance(node.target, ast.Name) + and node.target.id in targets + ] + assert ( + len([n for n in source_assignment_nodes if not isinstance(n.op, ast.Add)]) + == 0 + ), "We got a Source assignment that wasn't +=" + + # Recurse and find nodes where we do SOURCES += other_var or SOURCES += FILES['foo'] + recursive_assignment_nodes = [ + node + for node in source_assignment_nodes + if isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript) + ] + + recursive_assignment_nodes_names = [ + node.value.id + for node in recursive_assignment_nodes + if isinstance(node.value, ast.Name) + ] + + # TODO: We do not dig into subscript variables. These are currently only used by two + # libraries that use external sources.mozbuild files. + # recursive_assignment_nodes_names.extend([something for node in + # recursive_assignment_nodes if isinstance(node.value, ast.Subscript)] + + additional_assignment_nodes = [ + node + for node in ast.walk(root) + if isinstance(node, ast.Assign) + and isinstance(node.targets[0], ast.Name) + and node.targets[0].id in recursive_assignment_nodes_names + ] + + # Remove the original, useless assignment node (the SOURCES += other_var) + for node in recursive_assignment_nodes: + source_assignment_nodes.remove(node) + # Add the other_var += [''] source-assignment + source_assignment_nodes.extend(additional_assignment_nodes) + else: + source_assignment_nodes = [ + node + for node in ast.walk(root) + if isinstance(node, ast.AugAssign) + and ( + (isinstance(node.target, ast.Name) and node.target.id == "EXPORTS") + or ( + isinstance(node.target, ast.Attribute) + and get_attribute_label(node.target).startswith("EXPORTS") + ) + ) + ] + source_assignment_nodes.extend( + [ + node + for node in ast.walk(root) + if isinstance(node, ast.Assign) + and ( + ( + isinstance(node.targets[0], ast.Name) + and node.targets[0].id == "EXPORTS" + ) + or ( + isinstance(node.targets[0], ast.Attribute) + and get_attribute_label(node.targets[0]).startswith("EXPORTS") + ) + ) + ] + ) + + # Get the source-assignment-location for the node: + assignment_index = 1 + for a in source_assignment_nodes: + source_assignment_location = ( + node_to_readable_file_location(code, a) + " " + str(assignment_index) + ) + source_filename_list = assignment_node_to_source_filename_list(code, a) + + if not source_filename_list: + # In some cases (like generated source file lists) we will have an empty list. 
+ # If that is the case, just omit the source assignment + continue + + normalized_source_filename_list = [ + normalize_filename(normalized_mozbuild_filename, f) + for f in source_filename_list + ] + + if source_assignment_location in source_assignments: + source_assignment_location = node_to_readable_file_location(code, a) + + assert ( + source_assignment_location not in source_assignments + ), "In %s, two assignments have the same key ('%s')" % ( + normalized_mozbuild_filename, + source_assignment_location, + ) + source_assignments[source_assignment_location] = normalized_source_filename_list + assignment_index += 1 + + return (source_assignments, root, code) + + +def unnormalize_filename(normalized_mozbuild_filename, normalized_filename): + if normalized_filename[0] == "/": + return normalized_filename + + mozbuild_path = ( + os.path.dirname(normalized_mozbuild_filename).replace(os.path.sep, "/") + "/" + ) + return normalized_filename.replace(mozbuild_path, "") + + +def normalize_filename(normalized_mozbuild_filename, filename): + if filename[0] == "/": + return filename + + mozbuild_path = os.path.dirname(normalized_mozbuild_filename).replace( + os.path.sep, "/" + ) + return os.path.join(mozbuild_path, filename).replace(os.path.sep, "/") + + +def get_mozbuild_file_search_order( + normalized_filename, + moz_yaml_dir=None, + vendoring_dir=None, + all_mozbuild_filenames_normalized=None, +): + """ + Returns an ordered list of normalized moz.build filenames to consider for a given filename + + normalized_filename: a source filename normalized to the gecko root + + moz_yaml_dir: the path from gecko_root to the moz.yaml file (which is the root of the + moz.build files) + + vendoring_dir: the path to where the library's source files are + + all_mozbuild_filenames_normalized: (optional) the list of all third-party moz.build files + If all_mozbuild_filenames_normalized is not specified, we look in the filesystem. + + The list is built out of two distinct steps. + + In Step 1 we will walk up a directory tree, looking for moz.build files. We append moz.build + files in this order, preferring the lowest moz.build we find, then moving on to one in a + higher directory. + The directory we start in is a little complicated. We take the series of subdirectories + between vendoring_dir and the file in question, and then append them to the moz.yaml + directory. + + Example: + + .. code-block:: python + + When moz_yaml directory != vendoring_directory: + moz_yaml_dir = foo/bar/ + vendoring_dir = third_party/baz/ + normalized_filename = third_party/baz/asm/arm/a.S + starting_directory: foo/bar/asm/arm/ + When moz_yaml directory == vendoring_directory + (In this case, these variables will actually be 'None' but the algorithm is the same) + moz_yaml_dir = foo/bar/ + vendoring_dir = foo/bar/ + normalized_filename = foo/bar/asm/arm/a.S + starting_directory: foo/bar/asm/arm/ + + In Step 2 we get a bit desperate. When the vendoring directory and the moz_yaml directory are + not the same, there is no guarantee that the moz_yaml directory will adhere to the same + directory structure as the vendoring directory. And indeed it doesn't in some cases + (e.g. libdav1d.) + So in this situation we start at the root of the moz_yaml directory and walk downwards, adding + _any_ moz.build file we encounter to the list.
Later on (in all cases, not just + moz_yaml_dir != vendoring_dir) we only consider a moz.build file if it has source files whose + directory matches the normalized_filename, so this step, though desperate, is safe-ish and + believe it or not has worked for some file additions. + """ + ordered_list = [] + + if all_mozbuild_filenames_normalized is None: + assert os.path.isfile( + ".arcconfig" + ), "We do not seem to be running from the gecko root" + + # The first time around, this variable name is incorrect. + # It's actually the full path+filename, not a directory. + test_directory = None + if (moz_yaml_dir, vendoring_dir) == (None, None): + # In this situation, the library is vendored into the same directory as + # the moz.build files. We can start traversing directories up from the file to + # add to find the correct moz.build file + test_directory = normalized_filename + elif moz_yaml_dir and vendoring_dir: + # In this situation, the library is vendored in a different place (typically + # third_party/foo) from the moz.build files. + subdirectory_path = normalized_filename.replace(vendoring_dir, "") + test_directory = os.path.join(moz_yaml_dir, subdirectory_path) + else: + raise Exception("If moz_yaml_dir or vendoring_dir are specified, both must be") + + # Step 1 + while ( + len(os.path.dirname(test_directory).replace(os.path.sep, "/")) > 1 + ): # While we are not at '/' + containing_directory = os.path.dirname(test_directory) + + possible_normalized_mozbuild_filename = os.path.join( + containing_directory, "moz.build" + ) + + if not all_mozbuild_filenames_normalized: + if os.path.isfile(possible_normalized_mozbuild_filename): + ordered_list.append(possible_normalized_mozbuild_filename) + elif possible_normalized_mozbuild_filename in all_mozbuild_filenames_normalized: + ordered_list.append(possible_normalized_mozbuild_filename) + + test_directory = containing_directory + + # Step 2 + if moz_yaml_dir: + for root, dirs, files in os.walk(moz_yaml_dir): + for f in files: + if f == "moz.build": + ordered_list.append(os.path.join(root, f)) + + return ordered_list + + +def get_closest_mozbuild_file( + normalized_filename, + moz_yaml_dir=None, + vendoring_dir=None, + all_mozbuild_filenames_normalized=None, +): + """ + Returns the closest moz.build file in the directory tree to a normalized filename + """ + r = get_mozbuild_file_search_order( + normalized_filename, + moz_yaml_dir, + vendoring_dir, + all_mozbuild_filenames_normalized, + ) + return r[0] if r else None + + +def filenames_directory_is_in_filename_list( + filename_normalized, list_of_normalized_filenames +): + """ + Given a normalized filename and a list of normalized filenames, first turn them into a + containing directory, and a list of containing directories. Then test if the containing + directory of the filename is in the list. + + ex: + f = filenames_directory_is_in_filename_list + f("foo/bar/a.c", ["foo/b.c"]) -> false + f("foo/bar/a.c", ["foo/b.c", "foo/bar/c.c"]) -> true + f("foo/bar/a.c", ["foo/b.c", "foo/bar/baz/d.c"]) -> false + """ + path_list = set( + [ + os.path.dirname(f).replace(os.path.sep, "/") + for f in list_of_normalized_filenames + ] + ) + return os.path.dirname(filename_normalized).replace(os.path.sep, "/") in path_list + + +def find_all_posible_assignments_from_filename(source_assignments, filename_normalized): + """ + Given a list of source assignments and a normalized filename, narrow the list to assignments + that contain a file whose directory matches the filename's directory.
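+
+    ex (illustrative, mirroring the example convention above):
+        f = find_all_posible_assignments_from_filename
+        f({"> SOURCES 1": ["foo/a.c"], "> SOURCES 2": ["bar/b.c"]}, "foo/x.c")
+            -> {"> SOURCES 1": ["foo/a.c"]}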
+ """ + possible_assignments = {} + for key, list_of_normalized_filenames in source_assignments.items(): + if not list_of_normalized_filenames: + continue + if filenames_directory_is_in_filename_list( + filename_normalized, list_of_normalized_filenames + ): + possible_assignments[key] = list_of_normalized_filenames + return possible_assignments + + +def guess_best_assignment(source_assignments, filename_normalized): + """ + Given several assignments, all of which contain the same directory as the filename, pick one + we think is best and return its source-assignment-location. + + We do this by looking at the filename itself (not just its directory) and picking the + assignment which contains a filename with the longest matching prefix. + + e.g.: "foo/asm_neon.c" compared to ["foo/main.c", "foo/all_utility.c"], ["foo/asm_arm.c"] + -> ["foo/asm_arm.c"] (match of `foo/asm_`) + """ + length_of_longest_match = 0 + source_assignment_location_of_longest_match = None + statistic_number_refinements = 0 + statistic_length_logic = 0 + + for key, list_of_normalized_filenames in source_assignments.items(): + for f in list_of_normalized_filenames: + if filename_normalized == f: + # Do not cheat by matching the prefix of the exact file + continue + + prefix = os.path.commonprefix([filename_normalized, f]) + if len(prefix) > length_of_longest_match: + statistic_number_refinements += 1 + length_of_longest_match = len(prefix) + source_assignment_location_of_longest_match = key + elif len(prefix) == length_of_longest_match and len( + source_assignments[key] + ) > len(source_assignments[source_assignment_location_of_longest_match]): + statistic_number_refinements += 1 + statistic_length_logic += 1 + length_of_longest_match = len(prefix) + source_assignment_location_of_longest_match = key + return ( + source_assignment_location_of_longest_match, + (statistic_number_refinements, statistic_length_logic), + ) + + +def edit_moz_build_file_to_add_file( + normalized_mozbuild_filename, + unnormalized_filename_to_add, + unnormalized_list_of_files, +): + """ + This function edits the moz.build file in-place + + I had _really_ hoped to replace this whole damn thing with something that adds a + node to the AST, dumps the AST out, and then runs black on the file but there are + some issues: + - third party moz.build files (or maybe all moz.build files) aren't always run through black + - dumping the AST out loses comments + + """ + + # Make sure that we only write in forward slashes + if "\\" in unnormalized_filename_to_add: + unnormalized_filename_to_add = unnormalized_filename_to_add.replace("\\", "/") + + # add the file into the list, and then sort it in the same way the moz.build validator + # expects + unnormalized_list_of_files.append(unnormalized_filename_to_add) + unnormalized_list_of_files = alphabetical_sorted(unnormalized_list_of_files) + + # we're going to add our file by doing a find/replace of an adjacent file in the list + indx_of_addition = unnormalized_list_of_files.index(unnormalized_filename_to_add) + if indx_of_addition == 0: + target_indx = 1 + replace_before = False + else: + target_indx = indx_of_addition - 1 + replace_before = True + + find_str = unnormalized_list_of_files[target_indx] + + # We will only perform the first replacement. This is because sometimes there's moz.build + # code like: + # SOURCES += ['file.cpp'] + # SOURCES['file.cpp'].flags += ['-Winline'] + # If we replaced every time we found the target, we would be inserting into that second + # line.
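+    # An illustrative (hypothetical) example: to add 'b.c' to
+    #     SOURCES += [
+    #         'a.c',
+    #         'c.c',
+    #     ]
+    # we locate the adjacent entry 'a.c' and rewrite that single line as
+    #         'a.c',
+    #         'b.c',
+    # leaving every other line of the list untouched.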
+ did_replace = False + + with open(normalized_mozbuild_filename, mode="r") as file: + with open(normalized_mozbuild_filename + ".new", mode="wb") as output: + for line in file: + if not did_replace and find_str in line: + did_replace = True + + # Okay, we found the line we need to edit, now we need to be ugly about it + # Grab the type of quote used in this moz.build file: single or double + quote_type = line[line.index(find_str) - 1] + + if "[" not in line: + # We'll want to put our new file onto its own line + newline_to_add = "\n" + # And copy the indentation of the line we're adding adjacent to + indent_value = line[0 : line.index(quote_type)] + else: + # This is frustrating, we have the start of the array here. We aren't + # going to be able to indent things onto a newline properly. We're just + # going to have to stick it in on the same line. + newline_to_add = "" + indent_value = "" + + find_str = "%s%s%s" % (quote_type, find_str, quote_type) + if replace_before: + replacement_tuple = ( + find_str, + newline_to_add, + indent_value, + quote_type, + unnormalized_filename_to_add, + quote_type, + ) + replace_str = "%s,%s%s%s%s%s" % replacement_tuple + else: + replacement_tuple = ( + quote_type, + unnormalized_filename_to_add, + quote_type, + newline_to_add, + indent_value, + find_str, + ) + replace_str = "%s%s%s,%s%s%s" % replacement_tuple + + line = line.replace(find_str, replace_str) + + output.write((line.rstrip() + "\n").encode("utf-8")) + + shutil.move(normalized_mozbuild_filename + ".new", normalized_mozbuild_filename) + + +def edit_moz_build_file_to_remove_file( + normalized_mozbuild_filename, unnormalized_filename_to_remove +): + """ + This function edits the moz.build file in-place + """ + + simple_file_line = re.compile( + r"^\s*['\"]" + unnormalized_filename_to_remove + r"['\"],*$" + ) + did_replace = False + + with open(normalized_mozbuild_filename, mode="r") as file: + with open(normalized_mozbuild_filename + ".new", mode="wb") as output: + for line in file: + if not did_replace and unnormalized_filename_to_remove in line: + did_replace = True + + # If the line consists of just a single source file on it, then we're in the + # clear - we can just skip this line. + if simple_file_line.match(line): + # Do not output anything, just keep going. + continue + + # Okay, so the line is a little more complicated.
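+                # An illustrative (hypothetical) example: removing 'b.c' from
+                #     SOURCES += ['a.c', 'b.c', 'c.c']
+                # strips the quoted filename and its trailing comma, leaving
+                #     SOURCES += ['a.c',  'c.c']
+                # (only the entry is removed; surplus whitespace may remain).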
+                    quote_type = line[line.index(unnormalized_filename_to_remove) - 1]
+
+                    if "[" in line or "]" in line:
+                        find_str = "%s%s%s,*" % (
+                            quote_type,
+                            unnormalized_filename_to_remove,
+                            quote_type,
+                        )
+                        line = re.sub(find_str, "", line)
+                    else:
+                        raise Exception(
+                            "Got an unusual type of line we're trying to remove a file from:",
+                            line,
+                        )
+
+                output.write((line.rstrip() + "\n").encode("utf-8"))
+
+    shutil.move(normalized_mozbuild_filename + ".new", normalized_mozbuild_filename)
+
+
+def validate_directory_parameters(moz_yaml_dir, vendoring_dir):
+    # Validate the parameters
+    assert (moz_yaml_dir, vendoring_dir) == (None, None) or (
+        moz_yaml_dir and vendoring_dir
+    ), "If either moz_yaml_dir or vendoring_dir are specified, they both must be"
+
+    if moz_yaml_dir is not None and vendoring_dir is not None:
+        # Ensure they are provided with trailing slashes
+        moz_yaml_dir += "/" if moz_yaml_dir[-1] != "/" else ""
+        vendoring_dir += "/" if vendoring_dir[-1] != "/" else ""
+
+    return (moz_yaml_dir, vendoring_dir)
+
+
+HAS_ABSOLUTE = 1
+HAS_TRAVERSE_CHILD = 2
+HAS_RELATIVE_CHILD = 2  # behaves the same as above
+
+
+def get_file_reference_modes(source_assignments):
+    """
+    Given a set of source assignments, this function traverses through the
+    file references in those assignments to see if the files are referenced
+    using absolute paths (relative to gecko root) or relative paths.
+
+    It will return all the modes that are seen.
+    """
+    modes = set()
+
+    for key, list_of_normalized_filenames in source_assignments.items():
+        if not list_of_normalized_filenames:
+            continue
+        for file in list_of_normalized_filenames:
+            if file[0] == "/":
+                modes.add(HAS_ABSOLUTE)
+            elif file[0:3] == "../":
+                modes.add(HAS_TRAVERSE_CHILD)
+            else:
+                modes.add(HAS_RELATIVE_CHILD)
+    return modes
+
+
+def renormalize_filename(
+    mode,
+    moz_yaml_dir,
+    vendoring_dir,
+    normalized_mozbuild_filename,
+    normalized_filename_to_act_on,
+):
+    """
+    Edit the normalized_filename_to_act_on to either
+      - Make it an absolute path from gecko root (if we're in that mode)
+      - Get a relative path from the vendoring directory to the yaml directory where the
+        moz.build file is (if they are in separate directories)
+    """
+    if mode == HAS_ABSOLUTE:
+        # If the moz.build file uses absolute paths from the gecko root, this is easy,
+        # all we need to do is prepend a '/' to indicate that
+        normalized_filename_to_act_on = "/" + normalized_filename_to_act_on
+    elif moz_yaml_dir and vendoring_dir:
+        # To re-normalize it in this case, we:
+        #   (a) get the path from gecko_root to the moz.build file we are considering
+        #   (b) compute a relative path from that directory to the file we want
+        #   (c) because (b) started at the moz.build file's directory, it is not
+        #       normalized to the gecko_root. Therefore we need to normalize it by
+        #       prepending (a)
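+        # For illustration (hypothetical layout): with the moz.build file at
+        # media/libfoo/moz.build, (a) is "media/libfoo"; for a target file at
+        # third_party/foo/src/bar.c, (b) is "../../third_party/foo/src/bar.c",
+        # and (c) joins the two back together.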
+        a = os.path.dirname(normalized_mozbuild_filename).replace(os.path.sep, "/")
+        b = os.path.relpath(normalized_filename_to_act_on, start=a).replace(
+            os.path.sep, "/"
+        )
+        c = os.path.join(a, b).replace(os.path.sep, "/")
+        normalized_filename_to_act_on = c
+
+    return normalized_filename_to_act_on
+
+
+#########################################################
+# PUBLIC API
+#########################################################
+
+
+class MozBuildRewriteException(Exception):
+    pass
+
+
+def remove_file_from_moz_build_file(
+    normalized_filename_to_remove, moz_yaml_dir=None, vendoring_dir=None
+):
+    """
+    Given a filename, relative to the gecko root (aka normalized), we look for the nearest
+    moz.build file, look in that file for the file, and then edit that moz.build file in-place.
+    """
+    moz_yaml_dir, vendoring_dir = validate_directory_parameters(
+        moz_yaml_dir, vendoring_dir
+    )
+
+    all_possible_normalized_mozbuild_filenames = get_mozbuild_file_search_order(
+        normalized_filename_to_remove, moz_yaml_dir, vendoring_dir, None
+    )
+
+    # normalized_filename_to_remove is the path from gecko_root to the file. However, if we
+    # vendor separately from moz.yaml, then 'normalization' gets more complicated as explained
+    # above. We will need to re-normalize the filename for each moz.build file we want to
+    # test, so we save the original normalized filename for this purpose
+    original_normalized_filename_to_remove = normalized_filename_to_remove
+
+    # These are the two header file types specified in vendor_manifest.py > source_suffixes
+    if normalized_filename_to_remove.endswith(
+        ".h"
+    ) or normalized_filename_to_remove.endswith(".hpp"):
+        assignment_type = "header-files"
+    else:
+        assignment_type = "source-files"
+
+    for normalized_mozbuild_filename in all_possible_normalized_mozbuild_filenames:
+        source_assignments, root, code = mozbuild_file_to_source_assignments(
+            normalized_mozbuild_filename, assignment_type
+        )
+
+        modes = get_file_reference_modes(source_assignments)
+
+        for mode in modes:
+            normalized_filename_to_remove = renormalize_filename(
+                mode,
+                moz_yaml_dir,
+                vendoring_dir,
+                normalized_mozbuild_filename,
+                normalized_filename_to_remove,
+            )
+
+            for key in source_assignments:
+                normalized_source_filename_list = source_assignments[key]
+                if normalized_filename_to_remove in normalized_source_filename_list:
+                    unnormalized_filename_to_remove = unnormalize_filename(
+                        normalized_mozbuild_filename, normalized_filename_to_remove
+                    )
+                    edit_moz_build_file_to_remove_file(
+                        normalized_mozbuild_filename, unnormalized_filename_to_remove
+                    )
+                    return
+
+        normalized_filename_to_remove = original_normalized_filename_to_remove
+    raise MozBuildRewriteException("Could not remove " + normalized_filename_to_remove)
+
+
+def add_file_to_moz_build_file(
+    normalized_filename_to_add, moz_yaml_dir=None, vendoring_dir=None
+):
+    """
+    This is the overall function. Given a filename, relative to the gecko root (aka
+    normalized), we look for a moz.build file to add it to, look for the place in the
+    moz.build file to add it, and then edit that moz.build file in-place.
+
+    It accepts two optional parameters. If one is specified they both must be. If a library is
+    vendored in a separate place from the moz.yaml file, these parameters specify those two
+    directories.
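+
+    A hypothetical invocation (the __main__ block at the bottom of this file shows
+    a real one):
+        add_file_to_moz_build_file(
+            "third_party/libfoo/src/bar.c", "media/libfoo", "third_party/libfoo"
+        )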
+ """ + moz_yaml_dir, vendoring_dir = validate_directory_parameters( + moz_yaml_dir, vendoring_dir + ) + + all_possible_normalized_mozbuild_filenames = get_mozbuild_file_search_order( + normalized_filename_to_add, moz_yaml_dir, vendoring_dir, None + ) + + # normalized_filename_to_add is the path from gecko_root to the file. However, if we vendor + # separate from moz.yaml; then 'normalization' gets more complicated as explained above. + # We will need to re-normalize the filename for each moz.build file we want to test, so we + # save the original normalized filename for this purpose + original_normalized_filename_to_add = normalized_filename_to_add + + if normalized_filename_to_add.endswith(".h") or normalized_filename_to_add.endswith( + ".hpp" + ): + assignment_type = "header-files" + else: + assignment_type = "source-files" + + for normalized_mozbuild_filename in all_possible_normalized_mozbuild_filenames: + source_assignments, root, code = mozbuild_file_to_source_assignments( + normalized_mozbuild_filename, assignment_type + ) + + modes = get_file_reference_modes(source_assignments) + + for mode in modes: + normalized_filename_to_add = renormalize_filename( + mode, + moz_yaml_dir, + vendoring_dir, + normalized_mozbuild_filename, + normalized_filename_to_add, + ) + + possible_assignments = find_all_posible_assignments_from_filename( + source_assignments, normalized_filename_to_add + ) + + if len(possible_assignments) == 0: + normalized_filename_to_add = original_normalized_filename_to_add + continue + + assert ( + len(possible_assignments) > 0 + ), "Could not find a single possible source assignment" + if len(possible_assignments) > 1: + best_guess, _ = guess_best_assignment( + possible_assignments, normalized_filename_to_add + ) + chosen_source_assignment_location = best_guess + else: + chosen_source_assignment_location = list(possible_assignments.keys())[0] + + guessed_list_containing_normalized_filenames = possible_assignments[ + chosen_source_assignment_location + ] + + # unnormalize filenames so we can edit the moz.build file. They rarely use full paths. + unnormalized_filename_to_add = unnormalize_filename( + normalized_mozbuild_filename, normalized_filename_to_add + ) + unnormalized_list_of_files = [ + unnormalize_filename(normalized_mozbuild_filename, f) + for f in guessed_list_containing_normalized_filenames + ] + + edit_moz_build_file_to_add_file( + normalized_mozbuild_filename, + unnormalized_filename_to_add, + unnormalized_list_of_files, + ) + return + + raise MozBuildRewriteException( + "Could not find a single moz.build file to add " + normalized_filename_to_add + ) + + +######################################################### +# TESTING CODE +######################################################### + + +def get_all_target_filenames_normalized(all_mozbuild_filenames_normalized): + """ + Given a list of moz.build files, returns all the files listed in all the souce assignments + in the file. 
+
+    This function is only used for debug/testing purposes - there is no reason to call this
+    as part of 'the algorithm'
+    """
+    all_target_filenames_normalized = []
+    for normalized_mozbuild_filename in all_mozbuild_filenames_normalized:
+        source_assignments, root, code = mozbuild_file_to_source_assignments(
+            normalized_mozbuild_filename
+        )
+        for key in source_assignments:
+            list_of_normalized_filenames = source_assignments[key]
+            all_target_filenames_normalized.extend(list_of_normalized_filenames)
+
+    return all_target_filenames_normalized
+
+
+def try_to_match_target_file(
+    all_mozbuild_filenames_normalized, target_filename_normalized
+):
+    """
+    Runs 'the algorithm' on a target file, and returns whether the algorithm was successful.
+
+    all_mozbuild_filenames_normalized: the list of all third-party moz.build files
+    target_filename_normalized: the target filename, normalized to the gecko root
+    """
+
+    # We do not update the statistics for failed matches, so save a copy
+    global statistics
+    backup_statistics = copy.deepcopy(statistics)
+
+    if "" == target_filename_normalized:
+        raise Exception("Received an empty target_filename_normalized")
+
+    normalized_mozbuild_filename = get_closest_mozbuild_file(
+        target_filename_normalized, None, None, all_mozbuild_filenames_normalized
+    )
+    if not normalized_mozbuild_filename:
+        return (False, "No moz.build file found")
+
+    source_assignments, root, code = mozbuild_file_to_source_assignments(
+        normalized_mozbuild_filename
+    )
+    possible_assignments = find_all_posible_assignments_from_filename(
+        source_assignments, target_filename_normalized
+    )
+
+    if len(possible_assignments) == 0:
+        raise Exception("No possible assignments were found")
+    elif len(possible_assignments) > 1:
+        (
+            best_guess,
+            (statistic_number_refinements, statistic_length_logic),
+        ) = guess_best_assignment(possible_assignments, target_filename_normalized)
+        chosen_source_assignment_location = best_guess
+
+        statistics["needed_to_guess"] += 1
+
+        if len(possible_assignments) not in statistics["guess_candidates"]:
+            statistics["guess_candidates"][len(possible_assignments)] = 0
+        statistics["guess_candidates"][len(possible_assignments)] += 1
+
+        if statistic_number_refinements not in statistics["number_refinements"]:
+            statistics["number_refinements"][statistic_number_refinements] = 0
+        statistics["number_refinements"][statistic_number_refinements] += 1
+
+        if statistic_length_logic not in statistics["length_logic"]:
+            statistics["length_logic"][statistic_length_logic] = 0
+        statistics["length_logic"][statistic_length_logic] += 1
+
+    else:
+        chosen_source_assignment_location = list(possible_assignments.keys())[0]
+
+    guessed_list_containing_normalized_filenames = possible_assignments[
+        chosen_source_assignment_location
+    ]
+
+    if target_filename_normalized in guessed_list_containing_normalized_filenames:
+        return (True, None)
+
+    # Restore the copy of the statistics so we don't alter it for failed matches
+    statistics = backup_statistics
+    return (False, chosen_source_assignment_location)
+
+
+def get_gecko_root():
+    """
+    Using __file__ as a base, find the gecko root
+    """
+    gecko_root = None
+    directory_to_check = os.path.dirname(os.path.abspath(__file__))
+    while not os.path.isfile(os.path.join(directory_to_check, ".arcconfig")):
+        directory_to_check = os.path.dirname(directory_to_check)
+        if directory_to_check == "/":
+            print("Could not find gecko root")
+            sys.exit(1)
+
+    gecko_root = directory_to_check
+    return gecko_root
+
+
+def get_all_mozbuild_filenames(gecko_root):
+ """ + Find all the third party moz.build files in the gecko repo + """ + third_party_paths = open( + os.path.join(gecko_root, "tools", "rewriting", "ThirdPartyPaths.txt") + ).readlines() + all_mozbuild_filenames_normalized = [] + for path in third_party_paths: + # We need shell=True because some paths are specified as globs + # We need an exception handler because sometimes the directory doesn't exist and find barfs + try: + output = subprocess.check_output( + "find %s -name moz.build" % os.path.join(gecko_root, path.strip()), + shell=True, + ).decode("utf-8") + for f in output.split("\n"): + f = f.replace("//", "/").strip().replace(gecko_root, "")[1:] + if f: + all_mozbuild_filenames_normalized.append(f) + except Exception: + pass + + return all_mozbuild_filenames_normalized + + +def test_all_third_party_files(gecko_root, all_mozbuild_filenames_normalized): + """ + Run the algorithm on every source file in a third party moz.build file and output the results + """ + all_mozbuild_filenames_normalized = [ + f for f in all_mozbuild_filenames_normalized if "webrtc" not in f + ] + all_target_filenames_normalized = get_all_target_filenames_normalized( + all_mozbuild_filenames_normalized + ) + + total_attempted = 0 + failed_matched = [] + successfully_matched = 0 + + print("Going to try to match %i files..." % len(all_target_filenames_normalized)) + for target_filename_normalized in all_target_filenames_normalized: + result, wrong_guess = try_to_match_target_file( + all_mozbuild_filenames_normalized, target_filename_normalized + ) + + total_attempted += 1 + if result: + successfully_matched += 1 + else: + failed_matched.append((target_filename_normalized, wrong_guess)) + if total_attempted % 100 == 0: + print("Progress:", total_attempted) + + print( + "Successfully Matched %i of %i files" % (successfully_matched, total_attempted) + ) + if failed_matched: + print("Failed files:") + for f in failed_matched: + print("\t", f[0], f[1]) + print("Statistics:") + pprint(statistics) + + +if __name__ == "__main__": + gecko_root = get_gecko_root() + os.chdir(gecko_root) + + add_file_to_moz_build_file( + "third_party/jpeg-xl/lib/include/jxl/resizable_parallel_runner.h", + "media/libjxl", + "third_party/jpeg-xl", + ) + + # all_mozbuild_filenames_normalized = get_all_mozbuild_filenames(gecko_root) + # test_all_third_party_files(gecko_root, all_mozbuild_filenames_normalized) diff --git a/python/mozbuild/mozbuild/vendor/test_vendor_changes.sh b/python/mozbuild/mozbuild/vendor/test_vendor_changes.sh new file mode 100755 index 0000000000..3d0e390f7f --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/test_vendor_changes.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +if [[ ! -f "CLOBBER" ]]; then + echo "Script should be run from mozilla-central root" + exit 1 +fi + +echo "THIS SCRIPT WILL REVERT AND PURGE UNCOMMIT LOCAL CHANGES" +echo "TYPE ok TO CONTINUE" +read CONFIRMATION +if [[ $CONFIRMATION != "ok" ]]; then + echo "Did not get 'ok', exiting" + exit 0 +fi + +ALL_MOZ_YAML_FILES=$(find . -name moz.yaml) + +for f in $ALL_MOZ_YAML_FILES; do + IFS='' read -r -d '' INPUT <<"EOF" +import sys +import yaml +enabled = False +with open(sys.argv[1]) as yaml_in: + o = yaml.safe_load(yaml_in) + if "updatebot" in o: + if 'tasks' in o["updatebot"]: + for t in o["updatebot"]["tasks"]: + if t["type"] == "vendoring": + if t.get("enabled", True) and t.get("platform", "Linux").lower() == "linux": + enabled = True +if enabled: + print(sys.argv[1]) +EOF + + FILE=$(python3 -c "$INPUT" $f) + + if [[ ! 
+    UPDATEBOT_YAML_FILES+=("$FILE")
+  fi
+done
+
+
+for FILE in "${UPDATEBOT_YAML_FILES[@]}"; do
+  REVISION=$(yq eval ".origin.revision" $FILE)
+  HAS_PATCHES=$(yq eval ".vendoring.patches | (. != null)" $FILE)
+
+  echo "$FILE - $REVISION"
+  if [[ $HAS_PATCHES == "false" ]]; then
+    ./mach vendor $FILE --force --revision $REVISION
+    if [[ $? == 1 ]]; then
+      exit 1
+    fi
+  else
+    ./mach vendor $FILE --force --revision $REVISION --patch-mode=none
+    if [[ $? == 1 ]]; then
+      exit 1
+    fi
+    ./mach vendor $FILE --force --revision $REVISION --patch-mode=only --ignore-modified
+    if [[ $? == 1 ]]; then
+      exit 1
+    fi
+  fi
+  hg revert .
+  hg purge
+done
diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
new file mode 100644
index 0000000000..9de2c23e95
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
@@ -0,0 +1,789 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import functools
+import glob
+import logging
+import os
+import re
+import shutil
+import stat
+import sys
+import tarfile
+import tempfile
+from collections import defaultdict
+
+import mozfile
+import mozpack.path as mozpath
+import requests
+
+from mozbuild.base import MozbuildObject
+from mozbuild.vendor.rewrite_mozbuild import (
+    MozBuildRewriteException,
+    add_file_to_moz_build_file,
+    remove_file_from_moz_build_file,
+)
+
+DEFAULT_EXCLUDE_FILES = [".git*", ".git*/**"]
+DEFAULT_KEEP_FILES = ["**/moz.build", "**/moz.yaml"]
+DEFAULT_INCLUDE_FILES = []
+
+
+def throwe():
+    raise Exception
+
+
+def _replace_in_file(file, pattern, replacement, regex=False):
+    with open(file) as f:
+        contents = f.read()
+
+    if regex:
+        newcontents = re.sub(pattern, replacement, contents)
+    else:
+        newcontents = contents.replace(pattern, replacement)
+
+    if newcontents == contents:
+        raise Exception(
+            "Could not find '%s' in %s to %sreplace with '%s'"
+            % (pattern, file, "regex-" if regex else "", replacement)
+        )
+
+    with open(file, "w") as f:
+        f.write(newcontents)
+
+
+def list_of_paths_to_readable_string(paths):
+    # From https://stackoverflow.com/a/41578071
+    dic = defaultdict(list)
+    for item in paths:
+        if os.path.isdir(item):  # To check path is a directory
+            _ = dic[item]  # will set default value as empty list
+        else:
+            path, file = os.path.split(item)
+            dic[path].append(file)
+
+    final_string = "["
+    for key, val in dic.items():
+        if len(val) == 0:
+            final_string += key + ", "
+        elif len(val) < 3:
+            final_string += ", ".join([os.path.join(key, v) for v in val]) + ", "
+        elif len(val) < 10:
+            final_string += "%s items in %s: %s and %s, " % (
+                len(val),
+                key,
+                ", ".join(val[0:-1]),
+                val[-1],
+            )
+        else:
+            final_string += "%s (omitted) items in %s, " % (len(val), key)
+
+    if final_string[-2:] == ", ":
+        final_string = final_string[:-2]
+
+    return final_string + "]"
+
+
+class VendorManifest(MozbuildObject):
+    def should_perform_step(self, step):
+        return step not in self.manifest["vendoring"].get("skip-vendoring-steps", [])
+
+    def vendor(
+        self,
+        command_context,
+        yaml_file,
+        manifest,
+        revision,
+        ignore_modified,
+        check_for_update,
+        force,
+        add_to_exports,
+        patch_mode,
+    ):
+        self.manifest = manifest
+        self.yaml_file = yaml_file
+        self._extract_directory = throwe
+        self.logInfo = functools.partial(self.log, logging.INFO, "vendor")
+        if "vendor-directory" not in self.manifest["vendoring"]:
self.manifest["vendoring"]: + self.manifest["vendoring"]["vendor-directory"] = os.path.dirname( + self.yaml_file + ) + + # ========================================================== + # If we're only patching; do that + if "patches" in self.manifest["vendoring"] and patch_mode == "only": + self.import_local_patches( + self.manifest["vendoring"]["patches"], + os.path.dirname(self.yaml_file), + self.manifest["vendoring"]["vendor-directory"], + ) + return + + # ========================================================== + self.source_host = self.get_source_host() + + ref_type = self.manifest["vendoring"].get("tracking", "commit") + flavor = self.manifest["vendoring"].get("flavor", "regular") + # Individiual files are special + + if revision == "tip": + # This case allows us to force-update a tag-tracking library to master + new_revision, timestamp = self.source_host.upstream_commit("HEAD") + elif ref_type == "tag": + new_revision, timestamp = self.source_host.upstream_tag(revision) + else: + new_revision, timestamp = self.source_host.upstream_commit(revision) + + self.logInfo( + {"ref_type": ref_type, "ref": new_revision, "timestamp": timestamp}, + "Latest {ref_type} is {ref} from {timestamp}", + ) + + # ========================================================== + if not force and self.manifest["origin"]["revision"] == new_revision: + # We're up to date, don't do anything + self.logInfo({}, "Latest upstream matches in-tree.") + return + elif flavor != "individual-file" and check_for_update: + # Only print the new revision to stdout + print("%s %s" % (new_revision, timestamp)) + return + + # ========================================================== + if flavor == "regular": + self.process_regular( + new_revision, timestamp, ignore_modified, add_to_exports + ) + elif flavor == "individual-files": + self.process_individual(new_revision, timestamp, ignore_modified) + elif flavor == "rust": + self.process_rust( + command_context, + self.manifest["origin"]["revision"], + new_revision, + timestamp, + ignore_modified, + ) + else: + raise Exception("Unknown flavor") + + def process_rust( + self, command_context, old_revision, new_revision, timestamp, ignore_modified + ): + # First update the Cargo.toml + cargo_file = os.path.join(os.path.dirname(self.yaml_file), "Cargo.toml") + try: + _replace_in_file(cargo_file, old_revision, new_revision) + except Exception: + # If we can't find it the first time, try again with a short hash + _replace_in_file(cargo_file, old_revision[:8], new_revision) + + # Then call ./mach vendor rust + from mozbuild.vendor.vendor_rust import VendorRust + + vendor_command = command_context._spawn(VendorRust) + vendor_command.vendor( + ignore_modified=True, build_peers_said_large_imports_were_ok=False + ) + + self.update_yaml(new_revision, timestamp) + + def process_individual(self, new_revision, timestamp, ignore_modified): + # This design is used because there is no github API to query + # for the last commit that modified a file; nor a way to get file + # blame. So really all we can do is just download and replace the + # files and see if they changed... 
+
+        def download_and_write_file(url, destination):
+            self.logInfo(
+                {"local_file": destination, "url": url},
+                "Downloading {local_file} from {url}...",
+            )
+
+            with mozfile.NamedTemporaryFile() as tmpfile:
+                try:
+                    req = requests.get(url, stream=True)
+                    for data in req.iter_content(4096):
+                        tmpfile.write(data)
+                    tmpfile.seek(0)
+
+                    shutil.copy2(tmpfile.name, destination)
+                except Exception as e:
+                    raise e
+
+        # Only one of these loops will have content, so just do them both
+        for f in self.manifest["vendoring"].get("individual-files", []):
+            url = self.source_host.upstream_path_to_file(new_revision, f["upstream"])
+            destination = self.get_full_path(f["destination"])
+            download_and_write_file(url, destination)
+
+        for f in self.manifest["vendoring"].get("individual-files-list", []):
+            url = self.source_host.upstream_path_to_file(
+                new_revision,
+                self.manifest["vendoring"]["individual-files-default-upstream"] + f,
+            )
+            destination = self.get_full_path(
+                self.manifest["vendoring"]["individual-files-default-destination"] + f
+            )
+            download_and_write_file(url, destination)
+
+        self.spurious_check(new_revision, ignore_modified)
+
+        self.logInfo({}, "Checking for update actions")
+        self.update_files(new_revision)
+
+        self.update_yaml(new_revision, timestamp)
+
+        self.logInfo({"rev": new_revision}, "Updated to '{rev}'.")
+
+        if "patches" in self.manifest["vendoring"]:
+            # Remind the user
+            self.log(
+                logging.CRITICAL,
+                "vendor",
+                {},
+                "Patches present in manifest!!! Please run "
+                "'./mach vendor --patch-mode only' after committing changes.",
+            )
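+
+    # process_regular() honors "skip-vendoring-steps" from the manifest; a
+    # hypothetical moz.yaml snippet that skips the spurious-update check:
+    #
+    #     vendoring:
+    #       skip-vendoring-steps:
+    #         - spurious-check
+    #
+    # Each should_perform_step() call below corresponds to one such step name.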
Please run " + "'./mach vendor --patch-mode only' after commiting changes.", + ) + + def get_source_host(self): + if self.manifest["vendoring"]["source-hosting"] == "gitlab": + from mozbuild.vendor.host_gitlab import GitLabHost + + return GitLabHost(self.manifest) + elif self.manifest["vendoring"]["source-hosting"] == "github": + from mozbuild.vendor.host_github import GitHubHost + + return GitHubHost(self.manifest) + elif self.manifest["vendoring"]["source-hosting"] == "googlesource": + from mozbuild.vendor.host_googlesource import GoogleSourceHost + + return GoogleSourceHost(self.manifest) + elif self.manifest["vendoring"]["source-hosting"] == "angle": + from mozbuild.vendor.host_angle import AngleHost + + return AngleHost(self.manifest) + elif self.manifest["vendoring"]["source-hosting"] == "codeberg": + from mozbuild.vendor.host_codeberg import CodebergHost + + return CodebergHost(self.manifest) + else: + raise Exception( + "Unknown source host: " + self.manifest["vendoring"]["source-hosting"] + ) + + def get_full_path(self, path, support_cwd=False): + if support_cwd and path[0:5] == "{cwd}": + path = path.replace("{cwd}", ".") + elif "{tmpextractdir}" in path: + # _extract_directory() will throw an exception if it is invalid to use it + path = path.replace("{tmpextractdir}", self._extract_directory()) + elif "{yaml_dir}" in path: + path = path.replace("{yaml_dir}", os.path.dirname(self.yaml_file)) + elif "{vendor_dir}" in path: + path = path.replace( + "{vendor_dir}", self.manifest["vendoring"]["vendor-directory"] + ) + else: + path = mozpath.join(self.manifest["vendoring"]["vendor-directory"], path) + return os.path.abspath(path) + + def convert_patterns_to_paths(self, directory, patterns): + # glob.iglob uses shell-style wildcards for path name completion. + # "recursive=True" enables the double asterisk "**" wildcard which matches + # for nested directories as well as the directory we're searching in. + paths = [] + for pattern in patterns: + pattern_full_path = mozpath.join(directory, pattern) + # If pattern is a directory recursively add contents of directory + if os.path.isdir(pattern_full_path): + # Append double asterisk to the end to make glob.iglob recursively match + # contents of directory + paths.extend( + glob.iglob(mozpath.join(pattern_full_path, "**"), recursive=True) + ) + # Otherwise pattern is a file or wildcard expression so add it without altering it + else: + paths.extend(glob.iglob(pattern_full_path, recursive=True)) + # Remove folder names from list of paths in order to avoid prematurely + # truncating directories elsewhere + # Sort the final list to ensure we preserve 01_, 02_ ordering for e.g. 
*.patch globs + final_paths = sorted( + [mozpath.normsep(path) for path in paths if not os.path.isdir(path)] + ) + return final_paths + + def fetch_and_unpack(self, revision): + """Fetch and unpack upstream source""" + + def validate_tar_member(member, path): + def is_within_directory(directory, target): + real_directory = os.path.realpath(directory) + real_target = os.path.realpath(target) + prefix = os.path.commonprefix([real_directory, real_target]) + return prefix == real_directory + + member_path = os.path.join(path, member.name) + if not is_within_directory(path, member_path): + raise Exception("Attempted path traversal in tar file: " + member.name) + if member.issym(): + link_path = os.path.join(os.path.dirname(member_path), member.linkname) + if not is_within_directory(path, link_path): + raise Exception( + "Attempted link path traversal in tar file: " + member.name + ) + if member.mode & (stat.S_ISUID | stat.S_ISGID): + raise Exception( + "Attempted setuid or setgid in tar file: " + member.name + ) + + def safe_extract(tar, path=".", *, numeric_owner=False): + def _files(tar, path): + for member in tar: + validate_tar_member(member, path) + yield member + + tar.extractall(path, members=_files(tar, path), numeric_owner=numeric_owner) + + url = self.source_host.upstream_snapshot(revision) + self.logInfo({"url": url}, "Fetching code archive from {url}") + + with mozfile.NamedTemporaryFile() as tmptarfile: + tmpextractdir = tempfile.TemporaryDirectory() + try: + req = requests.get(url, stream=True) + for data in req.iter_content(4096): + tmptarfile.write(data) + tmptarfile.seek(0) + + vendor_dir = mozpath.normsep( + self.manifest["vendoring"]["vendor-directory"] + ) + if self.should_perform_step("keep"): + self.logInfo({}, "Retaining wanted in-tree files.") + to_keep = self.convert_patterns_to_paths( + vendor_dir, + self.manifest["vendoring"].get("keep", []) + + DEFAULT_KEEP_FILES + + self.manifest["vendoring"].get("patches", []), + ) + else: + self.logInfo({}, "Skipping retention of in-tree files.") + to_keep = [] + + self.logInfo({"vd": vendor_dir}, "Cleaning {vd} to import changes.") + # We use double asterisk wildcard here to get complete list of recursive contents + for file in self.convert_patterns_to_paths(vendor_dir, ["**"]): + file = mozpath.normsep(file) + if file not in to_keep: + mozfile.remove(file) + + self.logInfo({"vd": vendor_dir}, "Unpacking upstream files for {vd}.") + with tarfile.open(tmptarfile.name) as tar: + + safe_extract(tar, tmpextractdir.name) + + def get_first_dir(p): + halves = os.path.split(p) + return get_first_dir(halves[0]) if halves[0] else halves[1] + + one_prefix = get_first_dir(tar.getnames()[0]) + has_prefix = all( + map(lambda name: name.startswith(one_prefix), tar.getnames()) + ) + + # GitLab puts everything down a directory; move it up. 
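+                # e.g. if every member of a (hypothetical) archive starts with
+                # "libfoo-1a2b3c/", then "libfoo-1a2b3c/src/x.c" is copied up to
+                # "src/x.c" inside tmpextractdir before vendoring.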
+                if has_prefix:
+                    tardir = mozpath.join(tmpextractdir.name, one_prefix)
+                    mozfile.copy_contents(tardir, tmpextractdir.name)
+                    mozfile.remove(tardir)
+
+                if self.should_perform_step("include"):
+                    self.logInfo({}, "Retaining wanted files from upstream changes.")
+                    to_include = self.convert_patterns_to_paths(
+                        tmpextractdir.name,
+                        self.manifest["vendoring"].get("include", [])
+                        + DEFAULT_INCLUDE_FILES,
+                    )
+                else:
+                    self.logInfo({}, "Skipping retention of included files.")
+                    to_include = []
+
+                if self.should_perform_step("exclude"):
+                    self.logInfo({}, "Removing excluded files from upstream changes.")
+                    to_exclude = self.convert_patterns_to_paths(
+                        tmpextractdir.name,
+                        self.manifest["vendoring"].get("exclude", [])
+                        + DEFAULT_EXCLUDE_FILES,
+                    )
+                else:
+                    self.logInfo({}, "Skipping removing excluded files.")
+                    to_exclude = []
+
+                to_exclude = list(set(to_exclude) - set(to_include))
+                if to_exclude:
+                    self.logInfo(
+                        {"files": list_of_paths_to_readable_string(to_exclude)},
+                        "Removing: {files}",
+                    )
+                    for exclusion in to_exclude:
+                        mozfile.remove(exclusion)
+
+                # Clear out empty directories
+                # removeEmpty() won't remove directories containing only empty directories
+                # so just keep calling it as long as it's doing something
+                def removeEmpty(tmpextractdir):
+                    removed = False
+                    folders = list(os.walk(tmpextractdir))[1:]
+                    for folder in folders:
+                        if not folder[2]:
+                            try:
+                                os.rmdir(folder[0])
+                                removed = True
+                            except Exception:
+                                pass
+                    return removed
+
+                while removeEmpty(tmpextractdir.name):
+                    pass
+
+                # Then copy over the directories
+                if self.should_perform_step("move-contents"):
+                    self.logInfo({"d": vendor_dir}, "Copying to {d}.")
+                    mozfile.copy_contents(tmpextractdir.name, vendor_dir)
+                else:
+                    self.logInfo({}, "Skipping copying contents into tree.")
+                self._extract_directory = lambda: tmpextractdir.name
+            except Exception as e:
+                tmpextractdir.cleanup()
+                raise e
+
+    def update_yaml(self, revision, timestamp):
+        with open(self.yaml_file) as f:
+            yaml = f.readlines()
+
+        replaced = 0
+        replacements = [
+            ["  release:", " %s (%s)." % (revision, timestamp)],
+            ["  revision:", " %s" % (revision)],
+        ]
+
+        for i in range(0, len(yaml)):
+            l = yaml[i]
+
+            for r in replacements:
+                if r[0] in l:
+                    print("Found " + l)
+                    replaced += 1
+                    yaml[i] = re.sub(r[0] + r" [v\.a-f0-9]+.*$", r[0] + r[1], yaml[i])
+
+        assert len(replacements) == replaced
+
+        with open(self.yaml_file, "wb") as f:
+            f.write(("".join(yaml)).encode("utf-8"))
+
+    def spurious_check(self, revision, ignore_modified):
+        changed_files = set(
+            [
+                os.path.abspath(f)
+                for f in self.repository.get_changed_files(mode="staged")
+            ]
+        )
+        generated_files = set(
+            [
+                self.get_full_path(f)
+                for f in self.manifest["vendoring"].get("generated", [])
+            ]
+        )
+        changed_files = set(changed_files) - generated_files
+        if not changed_files:
+            self.logInfo({"r": revision}, "Upstream {r} hasn't modified files locally.")
+            # We almost certainly won't be here if ignore_modified was passed, because a
+            # modified local file will show up as a changed_file, but we'll be safe anyway.
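+            # In other words: if the only diffs are in files the manifest marks as
+            # "generated" (a hypothetical example: a checked-in checksum file),
+            # the update was machine-made noise and reverting it below is safe.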
+ if not ignore_modified and generated_files: + for g in generated_files: + self.repository.clean_directory(g) + elif generated_files: + self.log( + logging.CRITICAL, + "vendor", + {"files": generated_files}, + "Because you passed --ignore-modified we are not cleaning your" + + " working directory, but the following files were probably" + + " spuriously edited and can be reverted: {files}", + ) + sys.exit(-2) + + self.logInfo( + {"rev": revision, "num": len(changed_files)}, + "Version '{rev}' has changed {num} files.", + ) + + def update_files(self, revision): + if "update-actions" not in self.manifest["vendoring"]: + return + + for update in self.manifest["vendoring"]["update-actions"]: + if update["action"] == "copy-file": + src = self.get_full_path(update["from"]) + dst = self.get_full_path(update["to"]) + + self.logInfo( + {"s": src, "d": dst}, "action: copy-file src: {s} dst: {d}" + ) + + with open(src) as f: + contents = f.read() + with open(dst, "w") as f: + f.write(contents) + elif update["action"] == "move-file": + src = self.get_full_path(update["from"]) + dst = self.get_full_path(update["to"]) + + self.logInfo( + {"s": src, "d": dst}, "action: move-file src: {s} dst: {d}" + ) + + shutil.move(src, dst) + elif update["action"] == "move-dir": + src = self.get_full_path(update["from"]) + dst = self.get_full_path(update["to"]) + + self.logInfo( + {"src": src, "dst": dst}, "action: move-dir src: {src} dst: {dst}" + ) + + if not os.path.isdir(src): + raise Exception( + "Cannot move from a source directory %s that is not a directory" + % src + ) + os.makedirs(dst, exist_ok=True) + + def copy_tree(src, dst): + names = os.listdir(src) + os.makedirs(dst, exist_ok=True) + + for name in names: + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + + if os.path.isdir(srcname): + copy_tree(srcname, dstname) + else: + shutil.copy2(srcname, dstname) + + copy_tree(src, dst) + shutil.rmtree(src) + + elif update["action"] in ["replace-in-file", "replace-in-file-regex"]: + file = self.get_full_path(update["file"]) + + self.logInfo({"file": file}, "action: replace-in-file file: {file}") + + replacement = update["with"].replace("{revision}", revision) + _replace_in_file( + file, + update["pattern"], + replacement, + regex=update["action"] == "replace-in-file-regex", + ) + elif update["action"] == "delete-path": + path = self.get_full_path(update["path"]) + self.logInfo({"path": path}, "action: delete-path path: {path}") + mozfile.remove(path) + elif update["action"] in ["run-script", "run-command"]: + if update["action"] == "run-script": + command = self.get_full_path(update["script"], support_cwd=True) + else: + command = update["command"] + + run_dir = self.get_full_path(update["cwd"], support_cwd=True) + + args = [] + for a in update.get("args", []): + if a == "{revision}": + args.append(revision) + elif any( + s in a + for s in [ + "{cwd}", + "{vendor_dir}", + "{yaml_dir}", + "{tmpextractdir}", + ] + ): + args.append(self.get_full_path(a, support_cwd=True)) + else: + args.append(a) + + self.logInfo( + { + "command": command, + "run_dir": run_dir, + "args": args, + "type": update["action"], + }, + "action: {type} command: {command} working dir: {run_dir} args: {args}", + ) + extra_env = ( + {"GECKO_PATH": os.getcwd()} + if "GECKO_PATH" not in os.environ + else {} + ) + # We also add a signal to scripts that they are running under mach vendor + extra_env["MACH_VENDOR"] = "1" + self.run_process( + args=[command] + args, + cwd=run_dir, + log_name=command, + 
require_unix_environment=True, + append_env=extra_env, + ) + else: + assert False, "Unknown action supplied (how did this pass validation?)" + + def update_moz_build(self, vendoring_dir, moz_yaml_dir, add_to_exports): + if vendoring_dir == moz_yaml_dir: + vendoring_dir = moz_yaml_dir = None + + # If you edit this (especially for header files) you should double check + # rewrite_mozbuild.py around 'assignment_type' + source_suffixes = [".cc", ".c", ".cpp", ".S", ".asm"] + header_suffixes = [".h", ".hpp"] + + files_removed = self.repository.get_changed_files(diff_filter="D") + files_added = self.repository.get_changed_files(diff_filter="A") + + # Filter the files added to just source files we track in moz.build files. + files_added = [ + f for f in files_added if any([f.endswith(s) for s in source_suffixes]) + ] + header_files_to_add = [ + f for f in files_added if any([f.endswith(s) for s in header_suffixes]) + ] + if add_to_exports: + files_added += header_files_to_add + elif header_files_to_add: + self.log( + logging.WARNING, + "header_files_warning", + {}, + ( + "We found %s header files in the update, pass --add-to-exports if you want" + + " to attempt to include them in EXPORTS blocks: %s" + ) + % (len(header_files_to_add), header_files_to_add), + ) + + self.logInfo( + {"added": len(files_added), "removed": len(files_removed)}, + "Found {added} files added and {removed} files removed.", + ) + + should_abort = False + for f in files_added: + try: + add_file_to_moz_build_file(f, moz_yaml_dir, vendoring_dir) + except MozBuildRewriteException: + self.log( + logging.ERROR, + "vendor", + {}, + "Could not add %s to the appropriate moz.build file" % f, + ) + should_abort = True + + for f in files_removed: + try: + remove_file_from_moz_build_file(f, moz_yaml_dir, vendoring_dir) + except MozBuildRewriteException: + self.log( + logging.ERROR, + "vendor", + {}, + "Could not remove %s from the appropriate moz.build file" % f, + ) + should_abort = True + + if should_abort: + self.log( + logging.ERROR, + "vendor", + {}, + "This is a deficiency in ./mach vendor . " + + "Please review the affected files before committing.", + ) + # Exit with -1 to distinguish this from the Exception case of exiting with 1 + sys.exit(-1) + + def import_local_patches(self, patches, yaml_dir, vendor_dir): + self.logInfo({}, "Importing local patches...") + for patch in self.convert_patterns_to_paths(yaml_dir, patches): + script = [ + "patch", + "-p1", + "--directory", + vendor_dir, + "--input", + os.path.abspath(patch), + "--no-backup-if-mismatch", + ] + self.run_process( + args=script, + log_name=script, + ) diff --git a/python/mozbuild/mozbuild/vendor/vendor_python.py b/python/mozbuild/mozbuild/vendor/vendor_python.py new file mode 100644 index 0000000000..db554e20d4 --- /dev/null +++ b/python/mozbuild/mozbuild/vendor/vendor_python.py @@ -0,0 +1,228 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import subprocess +import sys +from pathlib import Path + +import mozfile +from mozfile import TemporaryDirectory +from mozpack.files import FileFinder + +from mozbuild.base import MozbuildObject + +EXCLUDED_PACKAGES = { + # dlmanager's package on PyPI only has metadata, but is missing the code. + # https://github.com/parkouss/dlmanager/issues/1 + "dlmanager", + # gyp's package on PyPI doesn't have any downloadable files. 
+ "gyp", + # We keep some wheels vendored in "_venv" for use in Mozharness + "_venv", + # We manage vendoring "vsdownload" with a moz.yaml file (there is no module + # on PyPI). + "vsdownload", + # The moz.build file isn't a vendored module, so don't delete it. + "moz.build", + "requirements.in", + # The ansicon package contains DLLs and we don't want to arbitrarily vendor + # them since they could be unsafe. This module should rarely be used in practice + # (it's a fallback for old versions of windows). We've intentionally vendored a + # modified 'dummy' version of it so that the dependency checks still succeed, but + # if it ever is attempted to be used, it will fail gracefully. + "ansicon", +} + + +class VendorPython(MozbuildObject): + def __init__(self, *args, **kwargs): + MozbuildObject.__init__(self, *args, virtualenv_name="vendor", **kwargs) + + def vendor(self, keep_extra_files=False): + from mach.python_lockfile import PoetryHandle + + self.populate_logger() + self.log_manager.enable_unstructured() + + vendor_dir = Path(self.topsrcdir) / "third_party" / "python" + requirements_in = vendor_dir / "requirements.in" + poetry_lockfile = vendor_dir / "poetry.lock" + _sort_requirements_in(requirements_in) + + with TemporaryDirectory() as work_dir: + work_dir = Path(work_dir) + poetry = PoetryHandle(work_dir) + poetry.add_requirements_in_file(requirements_in) + poetry.reuse_existing_lockfile(poetry_lockfile) + lockfiles = poetry.generate_lockfiles(do_update=False) + + # Vendoring packages is only viable if it's possible to have a single + # set of packages that work regardless of which environment they're used in. + # So, we scrub environment markers, so that we essentially ask pip to + # download "all dependencies for all environments". Pip will then either + # fetch them as requested, or intelligently raise an error if that's not + # possible (e.g.: if different versions of Python would result in different + # packages/package versions). + pip_lockfile_without_markers = work_dir / "requirements.no-markers.txt" + shutil.copy(str(lockfiles.pip_lockfile), str(pip_lockfile_without_markers)) + remove_environment_markers_from_requirements_txt( + pip_lockfile_without_markers + ) + + with TemporaryDirectory() as tmp: + # use requirements.txt to download archived source distributions of all + # packages + subprocess.check_call( + [ + sys.executable, + "-m", + "pip", + "download", + "-r", + str(pip_lockfile_without_markers), + "--no-deps", + "--dest", + tmp, + "--abi", + "none", + "--platform", + "any", + ] + ) + _purge_vendor_dir(vendor_dir) + self._extract(tmp, vendor_dir, keep_extra_files) + + requirements_out = vendor_dir / "requirements.txt" + + # since requirements.out and poetry.lockfile are both outputs from + # third party code, they may contain carriage returns on Windows. We + # should strip the carriage returns to maintain consistency in our output + # regardless of which platform is doing the vendoring. We can do this and + # the copying at the same time to minimize reads and writes. 
+            _copy_file_strip_carriage_return(lockfiles.pip_lockfile, requirements_out)
+            _copy_file_strip_carriage_return(lockfiles.poetry_lockfile, poetry_lockfile)
+            self.repository.add_remove_files(vendor_dir)
+
+    def _extract(self, src, dest, keep_extra_files=False):
+        """extract source distribution into vendor directory"""
+
+        ignore = ()
+        if not keep_extra_files:
+            ignore = ("*/doc", "*/docs", "*/test", "*/tests", "**/.git")
+        finder = FileFinder(src)
+        for archive, _ in finder.find("*"):
+            _, ext = os.path.splitext(archive)
+            archive_path = os.path.join(finder.base, archive)
+            if ext == ".whl":
+                # Archive is named like "$package-name-1.0-py2.py3-none-any.whl", and should
+                # have four dashes that aren't part of the package name.
+                package_name, version, spec, abi, platform_and_suffix = archive.rsplit(
+                    "-", 4
+                )
+
+                if package_name in EXCLUDED_PACKAGES:
+                    print(
+                        f"'{package_name}' is on the exclusion list and will not be vendored."
+                    )
+                    continue
+
+                target_package_dir = os.path.join(dest, package_name)
+                os.mkdir(target_package_dir)
+
+                # Extract all the contents of the wheel into the package subdirectory.
+                # We're expecting at least a code directory and a ".dist-info" directory,
+                # though there may be a ".data" directory as well.
+                mozfile.extract(archive_path, target_package_dir, ignore=ignore)
+                _denormalize_symlinks(target_package_dir)
+            else:
+                # Archive is named like "$package-name-1.0.tar.gz", and the rightmost
+                # dash should separate the package name from the rest of the archive
+                # specifier.
+                package_name, archive_postfix = archive.rsplit("-", 1)
+                package_dir = os.path.join(dest, package_name)
+
+                if package_name in EXCLUDED_PACKAGES:
+                    print(
+                        f"'{package_name}' is on the exclusion list and will not be vendored."
+                    )
+                    continue
+
+                # The archive should only contain one top-level directory, which has
+                # the source files. We extract this directory directly to
+                # the vendor directory.
+                extracted_files = mozfile.extract(archive_path, dest, ignore=ignore)
+                assert len(extracted_files) == 1
+                extracted_package_dir = extracted_files[0]
+
+                # The extracted package dir includes the version in the name,
+                # which we don't want.
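+                # e.g. a (hypothetical) sdist extracted as "$dest/foo-1.0" is
+                # renamed to "$dest/foo" so the vendored path stays stable
+                # across version updates.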
+                mozfile.move(extracted_package_dir, package_dir)
+                _denormalize_symlinks(package_dir)
+
+
+def _sort_requirements_in(requirements_in: Path):
+    requirements = {}
+    with requirements_in.open(mode="r", newline="\n") as f:
+        comments = []
+        for line in f.readlines():
+            line = line.strip()
+            if not line or line.startswith("#"):
+                comments.append(line)
+                continue
+            name, version = line.split("==")
+            requirements[name] = version, comments
+            comments = []
+
+    with requirements_in.open(mode="w", newline="\n") as f:
+        for name, (version, comments) in sorted(requirements.items()):
+            if comments:
+                f.write("{}\n".format("\n".join(comments)))
+            f.write("{}=={}\n".format(name, version))
+
+
+def remove_environment_markers_from_requirements_txt(requirements_txt: Path):
+    with requirements_txt.open(mode="r", newline="\n") as f:
+        lines = f.readlines()
+    markerless_lines = []
+    continuation_token = " \\"
+    for line in lines:
+        line = line.rstrip()
+
+        if not line.startswith(" ") and not line.startswith("#") and ";" in line:
+            has_continuation_token = line.endswith(continuation_token)
+            # The first line of each requirement looks something like:
+            #   package-name==X.Y; python_version>=3.7
+            # We can scrub the environment marker by splitting on the semicolon
+            line = line.split(";")[0]
+            if has_continuation_token:
+                line += continuation_token
+            markerless_lines.append(line)
+        else:
+            markerless_lines.append(line)
+
+    with requirements_txt.open(mode="w", newline="\n") as f:
+        f.write("\n".join(markerless_lines))
+
+
+def _purge_vendor_dir(vendor_dir):
+    for child in Path(vendor_dir).iterdir():
+        if child.name not in EXCLUDED_PACKAGES:
+            mozfile.remove(str(child))
+
+
+def _denormalize_symlinks(target):
+    # If any files inside the vendored package were symlinks, turn them into normal files
+    # because hg.mozilla.org forbids symlinks in the repository.
+    link_finder = FileFinder(target)
+    for _, f in link_finder.find("**"):
+        if os.path.islink(f.path):
+            link_target = os.path.realpath(f.path)
+            os.unlink(f.path)
+            shutil.copyfile(link_target, f.path)
+
+
+def _copy_file_strip_carriage_return(file_src: Path, file_dst):
+    shutil.copyfileobj(file_src.open(mode="r"), file_dst.open(mode="w", newline="\n"))
diff --git a/python/mozbuild/mozbuild/vendor/vendor_rust.py b/python/mozbuild/mozbuild/vendor/vendor_rust.py
new file mode 100644
index 0000000000..f87d2efde8
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/vendor_rust.py
@@ -0,0 +1,961 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import hashlib
+import json
+import logging
+import os
+import re
+import subprocess
+import typing
+from collections import defaultdict
+from itertools import dropwhile
+from pathlib import Path
+
+import mozpack.path as mozpath
+import toml
+from looseversion import LooseVersion
+from mozboot.util import MINIMUM_RUST_VERSION
+
+from mozbuild.base import BuildEnvironmentNotFoundException, MozbuildObject
+
+if typing.TYPE_CHECKING:
+    import datetime
+
+# Type of a TOML value.
+TomlItem = typing.Union[
+    str,
+    typing.List["TomlItem"],
+    typing.Dict[str, "TomlItem"],
+    bool,
+    int,
+    float,
+    "datetime.datetime",
+    "datetime.date",
+    "datetime.time",
+]
+
+
+CARGO_CONFIG_TEMPLATE = """\
+# This file contains vendoring instructions for cargo.
+# It was generated by `mach vendor rust`.
+# Please do not edit.
+
+{config}
+
+# Take advantage of the fact that cargo will treat lines starting with #
+# as comments to add preprocessing directives. This file can thus be copied
+# as-is to $topsrcdir/.cargo/config with no preprocessing to be used there
+# (for e.g. independent tasks building rust code), or be preprocessed by
+# the build system to produce a .cargo/config with the right content.
+#define REPLACE_NAME {replace_name}
+#define VENDORED_DIRECTORY {directory}
+# We explicitly exclude the following section when preprocessing because
+# it would overlap with the preprocessed [source."@REPLACE_NAME@"], and
+# cargo would fail.
+#ifndef REPLACE_NAME
+[source.{replace_name}]
+directory = "{directory}"
+#endif
+
+# Thankfully, @REPLACE_NAME@ is unlikely to be a legitimate source, so
+# cargo will ignore it when it's here verbatim.
+#filter substitution
+[source."@REPLACE_NAME@"]
+directory = "@top_srcdir@/@VENDORED_DIRECTORY@"
+"""
+
+
+CARGO_LOCK_NOTICE = """
+NOTE: `cargo vendor` may have made changes to your Cargo.lock. To restore your
+Cargo.lock to the HEAD version, run `git checkout -- Cargo.lock` or
+`hg revert Cargo.lock`.
+"""
+
+
+WINDOWS_UNDESIRABLE_REASON = """\
+The windows and windows-sys crates and their dependencies are too big to \
+vendor, and are a risk of version duplication due to their current update \
+cadence. Until this is worked out with upstream, we prefer to avoid them.\
+"""
+
+PACKAGES_WE_DONT_WANT = {
+    "windows-sys": WINDOWS_UNDESIRABLE_REASON,
+    "windows": WINDOWS_UNDESIRABLE_REASON,
+    "windows_aarch64_msvc": WINDOWS_UNDESIRABLE_REASON,
+    "windows_i686_gnu": WINDOWS_UNDESIRABLE_REASON,
+    "windows_i686_msvc": WINDOWS_UNDESIRABLE_REASON,
+    "windows_x86_64_gnu": WINDOWS_UNDESIRABLE_REASON,
+    "windows_x86_64_msvc": WINDOWS_UNDESIRABLE_REASON,
+}
+
+PACKAGES_WE_ALWAYS_WANT_AN_OVERRIDE_OF = [
+    "autocfg",
+    "cmake",
+    "vcpkg",
+]
+
+
+# Historically duplicated crates. Eventually we want this list to be empty.
+# If you do need to make changes increasing the number of duplicates, please
+# add a comment as to why.
+TOLERATED_DUPES = {
+    "mio": 2,
+    # Transition from time 0.1 to 0.3 underway, but chrono is stuck on 0.1
+    # and hasn't been updated in 1.5 years (a hypothetical update is
+    # expected to remove the dependency on time altogether).
+    "time": 2,
+}
+
+
+class VendorRust(MozbuildObject):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._issues = []
+
+    def serialize_issues_json(self):
+        return json.dumps(
+            {
+                "Cargo.lock": [
+                    {
+                        "path": "Cargo.lock",
+                        "column": None,
+                        "line": None,
+                        "level": "error" if level == logging.ERROR else "warning",
+                        "message": msg,
+                    }
+                    for (level, msg) in self._issues
+                ]
+            }
+        )
+
+    def log(self, level, action, params, format_str):
+        if level >= logging.WARNING:
+            self._issues.append((level, format_str.format(**params)))
+        super().log(level, action, params, format_str)
+
+    def get_cargo_path(self):
+        try:
+            return self.substs["CARGO"]
+        except (BuildEnvironmentNotFoundException, KeyError):
+            if "MOZ_AUTOMATION" in os.environ:
+                cargo = os.path.join(
+                    os.environ["MOZ_FETCHES_DIR"], "rustc", "bin", "cargo"
+                )
+                assert os.path.exists(cargo)
+                return cargo
+            # Default if this tree isn't configured.
+            from mozfile import which
+
+            cargo = which("cargo")
+            if not cargo:
+                raise OSError(
+                    errno.ENOENT,
+                    (
+                        "Could not find 'cargo' on your $PATH. "
+                        "Hint: have you run `mach build` or `mach configure`?"
+ ), + ) + return cargo + + def check_cargo_version(self, cargo): + """ + Ensure that Cargo is new enough. + """ + out = ( + subprocess.check_output([cargo, "--version"]) + .splitlines()[0] + .decode("UTF-8") + ) + if not out.startswith("cargo"): + return False + version = LooseVersion(out.split()[1]) + # Cargo 1.68.0 changed vendoring in a way that creates a lot of noise + # if we go back and forth between vendoring with an older version and + # a newer version. Only allow the newer versions. + minimum_rust_version = MINIMUM_RUST_VERSION + if LooseVersion("1.68.0") >= MINIMUM_RUST_VERSION: + minimum_rust_version = "1.68.0" + if version < minimum_rust_version: + self.log( + logging.ERROR, + "cargo_version", + {}, + "Cargo >= {0} required (install Rust {0} or newer)".format( + minimum_rust_version + ), + ) + return False + self.log(logging.DEBUG, "cargo_version", {}, "cargo is new enough") + return True + + def has_modified_files(self): + """ + Ensure that there aren't any uncommitted changes to files + in the working copy, since we're going to change some state + on the user. Allow changes to Cargo.{toml,lock} since that's + likely to be a common use case. + """ + modified = [ + f + for f in self.repository.get_changed_files("M") + if os.path.basename(f) not in ("Cargo.toml", "Cargo.lock") + and not f.startswith("supply-chain/") + ] + if modified: + self.log( + logging.ERROR, + "modified_files", + {}, + """You have uncommitted changes to the following files: + +{files} + +Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`. +""".format( + files="\n".join(sorted(modified)) + ), + ) + return modified + + def check_openssl(self): + """ + Set environment flags for building with openssl. + + MacOS doesn't include openssl, but the openssl-sys crate used by + mach-vendor expects one of the system. It's common to have one + installed in /usr/local/opt/openssl by homebrew, but custom link + flags are necessary to build against it. + """ + + test_paths = ["/usr/include", "/usr/local/include"] + if any( + [os.path.exists(os.path.join(path, "openssl/ssl.h")) for path in test_paths] + ): + # Assume we can use one of these system headers. + return None + + if os.path.exists("/usr/local/opt/openssl/include/openssl/ssl.h"): + # Found a likely homebrew install. + self.log( + logging.INFO, "openssl", {}, "Using OpenSSL in /usr/local/opt/openssl" + ) + return { + "OPENSSL_INCLUDE_DIR": "/usr/local/opt/openssl/include", + "OPENSSL_LIB_DIR": "/usr/local/opt/openssl/lib", + } + + self.log(logging.ERROR, "openssl", {}, "OpenSSL not found!") + return None + + def _ensure_cargo(self): + """ + Ensures all the necessary cargo bits are installed. + + Returns the path to cargo if successful, None otherwise. + """ + cargo = self.get_cargo_path() + if not self.check_cargo_version(cargo): + return None + return cargo + + # A whitelist of acceptable license identifiers for the + # packages.license field from https://spdx.org/licenses/. Cargo + # documentation claims that values are checked against the above + # list and that multiple entries can be separated by '/'. We + # choose to list all combinations instead for the sake of + # completeness and because some entries below obviously do not + # conform to the format prescribed in the documentation. + # + # It is insufficient to have additions to this whitelist reviewed + # solely by a build peer; any additions must be checked by somebody + # competent to review licensing minutiae. + + # Licenses for code used at runtime. 
Please see the above comment before
+    # adding anything to this list.
+    RUNTIME_LICENSE_WHITELIST = [
+        "Apache-2.0",
+        "Apache-2.0 WITH LLVM-exception",
+        # BSD-2-Clause and BSD-3-Clause are ok, but packages using them
+        # must be added to the appropriate section of about:licenses.
+        # To encourage people to remember to do that, we do not whitelist
+        # the licenses themselves, and we require the packages to be added
+        # to RUNTIME_LICENSE_PACKAGE_WHITELIST below.
+        "CC0-1.0",
+        "ISC",
+        "MIT",
+        "MPL-2.0",
+        "Unicode-DFS-2016",
+        "Unlicense",
+        "Zlib",
+    ]
+
+    # Licenses for code used at build time (e.g. code generators). Please see the above
+    # comments before adding anything to this list.
+    BUILDTIME_LICENSE_WHITELIST = {
+        "BSD-3-Clause": [
+            "bindgen",
+            "fuchsia-zircon",
+            "fuchsia-zircon-sys",
+            "fuchsia-cprng",
+            "glsl",
+            "instant",
+        ]
+    }
+
+    # This whitelist should only be used for packages that use an acceptable
+    # license, but that also need to be explicitly mentioned in about:license.
+    RUNTIME_LICENSE_PACKAGE_WHITELIST = {
+        "BSD-2-Clause": [
+            "arrayref",
+            "cloudabi",
+            "Inflector",
+            "mach",
+            "qlog",
+        ],
+        "BSD-3-Clause": [],
+    }
+
+    # ICU4X is distributed as individual crates that all share the same LICENSE
+    # that will need to be individually added to the allow list below. We'll
+    # define the SHA256 once here, to make the review process easier as new
+    # ICU4X crates are vendored into the tree.
+    ICU4X_LICENSE_SHA256 = (
+        "02420cc1b4c26d9a3318d60fd57048d015831249a5b776a1ada75cd227e78630"
+    )
+
+    # This whitelist should only be used for packages that use a
+    # license-file and for which the license-file entry has been
+    # reviewed.  The table is keyed by package names and maps to the
+    # sha256 hash of the license file that we reviewed.
+    #
+    # As above, it is insufficient to have additions to this whitelist
+    # reviewed solely by a build peer; any additions must be checked by
+    # somebody competent to review licensing minutiae.
+    RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST = {
+        # MIT
+        "deque": "6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb",
+        # we're whitelisting this fuchsia crate because it doesn't get built in the final
+        # product but has a license-file that needs ignoring
+        "fuchsia-cprng": "03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b",
+        # Old ICU4X crates for ICU4X 1.0, see comment above.
+        "yoke-derive": ICU4X_LICENSE_SHA256,
+        "zerofrom-derive": ICU4X_LICENSE_SHA256,
+    }
+
+    @staticmethod
+    def runtime_license(package, license_string):
+        """Cargo docs say:
+        ---
+        https://doc.rust-lang.org/cargo/reference/manifest.html
+
+        This is an SPDX 2.1 license expression for this package.  Currently
+        crates.io will validate the license provided against a whitelist of
+        known license and exception identifiers from the SPDX license list
+        2.4.  Parentheses are not currently supported.
+
+        Multiple licenses can be separated with a `/`, although that usage
+        is deprecated.  Instead, use a license expression with AND and OR
+        operators to get more explicit semantics.
+        ---
+        But I have no idea how you can meaningfully AND licenses, so
+        we will abort if that is detected. We'll handle `/` and OR as
+        equivalent and approve if any is in our approved list."""
+
+        # This specific AND combination has been reviewed for encoding_rs.
+        if (
+            license_string == "(Apache-2.0 OR MIT) AND BSD-3-Clause"
+            and package == "encoding_rs"
+        ):
+            return True
+
+        # This specific AND combination has been reviewed for unicode-ident.
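+        # Any other AND combination falls through to the rejection below.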
+ if ( + license_string == "(MIT OR Apache-2.0) AND Unicode-DFS-2016" + and package == "unicode-ident" + ): + return True + + if re.search(r"\s+AND", license_string): + return False + + license_list = re.split(r"\s*/\s*|\s+OR\s+", license_string) + for license in license_list: + if license in VendorRust.RUNTIME_LICENSE_WHITELIST: + return True + if package in VendorRust.RUNTIME_LICENSE_PACKAGE_WHITELIST.get(license, []): + return True + return False + + def _check_licenses(self, vendor_dir: str) -> bool: + def verify_acceptable_license(package: str, license: str) -> bool: + self.log( + logging.DEBUG, "package_license", {}, "has license {}".format(license) + ) + + if not self.runtime_license(package, license): + if license not in self.BUILDTIME_LICENSE_WHITELIST: + self.log( + logging.ERROR, + "package_license_error", + {}, + """Package {} has a non-approved license: {}. + + Please request license review on the package's license. If the package's license + is approved, please add it to the whitelist of suitable licenses. + """.format( + package, license + ), + ) + return False + elif package not in self.BUILDTIME_LICENSE_WHITELIST[license]: + self.log( + logging.ERROR, + "package_license_error", + {}, + """Package {} has a license that is approved for build-time dependencies: + {} + but the package itself is not whitelisted as being a build-time only package. + + If your package is build-time only, please add it to the whitelist of build-time + only packages. Otherwise, you need to request license review on the package's license. + If the package's license is approved, please add it to the whitelist of suitable licenses. + """.format( + package, license + ), + ) + return False + return True + + def check_package(package_name: str) -> bool: + self.log( + logging.DEBUG, + "package_check", + {}, + "Checking license for {}".format(package_name), + ) + + toml_file = os.path.join(vendor_dir, package_name, "Cargo.toml") + with open(toml_file, encoding="utf-8") as fh: + toml_data = toml.load(fh) + + package_entry: typing.Dict[str, TomlItem] = toml_data["package"] + license = package_entry.get("license", None) + license_file = package_entry.get("license-file", None) + + if license is not None and type(license) is not str: + self.log( + logging.ERROR, + "package_invalid_license_format", + {}, + "package {} has an invalid `license` field (expected a string)".format( + package_name + ), + ) + return False + + if license_file is not None and type(license_file) is not str: + self.log( + logging.ERROR, + "package_invalid_license_format", + {}, + "package {} has an invalid `license-file` field (expected a string)".format( + package_name + ), + ) + return False + + # License information is optional for crates to provide, but + # we require it. + if not license and not license_file: + self.log( + logging.ERROR, + "package_no_license", + {}, + "package {} does not provide a license".format(package_name), + ) + return False + + # The Cargo.toml spec suggests that crates should either have + # `license` or `license-file`, but not both. We might as well + # be defensive about that, though. 
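+            # For example, a hypothetical crate declaring both
+            #     license = "MIT"
+            #     license-file = "LICENSE-MIT"
+            # in its Cargo.toml would be rejected by the check below.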
+ if license and license_file: + self.log( + logging.ERROR, + "package_many_licenses", + {}, + "package {} provides too many licenses".format(package_name), + ) + return False + + if license: + return verify_acceptable_license(package_name, license) + + # otherwise, it's a custom license in a separate file + assert license_file is not None + self.log( + logging.DEBUG, + "package_license_file", + {}, + "package has license-file {}".format(license_file), + ) + + if package_name not in self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST: + self.log( + logging.ERROR, + "package_license_file_unknown", + {}, + """Package {} has an unreviewed license file: {}. + +Please request review on the provided license; if approved, the package can be added +to the whitelist of packages whose licenses are suitable. +""".format( + package_name, license_file + ), + ) + return False + + approved_hash = self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST[package_name] + + with open( + os.path.join(vendor_dir, package_name, license_file), "rb" + ) as license_buf: + current_hash = hashlib.sha256(license_buf.read()).hexdigest() + + if current_hash != approved_hash: + self.log( + logging.ERROR, + "package_license_file_mismatch", + {}, + """Package {} has changed its license file: {} (hash {}). + +Please request review on the provided license; if approved, please update the +license file's hash. +""".format( + package_name, license_file, current_hash + ), + ) + return False + return True + + # Force all of the packages to be checked for license information + # before reducing via `all`, so all license issues are found in a + # single `mach vendor rust` invocation. + results = [ + check_package(p) + for p in os.listdir(vendor_dir) + if os.path.isdir(os.path.join(vendor_dir, p)) + ] + return all(results) + + def _check_build_rust(self, cargo_lock): + ret = True + crates = {} + for path in Path(self.topsrcdir).glob("build/rust/**/Cargo.toml"): + with open(path) as fh: + cargo_toml = toml.load(fh) + path = path.relative_to(self.topsrcdir) + package = cargo_toml["package"] + key = (package["name"], package["version"]) + if key in crates: + self.log( + logging.ERROR, + "build_rust", + { + "path": crates[key], + "path2": path, + "crate": key[0], + "version": key[1], + }, + "{path} and {path2} both contain {crate} {version}", + ) + ret = False + crates[key] = path + + for package in cargo_lock["package"]: + key = (package["name"], package["version"]) + if key in crates and "source" not in package: + crates.pop(key) + + for ((name, version), path) in crates.items(): + self.log( + logging.ERROR, + "build_rust", + {"path": path, "crate": name, "version": version}, + "{crate} {version} has an override in {path} that is not used", + ) + ret = False + return ret + + def vendor( + self, ignore_modified=False, build_peers_said_large_imports_were_ok=False + ): + from mozbuild.mach_commands import cargo_vet + + self.populate_logger() + self.log_manager.enable_unstructured() + if not ignore_modified and self.has_modified_files(): + return False + + cargo = self._ensure_cargo() + if not cargo: + self.log(logging.ERROR, "cargo_not_found", {}, "Cargo was not found.") + return False + + relative_vendor_dir = "third_party/rust" + vendor_dir = mozpath.join(self.topsrcdir, relative_vendor_dir) + + # We use check_call instead of mozprocess to ensure errors are displayed. + # We do an |update -p| here to regenerate the Cargo.lock file with minimal + # changes. 
See bug 1324462
+        res = subprocess.run([cargo, "update", "-p", "gkrust"], cwd=self.topsrcdir)
+        if res.returncode:
+            self.log(logging.ERROR, "cargo_update_failed", {}, "Cargo update failed.")
+            return False
+
+        with open(os.path.join(self.topsrcdir, "Cargo.lock")) as fh:
+            cargo_lock = toml.load(fh)
+        failed = False
+        for package in cargo_lock.get("patch", {}).get("unused", []):
+            self.log(
+                logging.ERROR,
+                "unused_patch",
+                {"crate": package["name"]},
+                """Unused patch in top-level Cargo.toml for {crate}.""",
+            )
+            failed = True
+
+        if not self._check_build_rust(cargo_lock):
+            failed = True
+
+        grouped = defaultdict(list)
+        for package in cargo_lock["package"]:
+            if package["name"] in PACKAGES_WE_ALWAYS_WANT_AN_OVERRIDE_OF:
+                # When the in-tree version is used, there is no `source` for
+                # it in Cargo.lock, which is what we expect.
+                if package.get("source"):
+                    self.log(
+                        logging.ERROR,
+                        "non_overridden",
+                        {
+                            "crate": package["name"],
+                            "version": package["version"],
+                            "source": package["source"],
+                        },
+                        "Crate {crate} v{version} must be overridden but isn't "
+                        "and comes from {source}.",
+                    )
+                    failed = True
+            elif package["name"] in PACKAGES_WE_DONT_WANT:
+                self.log(
+                    logging.ERROR,
+                    "undesirable",
+                    {
+                        "crate": package["name"],
+                        "version": package["version"],
+                        "reason": PACKAGES_WE_DONT_WANT[package["name"]],
+                    },
+                    "Crate {crate} is not desirable: {reason}",
+                )
+                failed = True
+            grouped[package["name"]].append(package)
+
+        for name, packages in grouped.items():
+            # Allow crates of the same name when one depends on the other.
+            num = len(
+                [
+                    p
+                    for p in packages
+                    if all(d.split()[0] != name for d in p.get("dependencies", []))
+                ]
+            )
+            expected = TOLERATED_DUPES.get(name, 1)
+            if num > expected:
+                self.log(
+                    logging.ERROR,
+                    "duplicate_crate",
+                    {
+                        "crate": name,
+                        "num": num,
+                        "expected": expected,
+                        "file": Path(__file__).relative_to(self.topsrcdir),
+                    },
+                    "There are {num} different versions of crate {crate} "
+                    "(expected {expected}). Please avoid the extra duplication "
+                    "or adjust TOLERATED_DUPES in {file} if not possible "
+                    "(but we'd prefer the former).",
+                )
+                failed = True
+            elif num < expected and num > 1:
+                self.log(
+                    logging.ERROR,
+                    "less_duplicate_crate",
+                    {
+                        "crate": name,
+                        "num": num,
+                        "expected": expected,
+                        "file": Path(__file__).relative_to(self.topsrcdir),
+                    },
+                    "There are {num} different versions of crate {crate} "
+                    "(expected {expected}). Please adjust TOLERATED_DUPES in "
+                    "{file} to reflect this improvement.",
+                )
+                failed = True
+            elif num < expected and num > 0:
+                self.log(
+                    logging.ERROR,
+                    "less_duplicate_crate",
+                    {
+                        "crate": name,
+                        "file": Path(__file__).relative_to(self.topsrcdir),
+                    },
+                    "Crate {crate} is not duplicated anymore. "
+                    "Please adjust TOLERATED_DUPES in {file} to reflect this improvement.",
+                )
+                failed = True
+            elif name in TOLERATED_DUPES and expected <= 1:
+                self.log(
+                    logging.ERROR,
+                    "broken_allowed_dupes",
+                    {
+                        "crate": name,
+                        "file": Path(__file__).relative_to(self.topsrcdir),
+                    },
+                    "Crate {crate} is not duplicated. Remove it from "
+                    "TOLERATED_DUPES in {file}.",
+                )
+                failed = True
+
+        for name in TOLERATED_DUPES:
+            if name not in grouped:
+                self.log(
+                    logging.ERROR,
+                    "outdated_allowed_dupes",
+                    {
+                        "crate": name,
+                        "file": Path(__file__).relative_to(self.topsrcdir),
+                    },
+                    "Crate {crate} is not in Cargo.lock anymore. Remove it from "
+                    "TOLERATED_DUPES in {file}.",
+                )
+                failed = True
+
+        # Only emit warnings for cargo-vet for now.
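+        # Put the directory of the cargo binary we resolved above first on
+        # PATH, so that the `cargo vet` invocation below uses that same
+        # toolchain.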
+ env = os.environ.copy() + env["PATH"] = os.pathsep.join( + ( + str(Path(cargo).parent), + os.environ["PATH"], + ) + ) + flags = ["--output-format=json"] + if "MOZ_AUTOMATION" in os.environ: + flags.append("--locked") + flags.append("--frozen") + res = cargo_vet( + self, + flags, + stdout=subprocess.PIPE, + env=env, + ) + if res.returncode: + vet = json.loads(res.stdout) + logged_error = False + for failure in vet.get("failures", []): + failure["crate"] = failure.pop("name") + self.log( + logging.ERROR, + "cargo_vet_failed", + failure, + "Missing audit for {crate}:{version} (requires {missing_criteria})." + " Run `./mach cargo vet` for more information.", + ) + logged_error = True + # NOTE: This could log more information, but the violation JSON + # output isn't super stable yet, so it's probably simpler to tell + # the caller to run `./mach cargo vet` directly. + for key in vet.get("violations", {}).keys(): + self.log( + logging.ERROR, + "cargo_vet_failed", + {"key": key}, + "Violation conflict for {key}. Run `./mach cargo vet` for more information.", + ) + logged_error = True + if "error" in vet: + # NOTE: The error format produced by cargo-vet is from the + # `miette` crate, and can include a lot of metadata and context. + # If we want to show more details in the future, we can expand + # this rendering to also include things like source labels and + # related error metadata. + error = vet["error"] + self.log( + logging.ERROR, + "cargo_vet_failed", + error, + "Vet {severity}: {message}", + ) + if "help" in error: + self.log(logging.INFO, "cargo_vet_failed", error, " help: {help}") + for cause in error.get("causes", []): + self.log( + logging.INFO, + "cargo_vet_failed", + {"cause": cause}, + " cause: {cause}", + ) + for related in error.get("related", []): + self.log( + logging.INFO, + "cargo_vet_failed", + related, + " related {severity}: {message}", + ) + self.log( + logging.INFO, + "cargo_vet_failed", + {}, + "Run `./mach cargo vet` for more information.", + ) + logged_error = True + if not logged_error: + self.log( + logging.ERROR, + "cargo_vet_failed", + {}, + "Unknown vet error. Run `./mach cargo vet` for more information.", + ) + failed = True + + # If we failed when checking the crates list and/or running `cargo vet`, + # stop before invoking `cargo vendor`. + if failed: + return False + + res = subprocess.run( + [cargo, "vendor", vendor_dir], cwd=self.topsrcdir, stdout=subprocess.PIPE + ) + if res.returncode: + self.log(logging.ERROR, "cargo_vendor_failed", {}, "Cargo vendor failed.") + return False + output = res.stdout.decode("UTF-8") + + # Get the snippet of configuration that cargo vendor outputs, and + # update .cargo/config with it. + # XXX(bug 1576765): Hopefully do something better after + # https://github.com/rust-lang/cargo/issues/7280 is addressed. + config = "\n".join( + dropwhile(lambda l: not l.startswith("["), output.splitlines()) + ) + + # The config is toml; parse it as such. + config = toml.loads(config) + + # For each replace-with, extract their configuration and update the + # corresponding directory to be relative to topsrcdir. + replaces = { + v["replace-with"] for v in config["source"].values() if "replace-with" in v + } + + # We only really expect one replace-with + if len(replaces) != 1: + self.log( + logging.ERROR, + "vendor_failed", + {}, + """cargo vendor didn't output a unique replace-with. 
Found: %s.""" + % replaces, + ) + return False + + replace_name = replaces.pop() + replace = config["source"].pop(replace_name) + replace["directory"] = mozpath.relpath( + mozpath.normsep(os.path.normcase(replace["directory"])), + mozpath.normsep(os.path.normcase(self.topsrcdir)), + ) + + cargo_config = os.path.join(self.topsrcdir, ".cargo", "config.in") + with open(cargo_config, "w", encoding="utf-8", newline="\n") as fh: + fh.write( + CARGO_CONFIG_TEMPLATE.format( + config=toml.dumps(config), + replace_name=replace_name, + directory=replace["directory"], + ) + ) + + if not self._check_licenses(vendor_dir): + self.log( + logging.ERROR, + "license_check_failed", + {}, + """The changes from `mach vendor rust` will NOT be added to version control. + +{notice}""".format( + notice=CARGO_LOCK_NOTICE + ), + ) + self.repository.clean_directory(vendor_dir) + return False + + self.repository.add_remove_files(vendor_dir) + + # 100k is a reasonable upper bound on source file size. + FILESIZE_LIMIT = 100 * 1024 + large_files = set() + cumulative_added_size = 0 + for f in self.repository.get_changed_files("A"): + path = mozpath.join(self.topsrcdir, f) + size = os.stat(path).st_size + cumulative_added_size += size + if size > FILESIZE_LIMIT: + large_files.add(f) + + # Forcefully complain about large files being added, as history has + # shown that large-ish files typically are not needed. + if large_files and not build_peers_said_large_imports_were_ok: + self.log( + logging.ERROR, + "filesize_check", + {}, + """The following files exceed the filesize limit of {size}: + +{files} + +If you can't reduce the size of these files, talk to a build peer (on the #build +channel at https://chat.mozilla.org) about the particular large files you are +adding. + +The changes from `mach vendor rust` will NOT be added to version control. + +{notice}""".format( + files="\n".join(sorted(large_files)), + size=FILESIZE_LIMIT, + notice=CARGO_LOCK_NOTICE, + ), + ) + self.repository.forget_add_remove_files(vendor_dir) + self.repository.clean_directory(vendor_dir) + return False + + # Only warn for large imports, since we may just have large code + # drops from time to time (e.g. importing features into m-c). + SIZE_WARN_THRESHOLD = 5 * 1024 * 1024 + if cumulative_added_size >= SIZE_WARN_THRESHOLD: + self.log( + logging.WARN, + "filesize_check", + {}, + """Your changes add {size} bytes of added files. + +Please consider finding ways to reduce the size of the vendored packages. 
+For instance, check the vendored packages for unusually large test or
+benchmark files that don't need to be published to crates.io and submit
+a pull request upstream to ignore those files when publishing.""".format(
+                    size=cumulative_added_size
+                ),
+            )
+        return True
diff --git a/python/mozbuild/mozpack/__init__.py b/python/mozbuild/mozpack/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/mozbuild/mozpack/apple_pkg/Distribution.template b/python/mozbuild/mozpack/apple_pkg/Distribution.template
new file mode 100644
index 0000000000..2f4b9484d9
--- /dev/null
+++ b/python/mozbuild/mozpack/apple_pkg/Distribution.template
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    #${app_name_url_encoded}.pkg
+
\ No newline at end of file
diff --git a/python/mozbuild/mozpack/apple_pkg/PackageInfo.template b/python/mozbuild/mozpack/apple_pkg/PackageInfo.template
new file mode 100644
index 0000000000..74d47e396c
--- /dev/null
+++ b/python/mozbuild/mozpack/apple_pkg/PackageInfo.template
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/python/mozbuild/mozpack/archive.py b/python/mozbuild/mozpack/archive.py
new file mode 100644
index 0000000000..89bf14b179
--- /dev/null
+++ b/python/mozbuild/mozpack/archive.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import bz2
+import gzip
+import stat
+import tarfile
+
+from .files import BaseFile, File
+
+# 2016-01-01T00:00:00+0000
+DEFAULT_MTIME = 1451606400
+
+
+# Python 3.9 contains this change:
+# https://github.com/python/cpython/commit/674935b8caf33e47c78f1b8e197b1b77a04992d2
+# which changes the output of tar creation compared to earlier versions.
+# As this code is used to generate tar files that are meant to be deterministic
+# across versions of python (specifically, it's used as part of computing the hash
+# of docker images, which needs to be identical between CI (which uses python 3.8),
+# and developer environments (using arbitrary versions of python, at this point,
+# most probably more recent than 3.9)).
+# What we do is subclass TarInfo so that if used on python >= 3.9, it reproduces the
+# behavior from python < 3.9.
+# Here's how it goes:
+# - the behavior in python >= 3.9 is the same as python < 3.9 when the type encoded
+#   in the tarinfo is CHRTYPE or BLKTYPE.
+# - the value of the type is only compared in the context of choosing which behavior
+#   to take
+# - we replace the type with the same value (so that using the value has no changes)
+#   but that pretends to be the same as CHRTYPE so that the condition that enables the
+#   old behavior is taken.
+class HackedType(bytes):
+    def __eq__(self, other):
+        if other == tarfile.CHRTYPE:
+            return True
+        # Delegate to the plain bytes comparison; `self == other` here
+        # would recurse back into this method forever.
+        return bytes.__eq__(self, other)
+
+
+class TarInfo(tarfile.TarInfo):
+    @staticmethod
+    def _create_header(info, format, encoding, errors):
+        info["type"] = HackedType(info["type"])
+        return tarfile.TarInfo._create_header(info, format, encoding, errors)
+
+
+def create_tar_from_files(fp, files):
+    """Create a tar file deterministically.
+
+    Receives a dict mapping names of files in the archive to local filesystem
+    paths or ``mozpack.files.BaseFile`` instances.
+
+    The files will be archived and written to the passed file handle opened
+    for writing.
+
+    Only regular files can be written.
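+
+    Example (a minimal sketch; the archive name and source path are
+    hypothetical)::
+
+        with open("out.tar", "wb") as fp:
+            create_tar_from_files(fp, {"docs/hello.txt": "/tmp/hello.txt"})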
+ + FUTURE accept a filename argument (or create APIs to write files) + """ + # The format is explicitly set to tarfile.GNU_FORMAT, because this default format + # has been changed in Python 3.8. + with tarfile.open( + name="", mode="w", fileobj=fp, dereference=True, format=tarfile.GNU_FORMAT + ) as tf: + for archive_path, f in sorted(files.items()): + if not isinstance(f, BaseFile): + f = File(f) + + ti = TarInfo(archive_path) + ti.mode = f.mode or 0o0644 + ti.type = tarfile.REGTYPE + + if not ti.isreg(): + raise ValueError("not a regular file: %s" % f) + + # Disallow setuid and setgid bits. This is an arbitrary restriction. + # However, since we set uid/gid to root:root, setuid and setgid + # would be a glaring security hole if the archive were + # uncompressed as root. + if ti.mode & (stat.S_ISUID | stat.S_ISGID): + raise ValueError("cannot add file with setuid or setgid set: " "%s" % f) + + # Set uid, gid, username, and group as deterministic values. + ti.uid = 0 + ti.gid = 0 + ti.uname = "" + ti.gname = "" + + # Set mtime to a constant value. + ti.mtime = DEFAULT_MTIME + + ti.size = f.size() + # tarfile wants to pass a size argument to read(). So just + # wrap/buffer in a proper file object interface. + tf.addfile(ti, f.open()) + + +def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9): + """Create a tar.gz file deterministically from files. + + This is a glorified wrapper around ``create_tar_from_files`` that + adds gzip compression. + + The passed file handle should be opened for writing in binary mode. + When the function returns, all data has been written to the handle. + """ + # Offset 3-7 in the gzip header contains an mtime. Pin it to a known + # value so output is deterministic. + gf = gzip.GzipFile( + filename=filename or "", + mode="wb", + fileobj=fp, + compresslevel=compresslevel, + mtime=DEFAULT_MTIME, + ) + with gf: + create_tar_from_files(gf, files) + + +class _BZ2Proxy(object): + """File object that proxies writes to a bz2 compressor.""" + + def __init__(self, fp, compresslevel=9): + self.fp = fp + self.compressor = bz2.BZ2Compressor(compresslevel) + self.pos = 0 + + def tell(self): + return self.pos + + def write(self, data): + data = self.compressor.compress(data) + self.pos += len(data) + self.fp.write(data) + + def close(self): + data = self.compressor.flush() + self.pos += len(data) + self.fp.write(data) + + +def create_tar_bz2_from_files(fp, files, compresslevel=9): + """Create a tar.bz2 file deterministically from files. + + This is a glorified wrapper around ``create_tar_from_files`` that + adds bzip2 compression. + + This function is similar to ``create_tar_gzip_from_files()``. + """ + proxy = _BZ2Proxy(fp, compresslevel=compresslevel) + create_tar_from_files(proxy, files) + proxy.close() diff --git a/python/mozbuild/mozpack/chrome/__init__.py b/python/mozbuild/mozpack/chrome/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozpack/chrome/flags.py b/python/mozbuild/mozpack/chrome/flags.py new file mode 100644 index 0000000000..6b096c862a --- /dev/null +++ b/python/mozbuild/mozpack/chrome/flags.py @@ -0,0 +1,278 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
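+
+# Parsing, serialization and matching of the flags that may appear on
+# chrome manifest entries, e.g. "contentaccessible=yes" or "appversion>=3.5".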
+
+import re
+from collections import OrderedDict
+
+import six
+from packaging.version import Version
+
+from mozpack.errors import errors
+
+
+class Flag(object):
+    """
+    Class for flags in manifest entries in the form:
+      "flag" (same as "flag=true")
+      "flag=yes|true|1"
+      "flag=no|false|0"
+    """
+
+    def __init__(self, name):
+        """
+        Initialize a Flag with the given name.
+        """
+        self.name = name
+        self.value = None
+
+    def add_definition(self, definition):
+        """
+        Add a flag value definition. Replaces any previously set value.
+        """
+        if definition == self.name:
+            self.value = True
+            return
+        assert definition.startswith(self.name)
+        if definition[len(self.name)] != "=":
+            return errors.fatal("Malformed flag: %s" % definition)
+        value = definition[len(self.name) + 1 :]
+        if value in ("yes", "true", "1", "no", "false", "0"):
+            self.value = value
+        else:
+            return errors.fatal("Unknown value in: %s" % definition)
+
+    def matches(self, value):
+        """
+        Return whether the flag value matches the given value. The values
+        are canonicalized for comparison.
+        """
+        if value in ("yes", "true", "1", True):
+            return self.value in ("yes", "true", "1", True)
+        if value in ("no", "false", "0", False):
+            return self.value in ("no", "false", "0", False, None)
+        raise RuntimeError("Invalid value: %s" % value)
+
+    def __str__(self):
+        """
+        Serialize the flag value in the same form given to the last
+        add_definition() call.
+        """
+        if self.value is None:
+            return ""
+        if self.value is True:
+            return self.name
+        return "%s=%s" % (self.name, self.value)
+
+    def __eq__(self, other):
+        return str(self) == other
+
+
+class StringFlag(object):
+    """
+    Class for string flags in manifest entries in the form:
+      "flag=string"
+      "flag!=string"
+    """
+
+    def __init__(self, name):
+        """
+        Initialize a StringFlag with the given name.
+        """
+        self.name = name
+        self.values = []
+
+    def add_definition(self, definition):
+        """
+        Add a string flag definition.
+        """
+        assert definition.startswith(self.name)
+        value = definition[len(self.name) :]
+        if value.startswith("="):
+            self.values.append(("==", value[1:]))
+        elif value.startswith("!="):
+            self.values.append(("!=", value[2:]))
+        else:
+            return errors.fatal("Malformed flag: %s" % definition)
+
+    def matches(self, value):
+        """
+        Return whether one of the string flag definitions matches the given
+        value.
+        For example,
+
+          flag = StringFlag('foo')
+          flag.add_definition('foo!=bar')
+          flag.matches('bar') returns False
+          flag.matches('qux') returns True
+          flag = StringFlag('foo')
+          flag.add_definition('foo=bar')
+          flag.add_definition('foo=baz')
+          flag.matches('bar') returns True
+          flag.matches('baz') returns True
+          flag.matches('qux') returns False
+        """
+        if not self.values:
+            return True
+        for comparison, val in self.values:
+            # comparison is "==" or "!=" (see add_definition), so this only
+            # evaluates a simple comparison of the two locals.
+            if eval("value %s val" % comparison):
+                return True
+        return False
+
+    def __str__(self):
+        """
+        Serialize the flag definitions in the same form given to each
+        add_definition() call.
+        """
+        res = []
+        for comparison, val in self.values:
+            if comparison == "==":
+                res.append("%s=%s" % (self.name, val))
+            else:
+                res.append("%s!=%s" % (self.name, val))
+        return " ".join(res)
+
+    def __eq__(self, other):
+        return str(self) == other
+
+
+class VersionFlag(object):
+    """
+    Class for version flags in manifest entries in the form:
+      "flag=version"
+      "flag<=version"
+      "flag<version"
+      "flag>=version"
+      "flag>version"
+    """
+
+    def __init__(self, name):
+        """
+        Initialize a VersionFlag with the given name.
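+        Definitions are added via add_definition(); version comparisons use
+        packaging.version.Version semantics.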
+ """ + self.name = name + self.values = [] + + def add_definition(self, definition): + """ + Add a version flag definition. + """ + assert definition.startswith(self.name) + value = definition[len(self.name) :] + if value.startswith("="): + self.values.append(("==", Version(value[1:]))) + elif len(value) > 1 and value[0] in ["<", ">"]: + if value[1] == "=": + if len(value) < 3: + return errors.fatal("Malformed flag: %s" % definition) + self.values.append((value[0:2], Version(value[2:]))) + else: + self.values.append((value[0], Version(value[1:]))) + else: + return errors.fatal("Malformed flag: %s" % definition) + + def matches(self, value): + """ + Return whether one of the version flag definitions matches the given + value. + For example, + + flag = VersionFlag('foo') + flag.add_definition('foo>=1.0') + flag.matches('1.0') returns True + flag.matches('1.1') returns True + flag.matches('0.9') returns False + flag = VersionFlag('foo') + flag.add_definition('foo>=1.0') + flag.add_definition('foo<0.5') + flag.matches('0.4') returns True + flag.matches('1.0') returns True + flag.matches('0.6') returns False + """ + value = Version(value) + if not self.values: + return True + for comparison, val in self.values: + if eval("value %s val" % comparison): + return True + return False + + def __str__(self): + """ + Serialize the flag definitions in the same form given to each + add_definition() call. + """ + res = [] + for comparison, val in self.values: + if comparison == "==": + res.append("%s=%s" % (self.name, val)) + else: + res.append("%s%s%s" % (self.name, comparison, val)) + return " ".join(res) + + def __eq__(self, other): + return str(self) == other + + +class Flags(OrderedDict): + """ + Class to handle a set of flags definitions given on a single manifest + entry. + + """ + + FLAGS = { + "application": StringFlag, + "appversion": VersionFlag, + "platformversion": VersionFlag, + "contentaccessible": Flag, + "os": StringFlag, + "osversion": VersionFlag, + "abi": StringFlag, + "platform": Flag, + "xpcnativewrappers": Flag, + "tablet": Flag, + "process": StringFlag, + "backgroundtask": StringFlag, + } + RE = re.compile(r"([!<>=]+)") + + def __init__(self, *flags): + """ + Initialize a set of flags given in string form. + flags = Flags('contentaccessible=yes', 'appversion>=3.5') + """ + OrderedDict.__init__(self) + for f in flags: + name = self.RE.split(f) + name = name[0] + if name not in self.FLAGS: + errors.fatal("Unknown flag: %s" % name) + continue + if name not in self: + self[name] = self.FLAGS[name](name) + self[name].add_definition(f) + + def __str__(self): + """ + Serialize the set of flags. + """ + return " ".join(str(self[k]) for k in self) + + def match(self, **filter): + """ + Return whether the set of flags match the set of given filters. + flags = Flags('contentaccessible=yes', 'appversion>=3.5', + 'application=foo') + + flags.match(application='foo') returns True + flags.match(application='foo', appversion='3.5') returns True + flags.match(application='foo', appversion='3.0') returns False + + """ + for name, value in six.iteritems(filter): + if name not in self: + continue + if not self[name].matches(value): + return False + return True diff --git a/python/mozbuild/mozpack/chrome/manifest.py b/python/mozbuild/mozpack/chrome/manifest.py new file mode 100644 index 0000000000..14c11d4c1d --- /dev/null +++ b/python/mozbuild/mozpack/chrome/manifest.py @@ -0,0 +1,400 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import re + +import six +from six.moves.urllib.parse import urlparse + +import mozpack.path as mozpath +from mozpack.chrome.flags import Flags +from mozpack.errors import errors + + +class ManifestEntry(object): + """ + Base class for all manifest entry types. + Subclasses may define the following class or member variables: + + - localized: indicates whether the manifest entry is used for localized + data. + - type: the manifest entry type (e.g. 'content' in + 'content global content/global/') + - allowed_flags: a set of flags allowed to be defined for the given + manifest entry type. + + A manifest entry is attached to a base path, defining where the manifest + entry is bound to, and that is used to find relative paths defined in + entries. + """ + + localized = False + type = None + allowed_flags = [ + "application", + "platformversion", + "os", + "osversion", + "abi", + "xpcnativewrappers", + "tablet", + "process", + "contentaccessible", + "backgroundtask", + ] + + def __init__(self, base, *flags): + """ + Initialize a manifest entry with the given base path and flags. + """ + self.base = base + self.flags = Flags(*flags) + if not all(f in self.allowed_flags for f in self.flags): + errors.fatal( + "%s unsupported for %s manifest entries" + % ( + ",".join(f for f in self.flags if f not in self.allowed_flags), + self.type, + ) + ) + + def serialize(self, *args): + """ + Serialize the manifest entry. + """ + entry = [self.type] + list(args) + flags = str(self.flags) + if flags: + entry.append(flags) + return " ".join(entry) + + def __eq__(self, other): + return self.base == other.base and str(self) == str(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return "<%s@%s>" % (str(self), self.base) + + def move(self, base): + """ + Return a new manifest entry with a different base path. + """ + return parse_manifest_line(base, str(self)) + + def rebase(self, base): + """ + Return a new manifest entry with all relative paths defined in the + entry relative to a new base directory. + The base class doesn't define relative paths, so it is equivalent to + move(). + """ + return self.move(base) + + +class ManifestEntryWithRelPath(ManifestEntry): + """ + Abstract manifest entry type with a relative path definition. + """ + + def __init__(self, base, relpath, *flags): + ManifestEntry.__init__(self, base, *flags) + self.relpath = relpath + + def __str__(self): + return self.serialize(self.relpath) + + def rebase(self, base): + """ + Return a new manifest entry with all relative paths defined in the + entry relative to a new base directory. + """ + clone = ManifestEntry.rebase(self, base) + clone.relpath = mozpath.rebase(self.base, base, self.relpath) + return clone + + @property + def path(self): + return mozpath.normpath(mozpath.join(self.base, self.relpath)) + + +class Manifest(ManifestEntryWithRelPath): + """ + Class for 'manifest' entries. + manifest some/path/to/another.manifest + """ + + type = "manifest" + + +class ManifestChrome(ManifestEntryWithRelPath): + """ + Abstract class for chrome entries. + """ + + def __init__(self, base, name, relpath, *flags): + ManifestEntryWithRelPath.__init__(self, base, relpath, *flags) + self.name = name + + @property + def location(self): + return mozpath.join(self.base, self.relpath) + + +class ManifestContent(ManifestChrome): + """ + Class for 'content' entries. 
+ content global content/global/ + """ + + type = "content" + allowed_flags = ManifestChrome.allowed_flags + [ + "contentaccessible", + "platform", + ] + + def __str__(self): + return self.serialize(self.name, self.relpath) + + +class ManifestMultiContent(ManifestChrome): + """ + Abstract class for chrome entries with multiple definitions. + Used for locale and skin entries. + """ + + type = None + + def __init__(self, base, name, id, relpath, *flags): + ManifestChrome.__init__(self, base, name, relpath, *flags) + self.id = id + + def __str__(self): + return self.serialize(self.name, self.id, self.relpath) + + +class ManifestLocale(ManifestMultiContent): + """ + Class for 'locale' entries. + locale global en-US content/en-US/ + locale global fr content/fr/ + """ + + localized = True + type = "locale" + + +class ManifestSkin(ManifestMultiContent): + """ + Class for 'skin' entries. + skin global classic/1.0 content/skin/classic/ + """ + + type = "skin" + + +class ManifestOverload(ManifestEntry): + """ + Abstract class for chrome entries defining some kind of overloading. + Used for overlay, override or style entries. + """ + + type = None + + def __init__(self, base, overloaded, overload, *flags): + ManifestEntry.__init__(self, base, *flags) + self.overloaded = overloaded + self.overload = overload + + def __str__(self): + return self.serialize(self.overloaded, self.overload) + + +class ManifestOverlay(ManifestOverload): + """ + Class for 'overlay' entries. + overlay chrome://global/content/viewSource.xul \ + chrome://browser/content/viewSourceOverlay.xul + """ + + type = "overlay" + + +class ManifestStyle(ManifestOverload): + """ + Class for 'style' entries. + style chrome://global/content/viewSource.xul \ + chrome://browser/skin/ + """ + + type = "style" + + +class ManifestOverride(ManifestOverload): + """ + Class for 'override' entries. + override chrome://global/locale/netError.dtd \ + chrome://browser/locale/netError.dtd + """ + + type = "override" + + +class ManifestResource(ManifestEntry): + """ + Class for 'resource' entries. + resource gre-resources toolkit/res/ + resource services-sync resource://gre/modules/services-sync/ + + The target may be a relative path or a resource or chrome url. + """ + + type = "resource" + + def __init__(self, base, name, target, *flags): + ManifestEntry.__init__(self, base, *flags) + self.name = name + self.target = target + + def __str__(self): + return self.serialize(self.name, self.target) + + def rebase(self, base): + u = urlparse(self.target) + if u.scheme and u.scheme != "jar": + return ManifestEntry.rebase(self, base) + clone = ManifestEntry.rebase(self, base) + clone.target = mozpath.rebase(self.base, base, self.target) + return clone + + +class ManifestBinaryComponent(ManifestEntryWithRelPath): + """ + Class for 'binary-component' entries. + binary-component some/path/to/a/component.dll + """ + + type = "binary-component" + + +class ManifestComponent(ManifestEntryWithRelPath): + """ + Class for 'component' entries. + component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js + """ + + type = "component" + + def __init__(self, base, cid, file, *flags): + ManifestEntryWithRelPath.__init__(self, base, file, *flags) + self.cid = cid + + def __str__(self): + return self.serialize(self.cid, self.relpath) + + +class ManifestInterfaces(ManifestEntryWithRelPath): + """ + Class for 'interfaces' entries. + interfaces foo.xpt + """ + + type = "interfaces" + + +class ManifestCategory(ManifestEntry): + """ + Class for 'category' entries. 
+ category command-line-handler m-browser @mozilla.org/browser/clh; + """ + + type = "category" + + def __init__(self, base, category, name, value, *flags): + ManifestEntry.__init__(self, base, *flags) + self.category = category + self.name = name + self.value = value + + def __str__(self): + return self.serialize(self.category, self.name, self.value) + + +class ManifestContract(ManifestEntry): + """ + Class for 'contract' entries. + contract @mozilla.org/foo;1 {b2bba4df-057d-41ea-b6b1-94a10a8ede68} + """ + + type = "contract" + + def __init__(self, base, contractID, cid, *flags): + ManifestEntry.__init__(self, base, *flags) + self.contractID = contractID + self.cid = cid + + def __str__(self): + return self.serialize(self.contractID, self.cid) + + +# All manifest classes by their type name. +MANIFESTS_TYPES = dict( + [ + (c.type, c) + for c in globals().values() + if type(c) == type + and issubclass(c, ManifestEntry) + and hasattr(c, "type") + and c.type + ] +) + +MANIFEST_RE = re.compile(r"^#.*$") + + +def parse_manifest_line(base, line): + """ + Parse a line from a manifest file with the given base directory and + return the corresponding ManifestEntry instance. + """ + # Remove comments + cmd = MANIFEST_RE.sub("", line).strip().split() + if not cmd: + return None + if not cmd[0] in MANIFESTS_TYPES: + return errors.fatal("Unknown manifest directive: %s" % cmd[0]) + return MANIFESTS_TYPES[cmd[0]](base, *cmd[1:]) + + +def parse_manifest(root, path, fileobj=None): + """ + Parse a manifest file. + """ + base = mozpath.dirname(path) + if root: + path = os.path.normpath(os.path.abspath(os.path.join(root, path))) + if not fileobj: + fileobj = open(path) + linenum = 0 + for line in fileobj: + line = six.ensure_text(line) + linenum += 1 + with errors.context(path, linenum): + e = parse_manifest_line(base, line) + if e: + yield e + + +def is_manifest(path): + """ + Return whether the given path is that of a manifest file. + """ + return ( + path.endswith(".manifest") + and not path.endswith(".CRT.manifest") + and not path.endswith(".exe.manifest") + and os.path.basename(path) != "cose.manifest" + ) diff --git a/python/mozbuild/mozpack/copier.py b/python/mozbuild/mozpack/copier.py new file mode 100644 index 0000000000..c042e5432f --- /dev/null +++ b/python/mozbuild/mozpack/copier.py @@ -0,0 +1,605 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import concurrent.futures as futures +import errno +import os +import stat +import sys +from collections import Counter, OrderedDict, defaultdict + +import six + +import mozpack.path as mozpath +from mozpack.errors import errors +from mozpack.files import BaseFile, Dest + + +class FileRegistry(object): + """ + Generic container to keep track of a set of BaseFile instances. It + preserves the order under which the files are added, but doesn't keep + track of empty directories (directories are not stored at all). + The paths associated with the BaseFile instances are relative to an + unspecified (virtual) root directory. + + registry = FileRegistry() + registry.add('foo/bar', file_instance) + """ + + def __init__(self): + self._files = OrderedDict() + self._required_directories = Counter() + self._partial_paths_cache = {} + + def _partial_paths(self, path): + """ + Turn "foo/bar/baz/zot" into ["foo/bar/baz", "foo/bar", "foo"]. 
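+        The result is memoized in self._partial_paths_cache, since many
+        registered files share ancestor directories.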
+ """ + dir_name = path.rpartition("/")[0] + if not dir_name: + return [] + + partial_paths = self._partial_paths_cache.get(dir_name) + if partial_paths: + return partial_paths + + partial_paths = [dir_name] + self._partial_paths(dir_name) + + self._partial_paths_cache[dir_name] = partial_paths + return partial_paths + + def add(self, path, content): + """ + Add a BaseFile instance to the container, under the given path. + """ + assert isinstance(content, BaseFile) + if path in self._files: + return errors.error("%s already added" % path) + if self._required_directories[path] > 0: + return errors.error("Can't add %s: it is a required directory" % path) + # Check whether any parent of the given path is already stored + partial_paths = self._partial_paths(path) + for partial_path in partial_paths: + if partial_path in self._files: + return errors.error("Can't add %s: %s is a file" % (path, partial_path)) + self._files[path] = content + self._required_directories.update(partial_paths) + + def match(self, pattern): + """ + Return the list of paths, stored in the container, matching the + given pattern. See the mozpack.path.match documentation for a + description of the handled patterns. + """ + if "*" in pattern: + return [p for p in self.paths() if mozpath.match(p, pattern)] + if pattern == "": + return self.paths() + if pattern in self._files: + return [pattern] + return [p for p in self.paths() if mozpath.basedir(p, [pattern]) == pattern] + + def remove(self, pattern): + """ + Remove paths matching the given pattern from the container. See the + mozpack.path.match documentation for a description of the handled + patterns. + """ + items = self.match(pattern) + if not items: + return errors.error( + "Can't remove %s: %s" + % (pattern, "not matching anything previously added") + ) + for i in items: + del self._files[i] + self._required_directories.subtract(self._partial_paths(i)) + + def paths(self): + """ + Return all paths stored in the container, in the order they were added. + """ + return list(self._files) + + def __len__(self): + """ + Return number of paths stored in the container. + """ + return len(self._files) + + def __contains__(self, pattern): + raise RuntimeError( + "'in' operator forbidden for %s. Use contains()." % self.__class__.__name__ + ) + + def contains(self, pattern): + """ + Return whether the container contains paths matching the given + pattern. See the mozpack.path.match documentation for a description of + the handled patterns. + """ + return len(self.match(pattern)) > 0 + + def __getitem__(self, path): + """ + Return the BaseFile instance stored in the container for the given + path. + """ + return self._files[path] + + def __iter__(self): + """ + Iterate over all (path, BaseFile instance) pairs from the container. + for path, file in registry: + (...) + """ + return six.iteritems(self._files) + + def required_directories(self): + """ + Return the set of directories required by the paths in the container, + in no particular order. The returned directories are relative to an + unspecified (virtual) root directory (and do not include said root + directory). + """ + return set(k for k, v in self._required_directories.items() if v > 0) + + def output_to_inputs_tree(self): + """ + Return a dictionary mapping each output path to the set of its + required input paths. + + All paths are normalized. 
+ """ + tree = {} + for output, file in self: + output = mozpath.normpath(output) + tree[output] = set(mozpath.normpath(f) for f in file.inputs()) + return tree + + def input_to_outputs_tree(self): + """ + Return a dictionary mapping each input path to the set of + impacted output paths. + + All paths are normalized. + """ + tree = defaultdict(set) + for output, file in self: + output = mozpath.normpath(output) + for input in file.inputs(): + input = mozpath.normpath(input) + tree[input].add(output) + return dict(tree) + + +class FileRegistrySubtree(object): + """A proxy class to give access to a subtree of an existing FileRegistry. + + Note this doesn't implement the whole FileRegistry interface.""" + + def __new__(cls, base, registry): + if not base: + return registry + return object.__new__(cls) + + def __init__(self, base, registry): + self._base = base + self._registry = registry + + def _get_path(self, path): + # mozpath.join will return a trailing slash if path is empty, and we + # don't want that. + return mozpath.join(self._base, path) if path else self._base + + def add(self, path, content): + return self._registry.add(self._get_path(path), content) + + def match(self, pattern): + return [ + mozpath.relpath(p, self._base) + for p in self._registry.match(self._get_path(pattern)) + ] + + def remove(self, pattern): + return self._registry.remove(self._get_path(pattern)) + + def paths(self): + return [p for p, f in self] + + def __len__(self): + return len(self.paths()) + + def contains(self, pattern): + return self._registry.contains(self._get_path(pattern)) + + def __getitem__(self, path): + return self._registry[self._get_path(path)] + + def __iter__(self): + for p, f in self._registry: + if mozpath.basedir(p, [self._base]): + yield mozpath.relpath(p, self._base), f + + +class FileCopyResult(object): + """Represents results of a FileCopier.copy operation.""" + + def __init__(self): + self.updated_files = set() + self.existing_files = set() + self.removed_files = set() + self.removed_directories = set() + + @property + def updated_files_count(self): + return len(self.updated_files) + + @property + def existing_files_count(self): + return len(self.existing_files) + + @property + def removed_files_count(self): + return len(self.removed_files) + + @property + def removed_directories_count(self): + return len(self.removed_directories) + + +class FileCopier(FileRegistry): + """ + FileRegistry with the ability to copy the registered files to a separate + directory. + """ + + def copy( + self, + destination, + skip_if_older=True, + remove_unaccounted=True, + remove_all_directory_symlinks=True, + remove_empty_directories=True, + ): + """ + Copy all registered files to the given destination path. The given + destination can be an existing directory, or not exist at all. It + can't be e.g. a file. + The copy process acts a bit like rsync: files are not copied when they + don't need to (see mozpack.files for details on file.copy). + + By default, files in the destination directory that aren't + registered are removed and empty directories are deleted. In + addition, all directory symlinks in the destination directory + are deleted: this is a conservative approach to ensure that we + never accidently write files into a directory that is not the + destination directory. In the worst case, we might have a + directory symlink in the object directory to the source + directory. + + To disable removing of unregistered files, pass + remove_unaccounted=False. 
To disable removing empty + directories, pass remove_empty_directories=False. In rare + cases, you might want to maintain directory symlinks in the + destination directory (at least those that are not required to + be regular directories): pass + remove_all_directory_symlinks=False. Exercise caution with + this flag: you almost certainly do not want to preserve + directory symlinks. + + Returns a FileCopyResult that details what changed. + """ + assert isinstance(destination, six.string_types) + assert not os.path.exists(destination) or os.path.isdir(destination) + + result = FileCopyResult() + have_symlinks = hasattr(os, "symlink") + destination = os.path.normpath(destination) + + # We create the destination directory specially. We can't do this as + # part of the loop doing mkdir() below because that loop munges + # symlinks and permissions and parent directories of the destination + # directory may have their own weird schema. The contract is we only + # manage children of destination, not its parents. + try: + os.makedirs(destination) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + # Because we could be handling thousands of files, code in this + # function is optimized to minimize system calls. We prefer CPU time + # in Python over possibly I/O bound filesystem calls to stat() and + # friends. + + required_dirs = set([destination]) + required_dirs |= set( + os.path.normpath(os.path.join(destination, d)) + for d in self.required_directories() + ) + + # Ensure destination directories are in place and proper. + # + # The "proper" bit is important. We need to ensure that directories + # have appropriate permissions or we will be unable to discover + # and write files. Furthermore, we need to verify directories aren't + # symlinks. + # + # Symlinked directories (a symlink whose target is a directory) are + # incompatible with us because our manifest talks in terms of files, + # not directories. If we leave symlinked directories unchecked, we + # would blindly follow symlinks and this might confuse file + # installation. For example, if an existing directory is a symlink + # to directory X and we attempt to install a symlink in this directory + # to a file in directory X, we may create a recursive symlink! + for d in sorted(required_dirs, key=len): + try: + os.mkdir(d) + except OSError as error: + if error.errno != errno.EEXIST: + raise + + # We allow the destination to be a symlink because the caller + # is responsible for managing the destination and we assume + # they know what they are doing. + if have_symlinks and d != destination: + st = os.lstat(d) + if stat.S_ISLNK(st.st_mode): + # While we have remove_unaccounted, it doesn't apply + # to directory symlinks because if it did, our behavior + # could be very wrong. + os.remove(d) + os.mkdir(d) + + if not os.access(d, os.W_OK): + umask = os.umask(0o077) + os.umask(umask) + os.chmod(d, 0o777 & ~umask) + + if isinstance(remove_unaccounted, FileRegistry): + existing_files = set( + os.path.normpath(os.path.join(destination, p)) + for p in remove_unaccounted.paths() + ) + existing_dirs = set( + os.path.normpath(os.path.join(destination, p)) + for p in remove_unaccounted.required_directories() + ) + existing_dirs |= {os.path.normpath(destination)} + else: + # While we have remove_unaccounted, it doesn't apply to empty + # directories because it wouldn't make sense: an empty directory + # is empty, so removing it should have no effect. 
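+            # Walk the destination to take stock of what already exists on
+            # disk; anything not (re)installed below becomes a candidate for
+            # removal.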
+ existing_dirs = set() + existing_files = set() + for root, dirs, files in os.walk(destination): + # We need to perform the same symlink detection as above. + # os.walk() doesn't follow symlinks into directories by + # default, so we need to check dirs (we can't wait for root). + if have_symlinks: + filtered = [] + for d in dirs: + full = os.path.join(root, d) + st = os.lstat(full) + if stat.S_ISLNK(st.st_mode): + # This directory symlink is not a required + # directory: any such symlink would have been + # removed and a directory created above. + if remove_all_directory_symlinks: + os.remove(full) + result.removed_files.add(os.path.normpath(full)) + else: + existing_files.add(os.path.normpath(full)) + else: + filtered.append(d) + + dirs[:] = filtered + + existing_dirs.add(os.path.normpath(root)) + + for d in dirs: + existing_dirs.add(os.path.normpath(os.path.join(root, d))) + + for f in files: + existing_files.add(os.path.normpath(os.path.join(root, f))) + + # Now we reconcile the state of the world against what we want. + dest_files = set() + + # Install files. + # + # Creating/appending new files on Windows/NTFS is slow. So we use a + # thread pool to speed it up significantly. The performance of this + # loop is so critical to common build operations on Linux that the + # overhead of the thread pool is worth avoiding, so we have 2 code + # paths. We also employ a low water mark to prevent thread pool + # creation if number of files is too small to benefit. + copy_results = [] + if sys.platform == "win32" and len(self) > 100: + with futures.ThreadPoolExecutor(4) as e: + fs = [] + for p, f in self: + destfile = os.path.normpath(os.path.join(destination, p)) + fs.append((destfile, e.submit(f.copy, destfile, skip_if_older))) + + copy_results = [(path, f.result) for path, f in fs] + else: + for p, f in self: + destfile = os.path.normpath(os.path.join(destination, p)) + copy_results.append((destfile, f.copy(destfile, skip_if_older))) + + for destfile, copy_result in copy_results: + dest_files.add(destfile) + if copy_result: + result.updated_files.add(destfile) + else: + result.existing_files.add(destfile) + + # Remove files no longer accounted for. + if remove_unaccounted: + for f in existing_files - dest_files: + # Windows requires write access to remove files. + if os.name == "nt" and not os.access(f, os.W_OK): + # It doesn't matter what we set permissions to since we + # will remove this file shortly. + os.chmod(f, 0o600) + + os.remove(f) + result.removed_files.add(f) + + if not remove_empty_directories: + return result + + # Figure out which directories can be removed. This is complicated + # by the fact we optionally remove existing files. This would be easy + # if we walked the directory tree after installing files. But, we're + # trying to minimize system calls. + + # Start with the ideal set. + remove_dirs = existing_dirs - required_dirs + + # Then don't remove directories if we didn't remove unaccounted files + # and one of those files exists. + if not remove_unaccounted: + parents = set() + pathsep = os.path.sep + for f in existing_files: + path = f + while True: + # All the paths are normalized and relative by this point, + # so os.path.dirname would only do extra work. + dirname = path.rpartition(pathsep)[0] + if dirname in parents: + break + parents.add(dirname) + path = dirname + remove_dirs -= parents + + # Remove empty directories that aren't required. 
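+        # Sorting by path length in reverse order removes the deepest
+        # directories first, so children go away before their parents.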
+        for d in sorted(remove_dirs, key=len, reverse=True):
+            try:
+                try:
+                    os.rmdir(d)
+                except OSError as e:
+                    if e.errno in (errno.EPERM, errno.EACCES):
+                        # Permissions may not allow deletion. So ensure write
+                        # access is in place before attempting to rmdir again.
+                        os.chmod(d, 0o700)
+                        os.rmdir(d)
+                    else:
+                        raise
+            except OSError as e:
+                # If remove_unaccounted is a FileRegistry, then we have a
+                # list of directories that may not be empty, so ignore rmdir
+                # ENOTEMPTY errors for them.
+                if (
+                    isinstance(remove_unaccounted, FileRegistry)
+                    and e.errno == errno.ENOTEMPTY
+                ):
+                    continue
+                raise
+            result.removed_directories.add(d)
+
+        return result
+
+
+class Jarrer(FileRegistry, BaseFile):
+    """
+    FileRegistry with the ability to copy and pack the registered files as a
+    jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
+    """
+
+    def __init__(self, compress=True):
+        """
+        Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
+        for details on the compress argument.
+        """
+        self.compress = compress
+        self._preload = []
+        self._compress_options = {}  # Map path to compress boolean option.
+        FileRegistry.__init__(self)
+
+    def add(self, path, content, compress=None):
+        FileRegistry.add(self, path, content)
+        if compress is not None:
+            self._compress_options[path] = compress
+
+    def copy(self, dest, skip_if_older=True):
+        """
+        Pack all registered files in the given destination jar. The given
+        destination jar may be a path to a jar file, or a Dest instance for
+        a jar file.
+        If the destination jar file exists, its (compressed) contents are used
+        instead of the registered BaseFile instances when appropriate.
+        """
+
+        class DeflaterDest(Dest):
+            """
+            Dest-like class, reading from a file-like object initially, but
+            switching to a Deflater object if written to.
+
+            dest = DeflaterDest(original_file)
+            dest.read()      # Reads original_file
+            dest.write(data) # Creates a Deflater and writes data there
+            dest.read()      # Re-opens the Deflater and reads from it
+            """
+
+            def __init__(self, orig=None, compress=True):
+                self.mode = None
+                self.deflater = orig
+                self.compress = compress
+
+            def read(self, length=-1):
+                if self.mode != "r":
+                    assert self.mode is None
+                    self.mode = "r"
+                return self.deflater.read(length)
+
+            def write(self, data):
+                if self.mode != "w":
+                    from mozpack.mozjar import Deflater
+
+                    self.deflater = Deflater(self.compress)
+                    self.mode = "w"
+                self.deflater.write(data)
+
+            def exists(self):
+                return self.deflater is not None
+
+        if isinstance(dest, six.string_types):
+            dest = Dest(dest)
+        assert isinstance(dest, Dest)
+
+        from mozpack.mozjar import JarReader, JarWriter
+
+        try:
+            old_jar = JarReader(fileobj=dest)
+        except Exception:
+            old_jar = []
+
+        old_contents = dict([(f.filename, f) for f in old_jar])
+
+        with JarWriter(fileobj=dest, compress=self.compress) as jar:
+            for path, file in self:
+                compress = self._compress_options.get(path, self.compress)
+                if path in old_contents:
+                    deflater = DeflaterDest(old_contents[path], compress)
+                else:
+                    deflater = DeflaterDest(compress=compress)
+                file.copy(deflater, skip_if_older)
+                jar.add(path, deflater.deflater, mode=file.mode, compress=compress)
+            if self._preload:
+                jar.preload(self._preload)
+
+    def open(self):
+        raise RuntimeError("unsupported")
+
+    def preload(self, paths):
+        """
+        Add the given set of paths to the list of preloaded files. See
+        mozpack.mozjar.JarWriter documentation for details on jar preloading.
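+
+        A minimal usage sketch (paths illustrative only):
+
+        .. code-block:: python
+
+            from mozpack.files import GeneratedFile
+
+            jar = Jarrer()
+            jar.add("chrome.manifest", GeneratedFile(b"manifest foo"))
+            jar.preload(["chrome.manifest"])
+            jar.copy("omni.ja")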
+        """
+        self._preload.extend(paths)
diff --git a/python/mozbuild/mozpack/dmg.py b/python/mozbuild/mozpack/dmg.py
new file mode 100644
index 0000000000..334f3a69cc
--- /dev/null
+++ b/python/mozbuild/mozpack/dmg.py
@@ -0,0 +1,230 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+import shutil
+import subprocess
+from pathlib import Path
+from typing import List
+
+import mozfile
+
+from mozbuild.util import ensureParentDir
+
+is_linux = platform.system() == "Linux"
+is_osx = platform.system() == "Darwin"
+
+
+def chmod(dir):
+    "Set permissions of DMG contents correctly"
+    subprocess.check_call(["chmod", "-R", "a+rX,a-st,u+w,go-w", dir])
+
+
+def rsync(source: Path, dest: Path):
+    "rsync the contents of directory source into directory dest"
+    # Ensure a trailing slash on directories so rsync copies the *contents* of source.
+    raw_source = str(source)
+    if source.is_dir():
+        raw_source = str(source) + "/"
+    subprocess.check_call(["rsync", "-a", "--copy-unsafe-links", raw_source, dest])
+
+
+def set_folder_icon(dir: Path, tmpdir: Path, hfs_tool: Path = None):
+    "Set HFS attributes of dir to use a custom icon"
+    if is_linux:
+        hfs = tmpdir / "staged.hfs"
+        subprocess.check_call([hfs_tool, hfs, "attr", "/", "C"])
+    elif is_osx:
+        subprocess.check_call(["SetFile", "-a", "C", dir])
+
+
+def generate_hfs_file(
+    stagedir: Path, tmpdir: Path, volume_name: str, mkfshfs_tool: Path
+):
+    """
+    When cross-compiling, we zero-fill an HFS file that we will turn into
+    a DMG. To do so, we measure the size of the staged directory and add
+    some slight padding.
+    """
+    hfs = tmpdir / "staged.hfs"
+    output = subprocess.check_output(["du", "-s", stagedir])
+    size = int(output.split()[0]) / 1000  # du -s reports KiB blocks; get (roughly) MB.
+    size = int(size * 1.02)  # Bump the size slightly larger for padding.
+    # Set up a file of the computed size, filled with zeros.
+    subprocess.check_call(
+        [
+            "dd",
+            "if=/dev/zero",
+            "of={}".format(hfs),
+            "bs=1M",
+            "count={}".format(size),
+        ]
+    )
+    subprocess.check_call([mkfshfs_tool, "-v", volume_name, hfs])
+
+
+def create_app_symlink(stagedir: Path, tmpdir: Path, hfs_tool: Path = None):
+    """
+    Make a symlink to /Applications. The symlink name is a space
+    so we don't have to localize it. The Applications folder icon
+    will be shown in Finder, which should be clear enough for users.
+    """
+    if is_linux:
+        hfs = os.path.join(tmpdir, "staged.hfs")
+        subprocess.check_call([hfs_tool, hfs, "symlink", "/ ", "/Applications"])
+    elif is_osx:
+        os.symlink("/Applications", stagedir / " ")
+
+
+def create_dmg_from_staged(
+    stagedir: Path,
+    output_dmg: Path,
+    tmpdir: Path,
+    volume_name: str,
+    hfs_tool: Path = None,
+    dmg_tool: Path = None,
+):
+    "Given a prepared directory stagedir, produce a DMG at output_dmg."
+    if is_linux:
+        # The dmg tool doesn't create the destination directories, and silently
+        # returns success if the parent directory doesn't exist.
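+        # Create the parent directory up front so that failure mode
+        # cannot be hit.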
+ ensureParentDir(output_dmg) + + hfs = os.path.join(tmpdir, "staged.hfs") + subprocess.check_call([hfs_tool, hfs, "addall", stagedir]) + subprocess.check_call( + [dmg_tool, "build", hfs, output_dmg], + # dmg is seriously chatty + stdout=subprocess.DEVNULL, + ) + elif is_osx: + hybrid = tmpdir / "hybrid.dmg" + subprocess.check_call( + [ + "hdiutil", + "makehybrid", + "-hfs", + "-hfs-volume-name", + volume_name, + "-hfs-openfolder", + stagedir, + "-ov", + stagedir, + "-o", + hybrid, + ] + ) + subprocess.check_call( + [ + "hdiutil", + "convert", + "-format", + "UDBZ", + "-imagekey", + "bzip2-level=9", + "-ov", + hybrid, + "-o", + output_dmg, + ] + ) + + +def create_dmg( + source_directory: Path, + output_dmg: Path, + volume_name: str, + extra_files: List[tuple], + dmg_tool: Path, + hfs_tool: Path, + mkfshfs_tool: Path, +): + """ + Create a DMG disk image at the path output_dmg from source_directory. + + Use volume_name as the disk image volume name, and + use extra_files as a list of tuples of (filename, relative path) to copy + into the disk image. + """ + if platform.system() not in ("Darwin", "Linux"): + raise Exception("Don't know how to build a DMG on '%s'" % platform.system()) + + with mozfile.TemporaryDirectory() as tmp: + tmpdir = Path(tmp) + stagedir = tmpdir / "stage" + stagedir.mkdir() + + # Copy the app bundle over using rsync + rsync(source_directory, stagedir) + # Copy extra files + for source, target in extra_files: + full_target = stagedir / target + full_target.parent.mkdir(parents=True, exist_ok=True) + shutil.copyfile(source, full_target) + if is_linux: + # Not needed in osx + generate_hfs_file(stagedir, tmpdir, volume_name, mkfshfs_tool) + create_app_symlink(stagedir, tmpdir, hfs_tool) + # Set the folder attributes to use a custom icon + set_folder_icon(stagedir, tmpdir, hfs_tool) + chmod(stagedir) + create_dmg_from_staged( + stagedir, output_dmg, tmpdir, volume_name, hfs_tool, dmg_tool + ) + + +def extract_dmg_contents( + dmgfile: Path, + destdir: Path, + dmg_tool: Path = None, + hfs_tool: Path = None, +): + if is_linux: + with mozfile.TemporaryDirectory() as tmpdir: + hfs_file = os.path.join(tmpdir, "firefox.hfs") + subprocess.check_call( + [dmg_tool, "extract", dmgfile, hfs_file], + # dmg is seriously chatty + stdout=subprocess.DEVNULL, + ) + subprocess.check_call([hfs_tool, hfs_file, "extractall", "/", destdir]) + else: + # TODO: find better way to resolve topsrcdir (checkout directory) + topsrcdir = Path(__file__).parent.parent.parent.parent.resolve() + unpack_diskimage = topsrcdir / "build/package/mac_osx/unpack-diskimage" + unpack_mountpoint = Path("/tmp/app-unpack") + subprocess.check_call([unpack_diskimage, dmgfile, unpack_mountpoint, destdir]) + + +def extract_dmg( + dmgfile: Path, + output: Path, + dmg_tool: Path = None, + hfs_tool: Path = None, + dsstore: Path = None, + icon: Path = None, + background: Path = None, +): + if platform.system() not in ("Darwin", "Linux"): + raise Exception("Don't know how to extract a DMG on '%s'" % platform.system()) + + with mozfile.TemporaryDirectory() as tmp: + tmpdir = Path(tmp) + extract_dmg_contents(dmgfile, tmpdir, dmg_tool, hfs_tool) + applications_symlink = tmpdir / " " + if applications_symlink.is_symlink(): + # Rsync will fail on the presence of this symlink + applications_symlink.unlink() + rsync(tmpdir, output) + + if dsstore: + dsstore.parent.mkdir(parents=True, exist_ok=True) + rsync(tmpdir / ".DS_Store", dsstore) + if background: + background.parent.mkdir(parents=True, exist_ok=True) + rsync(tmpdir / 
".background" / background.name, background) + if icon: + icon.parent.mkdir(parents=True, exist_ok=True) + rsync(tmpdir / ".VolumeIcon.icns", icon) diff --git a/python/mozbuild/mozpack/errors.py b/python/mozbuild/mozpack/errors.py new file mode 100644 index 0000000000..25c0e8549c --- /dev/null +++ b/python/mozbuild/mozpack/errors.py @@ -0,0 +1,151 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import sys +from contextlib import contextmanager + + +class ErrorMessage(Exception): + """Exception type raised from errors.error() and errors.fatal()""" + + +class AccumulatedErrors(Exception): + """Exception type raised from errors.accumulate()""" + + +class ErrorCollector(object): + """ + Error handling/logging class. A global instance, errors, is provided for + convenience. + + Warnings, errors and fatal errors may be logged by calls to the following + functions: + - errors.warn(message) + - errors.error(message) + - errors.fatal(message) + + Warnings only send the message on the logging output, while errors and + fatal errors send the message and throw an ErrorMessage exception. The + exception, however, may be deferred. See further below. + + Errors may be ignored by calling: + - errors.ignore_errors() + + After calling that function, only fatal errors throw an exception. + + The warnings, errors or fatal errors messages may be augmented with context + information when a context is provided. Context is defined by a pair + (filename, linenumber), and may be set with errors.context() used as a + + context manager: + + .. code-block:: python + + with errors.context(filename, linenumber): + errors.warn(message) + + Arbitrary nesting is supported, both for errors.context calls: + + .. code-block:: python + + with errors.context(filename1, linenumber1): + errors.warn(message) + with errors.context(filename2, linenumber2): + errors.warn(message) + + as well as for function calls: + + .. code-block:: python + + def func(): + errors.warn(message) + with errors.context(filename, linenumber): + func() + + Errors and fatal errors can have their exception thrown at a later time, + allowing for several different errors to be reported at once before + throwing. This is achieved with errors.accumulate() as a context manager: + + .. code-block:: python + + with errors.accumulate(): + if test1: + errors.error(message1) + if test2: + errors.error(message2) + + In such cases, a single AccumulatedErrors exception is thrown, but doesn't + contain information about the exceptions. The logged messages do. 
+    """
+
+    out = sys.stderr
+    WARN = 1
+    ERROR = 2
+    FATAL = 3
+    _level = ERROR
+    _context = []
+    _count = None
+
+    def ignore_errors(self, ignore=True):
+        if ignore:
+            self._level = self.FATAL
+        else:
+            self._level = self.ERROR
+
+    def _full_message(self, level, msg):
+        if level >= self._level:
+            level = "error"
+        else:
+            level = "warning"
+        if self._context:
+            file, line = self._context[-1]
+            return "%s: %s:%d: %s" % (level, file, line, msg)
+        return "%s: %s" % (level, msg)
+
+    def _handle(self, level, msg):
+        msg = self._full_message(level, msg)
+        if level >= self._level:
+            if self._count is None:
+                raise ErrorMessage(msg)
+            self._count += 1
+        print(msg, file=self.out)
+
+    def fatal(self, msg):
+        self._handle(self.FATAL, msg)
+
+    def error(self, msg):
+        self._handle(self.ERROR, msg)
+
+    def warn(self, msg):
+        self._handle(self.WARN, msg)
+
+    def get_context(self):
+        if self._context:
+            return self._context[-1]
+
+    @contextmanager
+    def context(self, file, line):
+        if file and line:
+            self._context.append((file, line))
+        yield
+        if file and line:
+            self._context.pop()
+
+    @contextmanager
+    def accumulate(self):
+        assert self._count is None
+        self._count = 0
+        yield
+        count = self._count
+        self._count = None
+        if count:
+            raise AccumulatedErrors()
+
+    @property
+    def count(self):
+        # _count can be None.
+        return self._count if self._count else 0
+
+
+errors = ErrorCollector()
diff --git a/python/mozbuild/mozpack/executables.py b/python/mozbuild/mozpack/executables.py
new file mode 100644
index 0000000000..dd6849cabe
--- /dev/null
+++ b/python/mozbuild/mozpack/executables.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import struct
+import subprocess
+from io import BytesIO
+
+from mozpack.errors import errors
+
+MACHO_SIGNATURES = [
+    0xFEEDFACE,  # mach-o 32-bits big endian
+    0xCEFAEDFE,  # mach-o 32-bits little endian
+    0xFEEDFACF,  # mach-o 64-bits big endian
+    0xCFFAEDFE,  # mach-o 64-bits little endian
+]
+
+FAT_SIGNATURE = 0xCAFEBABE  # mach-o FAT binary
+
+ELF_SIGNATURE = 0x7F454C46  # Elf binary
+
+UNKNOWN = 0
+MACHO = 1
+ELF = 2
+
+
+def get_type(path_or_fileobj):
+    """
+    Check the signature of the given file and return what kind of executable
+    it matches.
+    """
+    if hasattr(path_or_fileobj, "peek"):
+        f = BytesIO(path_or_fileobj.peek(8))
+    elif hasattr(path_or_fileobj, "read"):
+        f = path_or_fileobj
+    else:
+        f = open(path_or_fileobj, "rb")
+    signature = f.read(4)
+    if len(signature) < 4:
+        return UNKNOWN
+    signature = struct.unpack(">L", signature)[0]
+    if signature == ELF_SIGNATURE:
+        return ELF
+    if signature in MACHO_SIGNATURES:
+        return MACHO
+    if signature != FAT_SIGNATURE:
+        return UNKNOWN
+    # We have to sanity check the second four bytes, because Java class
+    # files use the same magic number as Mach-O fat binaries.
+    # This logic is adapted from file(1), which says that Mach-O uses
+    # these bytes to count the number of architectures within, while
+    # Java uses it for a version number. Conveniently, there are only
+    # 18 labelled Mach-O architectures, and Java's first released
+    # class format used the version 43.0.
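+    # A count below 20 is therefore plausibly a fat header, while any
+    # larger value (such as Java's 43) is not.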
+    num = f.read(4)
+    if len(num) < 4:
+        return UNKNOWN
+    num = struct.unpack(">L", num)[0]
+    if num < 20:
+        return MACHO
+    return UNKNOWN
+
+
+def is_executable(path):
+    """
+    Return whether a given file path points to an executable or a library,
+    where an executable or library is identified by:
+    - the file extension on OS/2 and WINNT
+    - the file signature on OS/X and ELF systems (GNU/Linux, Android, BSD, Solaris)
+
+    As this function is intended for use to choose between the ExecutableFile
+    and File classes in FileFinder, and choosing ExecutableFile only matters
+    on OS/2, OS/X, ELF and WINNT (in GCC build) systems, we don't bother
+    detecting other kinds of executables.
+    """
+    from buildconfig import substs
+
+    if not os.path.exists(path):
+        return False
+
+    if substs["OS_ARCH"] == "WINNT":
+        return path.lower().endswith((substs["DLL_SUFFIX"], substs["BIN_SUFFIX"]))
+
+    return get_type(path) != UNKNOWN
+
+
+def may_strip(path):
+    """
+    Return whether strip() should be called.
+    """
+    from buildconfig import substs
+
+    # Bug 1658632: clang-11-based strip complains about d3dcompiler_47.dll.
+    # It's not clear why this happens, but as a quick fix just avoid stripping
+    # this DLL. It's not from our build anyway.
+    if "d3dcompiler" in path:
+        return False
+    return bool(substs.get("PKG_STRIP"))
+
+
+def strip(path):
+    """
+    Execute the STRIP command with STRIP_FLAGS on the given path.
+    """
+    from buildconfig import substs
+
+    strip = substs["STRIP"]
+    flags = substs.get("STRIP_FLAGS", [])
+    cmd = [strip] + flags + [path]
+    if subprocess.call(cmd) != 0:
+        errors.fatal("Error executing " + " ".join(cmd))
+
+
+def may_elfhack(path):
+    """
+    Return whether elfhack() should be called.
+    """
+    # elfhack only supports libraries. We should check the ELF header for
+    # the right flag, but checking the file extension works too.
+    from buildconfig import substs
+
+    return (
+        "USE_ELF_HACK" in substs
+        and substs["USE_ELF_HACK"]
+        and path.endswith(substs["DLL_SUFFIX"])
+        and "COMPILE_ENVIRONMENT" in substs
+        and substs["COMPILE_ENVIRONMENT"]
+    )
+
+
+def elfhack(path):
+    """
+    Execute the elfhack command on the given path.
+    """
+    from buildconfig import topobjdir
+
+    cmd = [os.path.join(topobjdir, "build/unix/elfhack/elfhack"), path]
+    if subprocess.call(cmd) != 0:
+        errors.fatal("Error executing " + " ".join(cmd))
diff --git a/python/mozbuild/mozpack/files.py b/python/mozbuild/mozpack/files.py
new file mode 100644
index 0000000000..691c248b02
--- /dev/null
+++ b/python/mozbuild/mozpack/files.py
@@ -0,0 +1,1271 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +import bisect +import codecs +import errno +import inspect +import os +import platform +import shutil +import stat +import subprocess +import uuid +from collections import OrderedDict +from io import BytesIO +from itertools import chain, takewhile +from tarfile import TarFile, TarInfo +from tempfile import NamedTemporaryFile, mkstemp + +import six +from jsmin import JavascriptMinify + +import mozbuild.makeutil as makeutil +import mozpack.path as mozpath +from mozbuild.preprocessor import Preprocessor +from mozbuild.util import FileAvoidWrite, ensure_unicode, memoize +from mozpack.chrome.manifest import ManifestEntry, ManifestInterfaces +from mozpack.errors import ErrorMessage, errors +from mozpack.executables import elfhack, is_executable, may_elfhack, may_strip, strip +from mozpack.mozjar import JarReader + +try: + import hglib +except ImportError: + hglib = None + + +# For clean builds, copying files on win32 using CopyFile through ctypes is +# ~2x as fast as using shutil.copyfile. +if platform.system() != "Windows": + _copyfile = shutil.copyfile +else: + import ctypes + + _kernel32 = ctypes.windll.kernel32 + _CopyFileA = _kernel32.CopyFileA + _CopyFileW = _kernel32.CopyFileW + + def _copyfile(src, dest): + # False indicates `dest` should be overwritten if it exists already. + if isinstance(src, six.text_type) and isinstance(dest, six.text_type): + _CopyFileW(src, dest, False) + elif isinstance(src, str) and isinstance(dest, str): + _CopyFileA(src, dest, False) + else: + raise TypeError("mismatched path types!") + + +# Helper function; ensures we always open files with the correct encoding when +# opening them in text mode. +def _open(path, mode="r"): + if six.PY3 and "b" not in mode: + return open(path, mode, encoding="utf-8") + return open(path, mode) + + +class Dest(object): + """ + Helper interface for BaseFile.copy. The interface works as follows: + - read() and write() can be used to sequentially read/write from the underlying file. + - a call to read() after a write() will re-open the underlying file and read from it. + - a call to write() after a read() will re-open the underlying file, emptying it, and write to it. + """ + + def __init__(self, path): + self.file = None + self.mode = None + self.path = ensure_unicode(path) + + @property + def name(self): + return self.path + + def read(self, length=-1): + if self.mode != "r": + self.file = _open(self.path, mode="rb") + self.mode = "r" + return self.file.read(length) + + def write(self, data): + if self.mode != "w": + self.file = _open(self.path, mode="wb") + self.mode = "w" + to_write = six.ensure_binary(data) + return self.file.write(to_write) + + def exists(self): + return os.path.exists(self.path) + + def close(self): + if self.mode: + self.mode = None + self.file.close() + self.file = None + + +class BaseFile(object): + """ + Base interface and helper for file copying. Derived class may implement + their own copy function, or rely on BaseFile.copy using the open() member + function and/or the path property. + """ + + @staticmethod + def is_older(first, second): + """ + Compares the modification time of two files, and returns whether the + ``first`` file is older than the ``second`` file. + """ + # os.path.getmtime returns a result in seconds with precision up to + # the microsecond. But microsecond is too precise because + # shutil.copystat only copies milliseconds, and seconds is not + # enough precision. 
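+        # Note the comparison is <=, not <: equal millisecond-truncated
+        # mtimes count as "older", so files refreshed via copystat() are
+        # not copied again.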
+ return int(os.path.getmtime(first) * 1000) <= int( + os.path.getmtime(second) * 1000 + ) + + @staticmethod + def any_newer(dest, inputs): + """ + Compares the modification time of ``dest`` to multiple input files, and + returns whether any of the ``inputs`` is newer (has a later mtime) than + ``dest``. + """ + # os.path.getmtime returns a result in seconds with precision up to + # the microsecond. But microsecond is too precise because + # shutil.copystat only copies milliseconds, and seconds is not + # enough precision. + dest_mtime = int(os.path.getmtime(dest) * 1000) + for input in inputs: + try: + src_mtime = int(os.path.getmtime(input) * 1000) + except OSError as e: + if e.errno == errno.ENOENT: + # If an input file was removed, we should update. + return True + raise + if dest_mtime < src_mtime: + return True + return False + + @staticmethod + def normalize_mode(mode): + # Normalize file mode: + # - keep file type (e.g. S_IFREG) + ret = stat.S_IFMT(mode) + # - expand user read and execute permissions to everyone + if mode & 0o0400: + ret |= 0o0444 + if mode & 0o0100: + ret |= 0o0111 + # - keep user write permissions + if mode & 0o0200: + ret |= 0o0200 + # - leave away sticky bit, setuid, setgid + return ret + + def copy(self, dest, skip_if_older=True): + """ + Copy the BaseFile content to the destination given as a string or a + Dest instance. Avoids replacing existing files if the BaseFile content + matches that of the destination, or in case of plain files, if the + destination is newer than the original file. This latter behaviour is + disabled when skip_if_older is False. + Returns whether a copy was actually performed (True) or not (False). + """ + if isinstance(dest, six.string_types): + dest = Dest(dest) + else: + assert isinstance(dest, Dest) + + can_skip_content_check = False + if not dest.exists(): + can_skip_content_check = True + elif getattr(self, "path", None) and getattr(dest, "path", None): + if skip_if_older and BaseFile.is_older(self.path, dest.path): + return False + elif os.path.getsize(self.path) != os.path.getsize(dest.path): + can_skip_content_check = True + + if can_skip_content_check: + if getattr(self, "path", None) and getattr(dest, "path", None): + # The destination directory must exist, or CopyFile will fail. + destdir = os.path.dirname(dest.path) + try: + os.makedirs(destdir) + except OSError as e: + if e.errno != errno.EEXIST: + raise + _copyfile(self.path, dest.path) + shutil.copystat(self.path, dest.path) + else: + # Ensure the file is always created + if not dest.exists(): + dest.write(b"") + shutil.copyfileobj(self.open(), dest) + return True + + src = self.open() + accumulated_src_content = [] + while True: + dest_content = dest.read(32768) + src_content = src.read(32768) + accumulated_src_content.append(src_content) + if len(dest_content) == len(src_content) == 0: + break + # If the read content differs between origin and destination, + # write what was read up to now, and copy the remainder. + if six.ensure_binary(dest_content) != six.ensure_binary(src_content): + dest.write(b"".join(accumulated_src_content)) + shutil.copyfileobj(src, dest) + break + if hasattr(self, "path") and hasattr(dest, "path"): + shutil.copystat(self.path, dest.path) + return True + + def open(self): + """ + Return a file-like object allowing to read() the content of the + associated file. This is meant to be overloaded in subclasses to return + a custom file-like object. 
+ """ + assert self.path is not None + return open(self.path, "rb") + + def read(self): + raise NotImplementedError("BaseFile.read() not implemented. Bug 1170329.") + + def size(self): + """Returns size of the entry. + + Derived classes are highly encouraged to override this with a more + optimal implementation. + """ + return len(self.read()) + + @property + def mode(self): + """ + Return the file's unix mode, or None if it has no meaning. + """ + return None + + def inputs(self): + """ + Return an iterable of the input file paths that impact this output file. + """ + raise NotImplementedError("BaseFile.inputs() not implemented.") + + +class File(BaseFile): + """ + File class for plain files. + """ + + def __init__(self, path): + self.path = ensure_unicode(path) + + @property + def mode(self): + """ + Return the file's unix mode, as returned by os.stat().st_mode. + """ + if platform.system() == "Windows": + return None + assert self.path is not None + mode = os.stat(self.path).st_mode + return self.normalize_mode(mode) + + def read(self): + """Return the contents of the file.""" + with open(self.path, "rb") as fh: + return fh.read() + + def size(self): + return os.stat(self.path).st_size + + def inputs(self): + return (self.path,) + + +class ExecutableFile(File): + """ + File class for executable and library files on OS/2, OS/X and ELF systems. + (see mozpack.executables.is_executable documentation). + """ + + def __init__(self, path): + File.__init__(self, path) + + def copy(self, dest, skip_if_older=True): + real_dest = dest + if not isinstance(dest, six.string_types): + fd, dest = mkstemp() + os.close(fd) + os.remove(dest) + assert isinstance(dest, six.string_types) + # If File.copy didn't actually copy because dest is newer, check the + # file sizes. If dest is smaller, it means it is already stripped and + # elfhacked, so we can skip. + if not File.copy(self, dest, skip_if_older) and os.path.getsize( + self.path + ) > os.path.getsize(dest): + return False + try: + if may_strip(dest): + strip(dest) + if may_elfhack(dest): + elfhack(dest) + except ErrorMessage: + os.remove(dest) + raise + + if real_dest != dest: + f = File(dest) + ret = f.copy(real_dest, skip_if_older) + os.remove(dest) + return ret + return True + + +class AbsoluteSymlinkFile(File): + """File class that is copied by symlinking (if available). + + This class only works if the target path is absolute. + """ + + def __init__(self, path): + if not os.path.isabs(path): + raise ValueError("Symlink target not absolute: %s" % path) + + File.__init__(self, path) + + def copy(self, dest, skip_if_older=True): + assert isinstance(dest, six.string_types) + + # The logic in this function is complicated by the fact that symlinks + # aren't universally supported. So, where symlinks aren't supported, we + # fall back to file copying. Keep in mind that symlink support is + # per-filesystem, not per-OS. + + # Handle the simple case where symlinks are definitely not supported by + # falling back to file copy. + if not hasattr(os, "symlink"): + return File.copy(self, dest, skip_if_older=skip_if_older) + + # Always verify the symlink target path exists. + if not os.path.exists(self.path): + errors.fatal("Symlink target path does not exist: %s" % self.path) + + st = None + + try: + st = os.lstat(dest) + except OSError as ose: + if ose.errno != errno.ENOENT: + raise + + # If the dest is a symlink pointing to us, we have nothing to do. 
+        # If it's the wrong symlink, the filesystem must support symlinks,
+        # so we replace with a proper symlink.
+        if st and stat.S_ISLNK(st.st_mode):
+            link = os.readlink(dest)
+            if link == self.path:
+                return False
+
+            os.remove(dest)
+            os.symlink(self.path, dest)
+            return True
+
+        # If the destination doesn't exist, we try to create a symlink. If that
+        # fails, we fall back to copy code.
+        if not st:
+            try:
+                os.symlink(self.path, dest)
+                return True
+            except OSError:
+                return File.copy(self, dest, skip_if_older=skip_if_older)
+
+        # Now the complicated part. If the destination exists, we could be
+        # replacing a file with a symlink. Or, the filesystem may not support
+        # symlinks. We want to minimize I/O overhead for performance reasons,
+        # so we keep the existing destination file around as long as possible.
+        # A lot of the system calls would be eliminated if we cached whether
+        # symlinks are supported. However, even if we performed a single
+        # up-front test of whether the root of the destination directory
+        # supports symlinks, there's no guarantee that all operations for that
+        # dest (or source) would be on the same filesystem and would support
+        # symlinks.
+        #
+        # Our strategy is to attempt to create a new symlink with a random
+        # name. If that fails, we fall back to copy mode. If that works, we
+        # remove the old destination and move the newly-created symlink into
+        # its place.
+
+        temp_dest = os.path.join(os.path.dirname(dest), str(uuid.uuid4()))
+        try:
+            os.symlink(self.path, temp_dest)
+        # TODO Figure out exactly how symlink creation fails and only trap
+        # that.
+        except EnvironmentError:
+            return File.copy(self, dest, skip_if_older=skip_if_older)
+
+        # If removing the original file fails, don't forget to clean up the
+        # temporary symlink.
+        try:
+            os.remove(dest)
+        except EnvironmentError:
+            os.remove(temp_dest)
+            raise
+
+        os.rename(temp_dest, dest)
+        return True
+
+
+class HardlinkFile(File):
+    """File class that is copied by hard linking (if available)
+
+    This is similar to the AbsoluteSymlinkFile, but with hard links. The symlink
+    implementation requires paths to be absolute, because they are resolved at
+    read time, which makes relative paths messy. Hard links resolve paths at
+    link-creation time, so relative paths are fine.
+    """
+
+    def copy(self, dest, skip_if_older=True):
+        assert isinstance(dest, six.string_types)
+
+        if not hasattr(os, "link"):
+            return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)
+
+        try:
+            path_st = os.stat(self.path)
+        except OSError as e:
+            if e.errno == errno.ENOENT:
+                errors.fatal("Hard link target path does not exist: %s" % self.path)
+            else:
+                raise
+
+        st = None
+        try:
+            st = os.lstat(dest)
+        except OSError as e:
+            if e.errno != errno.ENOENT:
+                raise
+
+        if st:
+            # The dest already points to the right place.
+            if st.st_dev == path_st.st_dev and st.st_ino == path_st.st_ino:
+                return False
+            # The dest exists and points to the wrong place.
+            os.remove(dest)
+
+        # At this point, either the dest used to exist and we just deleted it,
+        # or it never existed. We can now safely create the hard link.
+        try:
+            os.link(self.path, dest)
+        except OSError:
+            # If we can't hard link, fall back to copying.
+            return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)
+        return True
+
+
+class ExistingFile(BaseFile):
+    """
+    File class that represents a file that may exist but whose content comes
+    from elsewhere.
+
+    The purpose of this class is to account for files that are installed via
+    external means.
It is typically only used in manifests or in registries to + account for files. + + When asked to copy, this class does nothing because nothing is known about + the source file/data. + + Instances of this class come in two flavors: required and optional. If an + existing file is required, it must exist during copy() or an error is + raised. + """ + + def __init__(self, required): + self.required = required + + def copy(self, dest, skip_if_older=True): + if isinstance(dest, six.string_types): + dest = Dest(dest) + else: + assert isinstance(dest, Dest) + + if not self.required: + return + + if not dest.exists(): + errors.fatal("Required existing file doesn't exist: %s" % dest.path) + + def inputs(self): + return () + + +class PreprocessedFile(BaseFile): + """ + File class for a file that is preprocessed. PreprocessedFile.copy() runs + the preprocessor on the file to create the output. + """ + + def __init__( + self, + path, + depfile_path, + marker, + defines, + extra_depends=None, + silence_missing_directive_warnings=False, + ): + self.path = ensure_unicode(path) + self.depfile = ensure_unicode(depfile_path) + self.marker = marker + self.defines = defines + self.extra_depends = list(extra_depends or []) + self.silence_missing_directive_warnings = silence_missing_directive_warnings + + def inputs(self): + pp = Preprocessor(defines=self.defines, marker=self.marker) + pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings) + + with _open(self.path, "r") as input: + with _open(os.devnull, "w") as output: + pp.processFile(input=input, output=output) + + # This always yields at least self.path. + return pp.includes + + def copy(self, dest, skip_if_older=True): + """ + Invokes the preprocessor to create the destination file. + """ + if isinstance(dest, six.string_types): + dest = Dest(dest) + else: + assert isinstance(dest, Dest) + + # We have to account for the case where the destination exists and is a + # symlink to something. Since we know the preprocessor is certainly not + # going to create a symlink, we can just remove the existing one. If the + # destination is not a symlink, we leave it alone, since we're going to + # overwrite its contents anyway. + # If symlinks aren't supported at all, we can skip this step. + # See comment in AbsoluteSymlinkFile about Windows. + if hasattr(os, "symlink") and platform.system() != "Windows": + if os.path.islink(dest.path): + os.remove(dest.path) + + pp_deps = set(self.extra_depends) + + # If a dependency file was specified, and it exists, add any + # dependencies from that file to our list. + if self.depfile and os.path.exists(self.depfile): + target = mozpath.normpath(dest.name) + with _open(self.depfile, "rt") as fileobj: + for rule in makeutil.read_dep_makefile(fileobj): + if target in rule.targets(): + pp_deps.update(rule.dependencies()) + + skip = False + if dest.exists() and skip_if_older: + # If a dependency file was specified, and it doesn't exist, + # assume that the preprocessor needs to be rerun. That will + # regenerate the dependency file. 
+ if self.depfile and not os.path.exists(self.depfile): + skip = False + else: + skip = not BaseFile.any_newer(dest.path, pp_deps) + + if skip: + return False + + deps_out = None + if self.depfile: + deps_out = FileAvoidWrite(self.depfile) + pp = Preprocessor(defines=self.defines, marker=self.marker) + pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings) + + with _open(self.path, "r") as input: + pp.processFile(input=input, output=dest, depfile=deps_out) + + dest.close() + if self.depfile: + deps_out.close() + + return True + + +class GeneratedFile(BaseFile): + """ + File class for content with no previous existence on the filesystem. + """ + + def __init__(self, content): + self._content = content + + @property + def content(self): + if inspect.isfunction(self._content): + self._content = self._content() + return six.ensure_binary(self._content) + + @content.setter + def content(self, content): + self._content = content + + def open(self): + return BytesIO(self.content) + + def read(self): + return self.content + + def size(self): + return len(self.content) + + def inputs(self): + return () + + +class DeflatedFile(BaseFile): + """ + File class for members of a jar archive. DeflatedFile.copy() effectively + extracts the file from the jar archive. + """ + + def __init__(self, file): + from mozpack.mozjar import JarFileReader + + assert isinstance(file, JarFileReader) + self.file = file + + def open(self): + self.file.seek(0) + return self.file + + +class ExtractedTarFile(GeneratedFile): + """ + File class for members of a tar archive. Contents of the underlying file + are extracted immediately and stored in memory. + """ + + def __init__(self, tar, info): + assert isinstance(info, TarInfo) + assert isinstance(tar, TarFile) + GeneratedFile.__init__(self, tar.extractfile(info).read()) + self._unix_mode = self.normalize_mode(info.mode) + + @property + def mode(self): + return self._unix_mode + + def read(self): + return self.content + + +class ManifestFile(BaseFile): + """ + File class for a manifest file. It takes individual manifest entries (using + the add() and remove() member functions), and adjusts them to be relative + to the base path for the manifest, given at creation. + Example: + There is a manifest entry "content foobar foobar/content/" relative + to "foobar/chrome". When packaging, the entry will be stored in + jar:foobar/omni.ja!/chrome/chrome.manifest, which means the entry + will have to be relative to "chrome" instead of "foobar/chrome". This + doesn't really matter when serializing the entry, since this base path + is not written out, but it matters when moving the entry at the same + time, e.g. to jar:foobar/omni.ja!/chrome.manifest, which we don't do + currently but could in the future. + """ + + def __init__(self, base, entries=None): + self._base = base + self._entries = [] + self._interfaces = [] + for e in entries or []: + self.add(e) + + def add(self, entry): + """ + Add the given entry to the manifest. Entries are rebased at open() time + instead of add() time so that they can be more easily remove()d. + """ + assert isinstance(entry, ManifestEntry) + if isinstance(entry, ManifestInterfaces): + self._interfaces.append(entry) + else: + self._entries.append(entry) + + def remove(self, entry): + """ + Remove the given entry from the manifest. 
+ """ + assert isinstance(entry, ManifestEntry) + if isinstance(entry, ManifestInterfaces): + self._interfaces.remove(entry) + else: + self._entries.remove(entry) + + def open(self): + """ + Return a file-like object allowing to read() the serialized content of + the manifest. + """ + content = "".join( + "%s\n" % e.rebase(self._base) + for e in chain(self._entries, self._interfaces) + ) + return BytesIO(six.ensure_binary(content)) + + def __iter__(self): + """ + Iterate over entries in the manifest file. + """ + return chain(self._entries, self._interfaces) + + def isempty(self): + """ + Return whether there are manifest entries to write + """ + return len(self._entries) + len(self._interfaces) == 0 + + +class MinifiedCommentStripped(BaseFile): + """ + File class for content minified by stripping comments. This wraps around a + BaseFile instance, and removes lines starting with a # from its content. + """ + + def __init__(self, file): + assert isinstance(file, BaseFile) + self._file = file + + def open(self): + """ + Return a file-like object allowing to read() the minified content of + the underlying file. + """ + content = "".join( + l + for l in [six.ensure_text(s) for s in self._file.open().readlines()] + if not l.startswith("#") + ) + return BytesIO(six.ensure_binary(content)) + + +class MinifiedJavaScript(BaseFile): + """ + File class for minifying JavaScript files. + """ + + def __init__(self, file, verify_command=None): + assert isinstance(file, BaseFile) + self._file = file + self._verify_command = verify_command + + def open(self): + output = six.StringIO() + minify = JavascriptMinify( + codecs.getreader("utf-8")(self._file.open()), output, quote_chars="'\"`" + ) + minify.minify() + output.seek(0) + output_source = six.ensure_binary(output.getvalue()) + output = BytesIO(output_source) + + if not self._verify_command: + return output + + input_source = self._file.open().read() + + with NamedTemporaryFile("wb+") as fh1, NamedTemporaryFile("wb+") as fh2: + fh1.write(input_source) + fh2.write(output_source) + fh1.flush() + fh2.flush() + + try: + args = list(self._verify_command) + args.extend([fh1.name, fh2.name]) + subprocess.check_output( + args, stderr=subprocess.STDOUT, universal_newlines=True + ) + except subprocess.CalledProcessError as e: + errors.warn( + "JS minification verification failed for %s:" + % (getattr(self._file, "path", "")) + ) + # Prefix each line with "Warning:" so mozharness doesn't + # think these error messages are real errors. + for line in e.output.splitlines(): + errors.warn(line) + + return self._file.open() + + return output + + +class BaseFinder(object): + def __init__( + self, base, minify=False, minify_js=False, minify_js_verify_command=None + ): + """ + Initializes the instance with a reference base directory. + + The optional minify argument specifies whether minification of code + should occur. minify_js is an additional option to control minification + of JavaScript. It requires minify to be True. + + minify_js_verify_command can be used to optionally verify the results + of JavaScript minification. If defined, it is expected to be an iterable + that will constitute the first arguments to a called process which will + receive the filenames of the original and minified JavaScript files. + The invoked process can then verify the results. If minification is + rejected, the process exits with a non-0 exit code and the original + JavaScript source is used. An example value for this argument is + ('/path/to/js', '/path/to/verify/script.js'). 
+ """ + if minify_js and not minify: + raise ValueError("minify_js requires minify.") + + self.base = base + self._minify = minify + self._minify_js = minify_js + self._minify_js_verify_command = minify_js_verify_command + + def find(self, pattern): + """ + Yield path, BaseFile_instance pairs for all files under the base + directory and its subdirectories that match the given pattern. See the + mozpack.path.match documentation for a description of the handled + patterns. + """ + while pattern.startswith("/"): + pattern = pattern[1:] + for p, f in self._find(pattern): + yield p, self._minify_file(p, f) + + def get(self, path): + """Obtain a single file. + + Where ``find`` is tailored towards matching multiple files, this method + is used for retrieving a single file. Use this method when performance + is critical. + + Returns a ``BaseFile`` if at most one file exists or ``None`` otherwise. + """ + files = list(self.find(path)) + if len(files) != 1: + return None + return files[0][1] + + def __iter__(self): + """ + Iterates over all files under the base directory (excluding files + starting with a '.' and files at any level under a directory starting + with a '.'). + for path, file in finder: + ... + """ + return self.find("") + + def __contains__(self, pattern): + raise RuntimeError( + "'in' operator forbidden for %s. Use contains()." % self.__class__.__name__ + ) + + def contains(self, pattern): + """ + Return whether some files under the base directory match the given + pattern. See the mozpack.path.match documentation for a description of + the handled patterns. + """ + return any(self.find(pattern)) + + def _minify_file(self, path, file): + """ + Return an appropriate MinifiedSomething wrapper for the given BaseFile + instance (file), according to the file type (determined by the given + path), if the FileFinder was created with minification enabled. + Otherwise, just return the given BaseFile instance. + """ + if not self._minify or isinstance(file, ExecutableFile): + return file + + if path.endswith((".ftl", ".properties")): + return MinifiedCommentStripped(file) + + if self._minify_js and path.endswith((".js", ".jsm")): + return MinifiedJavaScript(file, self._minify_js_verify_command) + + return file + + def _find_helper(self, pattern, files, file_getter): + """Generic implementation of _find. + + A few *Finder implementations share logic for returning results. + This function implements the custom logic. + + The ``file_getter`` argument is a callable that receives a path + that is known to exist. The callable should return a ``BaseFile`` + instance. + """ + if "*" in pattern: + for p in files: + if mozpath.match(p, pattern): + yield p, file_getter(p) + elif pattern == "": + for p in files: + yield p, file_getter(p) + elif pattern in files: + yield pattern, file_getter(pattern) + else: + for p in files: + if mozpath.basedir(p, [pattern]) == pattern: + yield p, file_getter(p) + + +class FileFinder(BaseFinder): + """ + Helper to get appropriate BaseFile instances from the file system. + """ + + def __init__( + self, + base, + find_executables=False, + ignore=(), + ignore_broken_symlinks=False, + find_dotfiles=False, + **kargs + ): + """ + Create a FileFinder for files under the given base directory. + + The find_executables argument determines whether the finder needs to + try to guess whether files are executables. Disabling this guessing + when not necessary can speed up the finder significantly. + + ``ignore`` accepts an iterable of patterns to ignore. 
Entries are
+        strings that match paths relative to ``base`` using
+        ``mozpath.match()``. This means if an entry corresponds
+        to a directory, all files under that directory will be ignored. If
+        an entry corresponds to a file, that particular file will be ignored.
+        ``ignore_broken_symlinks`` is passed by the packager to work around an
+        issue with the build system not cleaning up stale files in some common
+        cases. See bug 1297381.
+        """
+        BaseFinder.__init__(self, base, **kargs)
+        self.find_dotfiles = find_dotfiles
+        self.find_executables = find_executables
+        self.ignore = ignore
+        self.ignore_broken_symlinks = ignore_broken_symlinks
+
+    def _find(self, pattern):
+        """
+        Actual implementation of FileFinder.find(), dispatching to specialized
+        member functions depending on what kind of pattern was given.
+        Note all files with a name starting with a '.' are ignored when
+        scanning directories, but are not ignored when explicitly requested.
+        """
+        if "*" in pattern:
+            return self._find_glob("", mozpath.split(pattern))
+        elif os.path.isdir(os.path.join(self.base, pattern)):
+            return self._find_dir(pattern)
+        else:
+            f = self.get(pattern)
+            return ((pattern, f),) if f else ()
+
+    def _find_dir(self, path):
+        """
+        Actual implementation of FileFinder.find() when the given pattern
+        corresponds to an existing directory under the base directory.
+        Ignores file names starting with a '.' under the given path. If the
+        path itself has leaves starting with a '.', they are not ignored.
+        """
+        for p in self.ignore:
+            if mozpath.match(path, p):
+                return
+
+        # The sorted makes the output idempotent. Otherwise, we are
+        # likely dependent on filesystem implementation details, such as
+        # inode ordering.
+        for p in sorted(os.listdir(os.path.join(self.base, path))):
+            if p.startswith("."):
+                if p in (".", ".."):
+                    continue
+                if not self.find_dotfiles:
+                    continue
+            for p_, f in self._find(mozpath.join(path, p)):
+                yield p_, f
+
+    def get(self, path):
+        srcpath = os.path.join(self.base, path)
+        if not os.path.lexists(srcpath):
+            return None
+
+        if self.ignore_broken_symlinks and not os.path.exists(srcpath):
+            return None
+
+        for p in self.ignore:
+            if mozpath.match(path, p):
+                return None
+
+        if self.find_executables and is_executable(srcpath):
+            return ExecutableFile(srcpath)
+        else:
+            return File(srcpath)
+
+    def _find_glob(self, base, pattern):
+        """
+        Actual implementation of FileFinder.find() when the given pattern
+        contains globbing patterns ('*' or '**'). This is meant to be an
+        equivalent of:
+            for p, f in self:
+                if mozpath.match(p, pattern):
+                    yield p, f
+        but avoids scanning the entire tree.
+        """
+        if not pattern:
+            for p, f in self._find(base):
+                yield p, f
+        elif pattern[0] == "**":
+            for p, f in self._find(base):
+                if mozpath.match(p, mozpath.join(*pattern)):
+                    yield p, f
+        elif "*" in pattern[0]:
+            if not os.path.exists(os.path.join(self.base, base)):
+                return
+
+            for p in self.ignore:
+                if mozpath.match(base, p):
+                    return
+
+            # See above comment w.r.t. sorted() and idempotent behavior.
+            for p in sorted(os.listdir(os.path.join(self.base, base))):
+                if p.startswith(".") and not pattern[0].startswith("."):
+                    continue
+                if mozpath.match(p, pattern[0]):
+                    for p_, f in self._find_glob(mozpath.join(base, p), pattern[1:]):
+                        yield p_, f
+        else:
+            for p, f in self._find_glob(mozpath.join(base, pattern[0]), pattern[1:]):
+                yield p, f
+
+
+class JarFinder(BaseFinder):
+    """
+    Helper to get appropriate DeflatedFile instances from a JarReader.
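+
+    A brief usage sketch (the jar path is illustrative):
+
+    .. code-block:: python
+
+        from mozpack.mozjar import JarReader
+
+        finder = JarFinder("omni.ja", JarReader("omni.ja"))
+        for path, file in finder.find("chrome/**"):
+            ...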
+ """ + + def __init__(self, base, reader, **kargs): + """ + Create a JarFinder for files in the given JarReader. The base argument + is used as an indication of the Jar file location. + """ + assert isinstance(reader, JarReader) + BaseFinder.__init__(self, base, **kargs) + self._files = OrderedDict((f.filename, f) for f in reader) + + def _find(self, pattern): + """ + Actual implementation of JarFinder.find(), dispatching to specialized + member functions depending on what kind of pattern was given. + """ + return self._find_helper( + pattern, self._files, lambda x: DeflatedFile(self._files[x]) + ) + + +class TarFinder(BaseFinder): + """ + Helper to get files from a TarFile. + """ + + def __init__(self, base, tar, **kargs): + """ + Create a TarFinder for files in the given TarFile. The base argument + is used as an indication of the Tar file location. + """ + assert isinstance(tar, TarFile) + self._tar = tar + BaseFinder.__init__(self, base, **kargs) + self._files = OrderedDict((f.name, f) for f in tar if f.isfile()) + + def _find(self, pattern): + """ + Actual implementation of TarFinder.find(), dispatching to specialized + member functions depending on what kind of pattern was given. + """ + return self._find_helper( + pattern, self._files, lambda x: ExtractedTarFile(self._tar, self._files[x]) + ) + + +class ComposedFinder(BaseFinder): + """ + Composes multiple File Finders in some sort of virtual file system. + + A ComposedFinder is initialized from a dictionary associating paths + to `*Finder instances.` + + Note this could be optimized to be smarter than getting all the files + in advance. + """ + + def __init__(self, finders): + # Can't import globally, because of the dependency of mozpack.copier + # on this module. + from mozpack.copier import FileRegistry + + self.files = FileRegistry() + + for base, finder in sorted(six.iteritems(finders)): + if self.files.contains(base): + self.files.remove(base) + for p, f in finder.find(""): + self.files.add(mozpath.join(base, p), f) + + def find(self, pattern): + for p in self.files.match(pattern): + yield p, self.files[p] + + +class MercurialFile(BaseFile): + """File class for holding data from Mercurial.""" + + def __init__(self, client, rev, path): + self._content = client.cat( + [six.ensure_binary(path)], rev=six.ensure_binary(rev) + ) + + def open(self): + return BytesIO(six.ensure_binary(self._content)) + + def read(self): + return self._content + + +class MercurialRevisionFinder(BaseFinder): + """A finder that operates on a specific Mercurial revision.""" + + def __init__(self, repo, rev=".", recognize_repo_paths=False, **kwargs): + """Create a finder attached to a specific revision in a repository. + + If no revision is given, open the parent of the working directory. + + ``recognize_repo_paths`` will enable a mode where ``.get()`` will + recognize full paths that include the repo's path. Typically Finder + instances are "bound" to a base directory and paths are relative to + that directory. This mode changes that. When this mode is activated, + ``.find()`` will not work! This mode exists to support the moz.build + reader, which uses absolute paths instead of relative paths. The reader + should eventually be rewritten to use relative paths and this hack + should be removed (TODO bug 1171069). 
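+
+        A sketch of basic use (repository path and revision are
+        illustrative; requires the hglib package):
+
+        .. code-block:: python
+
+            finder = MercurialRevisionFinder("/path/to/repo", rev="abc123")
+            f = finder.get("python/mozbuild/README")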
+ """ + if not hglib: + raise Exception("hglib package not found") + + super(MercurialRevisionFinder, self).__init__(base=repo, **kwargs) + + self._root = mozpath.normpath(repo).rstrip("/") + self._recognize_repo_paths = recognize_repo_paths + + # We change directories here otherwise we have to deal with relative + # paths. + oldcwd = os.getcwd() + os.chdir(self._root) + try: + self._client = hglib.open(path=repo, encoding=b"utf-8") + finally: + os.chdir(oldcwd) + self._rev = rev if rev is not None else "." + self._files = OrderedDict() + + # Immediately populate the list of files in the repo since nearly every + # operation requires this list. + out = self._client.rawcommand( + [ + b"files", + b"--rev", + six.ensure_binary(self._rev), + ] + ) + for relpath in out.splitlines(): + # Mercurial may use \ as path separator on Windows. So use + # normpath(). + self._files[six.ensure_text(mozpath.normpath(relpath))] = None + + def _find(self, pattern): + if self._recognize_repo_paths: + raise NotImplementedError("cannot use find with recognize_repo_path") + + return self._find_helper(pattern, self._files, self._get) + + def get(self, path): + path = mozpath.normpath(path) + if self._recognize_repo_paths: + if not path.startswith(self._root): + raise ValueError( + "lookups in recognize_repo_paths mode must be " + "prefixed with repo path: %s" % path + ) + path = path[len(self._root) + 1 :] + + try: + return self._get(path) + except KeyError: + return None + + def _get(self, path): + # We lazy populate self._files because potentially creating tens of + # thousands of MercurialFile instances for every file in the repo is + # inefficient. + f = self._files[path] + if not f: + f = MercurialFile(self._client, self._rev, path) + self._files[path] = f + + return f + + +class FileListFinder(BaseFinder): + """Finder for a literal list of file names.""" + + def __init__(self, files): + """files must be a sorted list.""" + self._files = files + + @memoize + def _match(self, pattern): + """Return a sorted list of all files matching the given pattern.""" + # We don't use the utility _find_helper method because it's not tuned + # for performance in the way that we would like this class to be. That's + # a possible avenue for refactoring here. + ret = [] + # We do this as an optimization to figure out where in the sorted list + # to search and where to stop searching. + components = pattern.split("/") + prefix = "/".join(takewhile(lambda s: "*" not in s, components)) + start = bisect.bisect_left(self._files, prefix) + for i in six.moves.range(start, len(self._files)): + f = self._files[i] + if not f.startswith(prefix): + break + # Skip hidden files while scanning. + if "/." in f[len(prefix) :]: + continue + if mozpath.match(f, pattern): + ret.append(f) + return ret + + def find(self, pattern): + pattern = pattern.strip("/") + for path in self._match(pattern): + yield path, File(path) diff --git a/python/mozbuild/mozpack/macpkg.py b/python/mozbuild/mozpack/macpkg.py new file mode 100644 index 0000000000..cbeacbb388 --- /dev/null +++ b/python/mozbuild/mozpack/macpkg.py @@ -0,0 +1,217 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# TODO: Eventually consolidate with mozpack.pkg module. 
This is kept separate +# for now because of the vast difference in API, and to avoid churn for the +# users of this module (docker images, macos SDK artifacts) when changes are +# necessary in mozpack.pkg +import bz2 +import concurrent.futures +import io +import lzma +import os +import struct +import zlib +from xml.etree.ElementTree import XML + +from mozbuild.util import ReadOnlyNamespace + + +class ZlibFile(object): + def __init__(self, fileobj): + self.fileobj = fileobj + self.decompressor = zlib.decompressobj() + self.buf = b"" + + def read(self, length): + cutoff = min(length, len(self.buf)) + result = self.buf[:cutoff] + self.buf = self.buf[cutoff:] + while len(result) < length: + buf = self.fileobj.read(io.DEFAULT_BUFFER_SIZE) + if not buf: + break + buf = self.decompressor.decompress(buf) + cutoff = min(length - len(result), len(buf)) + result += buf[:cutoff] + self.buf += buf[cutoff:] + return result + + +def unxar(fileobj): + magic = fileobj.read(4) + if magic != b"xar!": + raise Exception("Not a XAR?") + + header_size = fileobj.read(2) + header_size = struct.unpack(">H", header_size)[0] + if header_size > 64: + raise Exception( + f"Don't know how to handle a {header_size} bytes XAR header size" + ) + header_size -= 6 # what we've read so far. + header = fileobj.read(header_size) + if len(header) != header_size: + raise Exception("Failed to read XAR header") + ( + version, + compressed_toc_len, + uncompressed_toc_len, + checksum_type, + ) = struct.unpack(">HQQL", header[:22]) + if version != 1: + raise Exception(f"XAR version {version} not supported") + toc = fileobj.read(compressed_toc_len) + base = fileobj.tell() + if len(toc) != compressed_toc_len: + raise Exception("Failed to read XAR TOC") + toc = zlib.decompress(toc) + if len(toc) != uncompressed_toc_len: + raise Exception("Corrupted XAR?") + toc = XML(toc).find("toc") + for f in toc.findall("file"): + if f.find("type").text != "file": + continue + filename = f.find("name").text + data = f.find("data") + length = int(data.find("length").text) + size = int(data.find("size").text) + offset = int(data.find("offset").text) + encoding = data.find("encoding").get("style") + fileobj.seek(base + offset, os.SEEK_SET) + content = Take(fileobj, length) + if encoding == "application/octet-stream": + if length != size: + raise Exception(f"{length} != {size}") + elif encoding == "application/x-bzip2": + content = bz2.BZ2File(content) + elif encoding == "application/x-gzip": + # Despite the encoding saying gzip, it is in fact, a raw zlib stream. + content = ZlibFile(content) + else: + raise Exception(f"XAR encoding {encoding} not supported") + + yield filename, content + + +class Pbzx(object): + def __init__(self, fileobj): + magic = fileobj.read(4) + if magic != b"pbzx": + raise Exception("Not a PBZX payload?") + # The first thing in the file looks like the size of each + # decompressed chunk except the last one. It should match + # decompressed_size in all cases except last, but we don't + # check. 
+ chunk_size = fileobj.read(8) + chunk_size = struct.unpack(">Q", chunk_size)[0] + executor = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count()) + self.chunk_getter = executor.map(self._uncompress_chunk, self._chunker(fileobj)) + self._init_one_chunk() + + @staticmethod + def _chunker(fileobj): + while True: + header = fileobj.read(16) + if header == b"": + break + if len(header) != 16: + raise Exception("Corrupted PBZX payload?") + decompressed_size, compressed_size = struct.unpack(">QQ", header) + chunk = fileobj.read(compressed_size) + yield decompressed_size, compressed_size, chunk + + @staticmethod + def _uncompress_chunk(data): + decompressed_size, compressed_size, chunk = data + if compressed_size != decompressed_size: + chunk = lzma.decompress(chunk) + if len(chunk) != decompressed_size: + raise Exception("Corrupted PBZX payload?") + return chunk + + def _init_one_chunk(self): + self.offset = 0 + self.chunk = next(self.chunk_getter, "") + + def read(self, length=None): + if length == 0: + return b"" + if length and len(self.chunk) >= self.offset + length: + start = self.offset + self.offset += length + return self.chunk[start : self.offset] + else: + result = self.chunk[self.offset :] + self._init_one_chunk() + if self.chunk: + # XXX: suboptimal if length is larger than the chunk size + result += self.read(None if length is None else length - len(result)) + return result + + +class Take(object): + """ + File object wrapper that allows to read at most a certain length. + """ + + def __init__(self, fileobj, limit): + self.fileobj = fileobj + self.limit = limit + + def read(self, length=None): + if length is None: + length = self.limit + else: + length = min(length, self.limit) + result = self.fileobj.read(length) + self.limit -= len(result) + return result + + +def uncpio(fileobj): + while True: + magic = fileobj.read(6) + # CPIO payloads in mac pkg files are using the portable ASCII format. + if magic != b"070707": + if magic.startswith(b"0707"): + raise Exception("Unsupported CPIO format") + raise Exception("Not a CPIO header") + header = fileobj.read(70) + ( + dev, + ino, + mode, + uid, + gid, + nlink, + rdev, + mtime, + namesize, + filesize, + ) = struct.unpack(">6s6s6s6s6s6s6s11s6s11s", header) + dev = int(dev, 8) + ino = int(ino, 8) + mode = int(mode, 8) + nlink = int(nlink, 8) + namesize = int(namesize, 8) + filesize = int(filesize, 8) + name = fileobj.read(namesize) + if name[-1] != 0: + raise Exception("File name is not NUL terminated") + name = name[:-1] + if name == b"TRAILER!!!": + break + + if b"/../" in name or name.startswith(b"../") or name == b"..": + raise Exception(".. is forbidden in file name") + if name.startswith(b"."): + name = name[1:] + if name.startswith(b"/"): + name = name[1:] + content = Take(fileobj, filesize) + yield name, ReadOnlyNamespace(mode=mode, nlink=nlink, dev=dev, ino=ino), content + # Ensure the content is totally consumed + while content.read(4096): + pass diff --git a/python/mozbuild/mozpack/manifests.py b/python/mozbuild/mozpack/manifests.py new file mode 100644 index 0000000000..2df6c729ea --- /dev/null +++ b/python/mozbuild/mozpack/manifests.py @@ -0,0 +1,483 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
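As an aside, a minimal editor's sketch of how the three macpkg helpers above (unxar, Pbzx, uncpio) compose; the list_pkg_payload name and the "Payload" member lookup are illustrative assumptions, not part of the module:

    from mozpack.macpkg import Pbzx, uncpio, unxar

    def list_pkg_payload(pkg_path):
        # A flat .pkg is a XAR archive whose "Payload" member wraps a
        # CPIO archive in a PBZX stream; walk it and print each entry.
        with open(pkg_path, "rb") as fileobj:
            for name, content in unxar(fileobj):
                if name.endswith("Payload"):
                    for path, st, data in uncpio(Pbzx(content)):
                        print(path.decode(), oct(st.mode))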
+ +import json +from contextlib import contextmanager + +import six + +import mozpack.path as mozpath + +from .files import ( + AbsoluteSymlinkFile, + ExistingFile, + File, + FileFinder, + GeneratedFile, + HardlinkFile, + PreprocessedFile, +) + + +# This probably belongs in a more generic module. Where? +@contextmanager +def _auto_fileobj(path, fileobj, mode="r"): + if path and fileobj: + raise AssertionError("Only 1 of path or fileobj may be defined.") + + if not path and not fileobj: + raise AssertionError("Must specified 1 of path or fileobj.") + + if path: + fileobj = open(path, mode) + + try: + yield fileobj + finally: + if path: + fileobj.close() + + +class UnreadableInstallManifest(Exception): + """Raised when an invalid install manifest is parsed.""" + + +class InstallManifest(object): + """Describes actions to be used with a copier.FileCopier instance. + + This class facilitates serialization and deserialization of data used to + construct a copier.FileCopier and to perform copy operations. + + The manifest defines source paths, destination paths, and a mechanism by + which the destination file should come into existence. + + Entries in the manifest correspond to the following types: + + copy -- The file specified as the source path will be copied to the + destination path. + + link -- The destination path will be a symlink or hardlink to the source + path. If symlinks are not supported, a copy will be performed. + + exists -- The destination path is accounted for and won't be deleted by + the FileCopier. If the destination path doesn't exist, an error is + raised. + + optional -- The destination path is accounted for and won't be deleted by + the FileCopier. No error is raised if the destination path does not + exist. + + patternlink -- Paths matched by the expression in the source path + will be symlinked or hardlinked to the destination directory. + + patterncopy -- Similar to patternlink except files are copied, not + symlinked/hardlinked. + + preprocess -- The file specified at the source path will be run through + the preprocessor, and the output will be written to the destination + path. + + content -- The destination file will be created with the given content. + + Version 1 of the manifest was the initial version. + Version 2 added optional path support + Version 3 added support for pattern entries. + Version 4 added preprocessed file support. + Version 5 added content support. + """ + + CURRENT_VERSION = 5 + + FIELD_SEPARATOR = "\x1f" + + # Negative values are reserved for non-actionable items, that is, metadata + # that doesn't describe files in the destination. + LINK = 1 + COPY = 2 + REQUIRED_EXISTS = 3 + OPTIONAL_EXISTS = 4 + PATTERN_LINK = 5 + PATTERN_COPY = 6 + PREPROCESS = 7 + CONTENT = 8 + + def __init__(self, path=None, fileobj=None): + """Create a new InstallManifest entry. + + If path is defined, the manifest will be populated with data from the + file path. + + If fileobj is defined, the manifest will be populated with data read + from the specified file object. + + Both path and fileobj cannot be defined. 
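+
+        Example (editor's illustrative sketch; the file name is hypothetical):
+
+        .. code-block:: python
+
+            manifest = InstallManifest()
+            manifest.add_copy("source/app.ini", "app.ini")
+            manifest.write(path="install.manifest")
+            # Round-tripping through a file yields an equal manifest.
+            assert InstallManifest(path="install.manifest") == manifest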
+ """ + self._dests = {} + self._source_files = set() + + if path or fileobj: + with _auto_fileobj(path, fileobj, "r") as fh: + self._source_files.add(fh.name) + self._load_from_fileobj(fh) + + def _load_from_fileobj(self, fileobj): + version = fileobj.readline().rstrip() + if version not in ("1", "2", "3", "4", "5"): + raise UnreadableInstallManifest("Unknown manifest version: %s" % version) + + for line in fileobj: + # Explicitly strip on \n so we don't strip out the FIELD_SEPARATOR + # as well. + line = line.rstrip("\n") + + fields = line.split(self.FIELD_SEPARATOR) + + record_type = int(fields[0]) + + if record_type == self.LINK: + dest, source = fields[1:] + self.add_link(source, dest) + continue + + if record_type == self.COPY: + dest, source = fields[1:] + self.add_copy(source, dest) + continue + + if record_type == self.REQUIRED_EXISTS: + _, path = fields + self.add_required_exists(path) + continue + + if record_type == self.OPTIONAL_EXISTS: + _, path = fields + self.add_optional_exists(path) + continue + + if record_type == self.PATTERN_LINK: + _, base, pattern, dest = fields[1:] + self.add_pattern_link(base, pattern, dest) + continue + + if record_type == self.PATTERN_COPY: + _, base, pattern, dest = fields[1:] + self.add_pattern_copy(base, pattern, dest) + continue + + if record_type == self.PREPROCESS: + dest, source, deps, marker, defines, warnings = fields[1:] + + self.add_preprocess( + source, + dest, + deps, + marker, + self._decode_field_entry(defines), + silence_missing_directive_warnings=bool(int(warnings)), + ) + continue + + if record_type == self.CONTENT: + dest, content = fields[1:] + + self.add_content( + six.ensure_text(self._decode_field_entry(content)), dest + ) + continue + + # Don't fail for non-actionable items, allowing + # forward-compatibility with those we will add in the future. + if record_type >= 0: + raise UnreadableInstallManifest("Unknown record type: %d" % record_type) + + def __len__(self): + return len(self._dests) + + def __contains__(self, item): + return item in self._dests + + def __eq__(self, other): + return isinstance(other, InstallManifest) and self._dests == other._dests + + def __neq__(self, other): + return not self.__eq__(other) + + def __ior__(self, other): + if not isinstance(other, InstallManifest): + raise ValueError("Can only | with another instance of InstallManifest.") + + self.add_entries_from(other) + + return self + + def _encode_field_entry(self, data): + """Converts an object into a format that can be stored in the manifest file. + + Complex data types, such as ``dict``, need to be converted into a text + representation before they can be written to a file. + """ + return json.dumps(data, sort_keys=True) + + def _decode_field_entry(self, data): + """Restores an object from a format that can be stored in the manifest file. + + Complex data types, such as ``dict``, need to be converted into a text + representation before they can be written to a file. + """ + return json.loads(data) + + def write(self, path=None, fileobj=None, expand_pattern=False): + """Serialize this manifest to a file or file object. + + If path is specified, that file will be written to. If fileobj is specified, + the serialized content will be written to that file object. + + It is an error if both are specified. 
+ """ + with _auto_fileobj(path, fileobj, "wt") as fh: + fh.write("%d\n" % self.CURRENT_VERSION) + + for dest in sorted(self._dests): + entry = self._dests[dest] + + if expand_pattern and entry[0] in ( + self.PATTERN_LINK, + self.PATTERN_COPY, + ): + type, base, pattern, dest = entry + type = self.LINK if type == self.PATTERN_LINK else self.COPY + finder = FileFinder(base) + paths = [f[0] for f in finder.find(pattern)] + for path in paths: + source = mozpath.join(base, path) + parts = ["%d" % type, mozpath.join(dest, path), source] + fh.write( + "%s\n" + % self.FIELD_SEPARATOR.join( + six.ensure_text(p) for p in parts + ) + ) + else: + parts = ["%d" % entry[0], dest] + parts.extend(entry[1:]) + fh.write( + "%s\n" + % self.FIELD_SEPARATOR.join(six.ensure_text(p) for p in parts) + ) + + def add_link(self, source, dest): + """Add a link to this manifest. + + dest will be either a symlink or hardlink to source. + """ + self._add_entry(dest, (self.LINK, source)) + + def add_copy(self, source, dest): + """Add a copy to this manifest. + + source will be copied to dest. + """ + self._add_entry(dest, (self.COPY, source)) + + def add_required_exists(self, dest): + """Record that a destination file must exist. + + This effectively prevents the listed file from being deleted. + """ + self._add_entry(dest, (self.REQUIRED_EXISTS,)) + + def add_optional_exists(self, dest): + """Record that a destination file may exist. + + This effectively prevents the listed file from being deleted. Unlike a + "required exists" file, files of this type do not raise errors if the + destination file does not exist. + """ + self._add_entry(dest, (self.OPTIONAL_EXISTS,)) + + def add_pattern_link(self, base, pattern, dest): + """Add a pattern match that results in links being created. + + A ``FileFinder`` will be created with its base set to ``base`` + and ``FileFinder.find()`` will be called with ``pattern`` to discover + source files. Each source file will be either symlinked or hardlinked + under ``dest``. + + Filenames under ``dest`` are constructed by taking the path fragment + after ``base`` and concatenating it with ``dest``. e.g. + + /foo/bar.h -> /foo/bar.h + """ + self._add_entry( + mozpath.join(dest, pattern), (self.PATTERN_LINK, base, pattern, dest) + ) + + def add_pattern_copy(self, base, pattern, dest): + """Add a pattern match that results in copies. + + See ``add_pattern_link()`` for usage. + """ + self._add_entry( + mozpath.join(dest, pattern), (self.PATTERN_COPY, base, pattern, dest) + ) + + def add_preprocess( + self, + source, + dest, + deps, + marker="#", + defines={}, + silence_missing_directive_warnings=False, + ): + """Add a preprocessed file to this manifest. + + ``source`` will be passed through preprocessor.py, and the output will be + written to ``dest``. + """ + self._add_entry( + dest, + ( + self.PREPROCESS, + source, + deps, + marker, + self._encode_field_entry(defines), + "1" if silence_missing_directive_warnings else "0", + ), + ) + + def add_content(self, content, dest): + """Add a file with the given content.""" + self._add_entry( + dest, + ( + self.CONTENT, + self._encode_field_entry(content), + ), + ) + + def _add_entry(self, dest, entry): + if dest in self._dests: + raise ValueError("Item already in manifest: %s" % dest) + + self._dests[dest] = entry + + def add_entries_from(self, other, base=""): + """ + Copy data from another mozpack.copier.InstallManifest + instance, adding an optional base prefix to the destination. 
+ + This allows to merge two manifests into a single manifest, or + two take the tagged union of two manifests. + """ + # We must copy source files to ourselves so extra dependencies from + # the preprocessor are taken into account. Ideally, we would track + # which source file each entry came from. However, this is more + # complicated and not yet implemented. The current implementation + # will result in over invalidation, possibly leading to performance + # loss. + self._source_files |= other._source_files + + for dest in sorted(other._dests): + new_dest = mozpath.join(base, dest) if base else dest + entry = other._dests[dest] + if entry[0] in (self.PATTERN_LINK, self.PATTERN_COPY): + entry_type, entry_base, entry_pattern, entry_dest = entry + new_entry_dest = mozpath.join(base, entry_dest) if base else entry_dest + new_entry = (entry_type, entry_base, entry_pattern, new_entry_dest) + else: + new_entry = tuple(entry) + + self._add_entry(new_dest, new_entry) + + def populate_registry(self, registry, defines_override={}, link_policy="symlink"): + """Populate a mozpack.copier.FileRegistry instance with data from us. + + The caller supplied a FileRegistry instance (or at least something that + conforms to its interface) and that instance is populated with data + from this manifest. + + Defines can be given to override the ones in the manifest for + preprocessing. + + The caller can set a link policy. This determines whether symlinks, + hardlinks, or copies are used for LINK and PATTERN_LINK. + """ + assert link_policy in ("symlink", "hardlink", "copy") + for dest in sorted(self._dests): + entry = self._dests[dest] + install_type = entry[0] + + if install_type == self.LINK: + if link_policy == "symlink": + cls = AbsoluteSymlinkFile + elif link_policy == "hardlink": + cls = HardlinkFile + else: + cls = File + registry.add(dest, cls(entry[1])) + continue + + if install_type == self.COPY: + registry.add(dest, File(entry[1])) + continue + + if install_type == self.REQUIRED_EXISTS: + registry.add(dest, ExistingFile(required=True)) + continue + + if install_type == self.OPTIONAL_EXISTS: + registry.add(dest, ExistingFile(required=False)) + continue + + if install_type in (self.PATTERN_LINK, self.PATTERN_COPY): + _, base, pattern, dest = entry + finder = FileFinder(base) + paths = [f[0] for f in finder.find(pattern)] + + if install_type == self.PATTERN_LINK: + if link_policy == "symlink": + cls = AbsoluteSymlinkFile + elif link_policy == "hardlink": + cls = HardlinkFile + else: + cls = File + else: + cls = File + + for path in paths: + source = mozpath.join(base, path) + registry.add(mozpath.join(dest, path), cls(source)) + + continue + + if install_type == self.PREPROCESS: + defines = self._decode_field_entry(entry[4]) + if defines_override: + defines.update(defines_override) + registry.add( + dest, + PreprocessedFile( + entry[1], + depfile_path=entry[2], + marker=entry[3], + defines=defines, + extra_depends=self._source_files, + silence_missing_directive_warnings=bool(int(entry[5])), + ), + ) + + continue + + if install_type == self.CONTENT: + # GeneratedFile expect the buffer interface, which the unicode + # type doesn't have, so encode to a str. 
+ content = self._decode_field_entry(entry[1]).encode("utf-8") + registry.add(dest, GeneratedFile(content)) + continue + + raise Exception( + "Unknown install type defined in manifest: %d" % install_type + ) diff --git a/python/mozbuild/mozpack/mozjar.py b/python/mozbuild/mozpack/mozjar.py new file mode 100644 index 0000000000..6500ebfcec --- /dev/null +++ b/python/mozbuild/mozpack/mozjar.py @@ -0,0 +1,842 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import struct +import zlib +from collections import OrderedDict +from io import BytesIO, UnsupportedOperation +from zipfile import ZIP_DEFLATED, ZIP_STORED + +import six + +import mozpack.path as mozpath +from mozbuild.util import ensure_bytes + +JAR_STORED = ZIP_STORED +JAR_DEFLATED = ZIP_DEFLATED +MAX_WBITS = 15 + + +class JarReaderError(Exception): + """Error type for Jar reader errors.""" + + +class JarWriterError(Exception): + """Error type for Jar writer errors.""" + + +class JarStruct(object): + """ + Helper used to define ZIP archive raw data structures. Data structures + handled by this helper all start with a magic number, defined in + subclasses MAGIC field as a 32-bits unsigned integer, followed by data + structured as described in subclasses STRUCT field. + + The STRUCT field contains a list of (name, type) pairs where name is a + field name, and the type can be one of 'uint32', 'uint16' or one of the + field names. In the latter case, the field is considered to be a string + buffer with a length given in that field. + For example, + + .. code-block:: python + + STRUCT = [ + ('version', 'uint32'), + ('filename_size', 'uint16'), + ('filename', 'filename_size') + ] + + describes a structure with a 'version' 32-bits unsigned integer field, + followed by a 'filename_size' 16-bits unsigned integer field, followed by a + filename_size-long string buffer 'filename'. + + Fields that are used as other fields size are not stored in objects. In the + above example, an instance of such subclass would only have two attributes: + - obj['version'] + - obj['filename'] + + filename_size would be obtained with len(obj['filename']). + + JarStruct subclasses instances can be either initialized from existing data + (deserialized), or with empty fields. + """ + + TYPE_MAPPING = {"uint32": (b"I", 4), "uint16": (b"H", 2)} + + def __init__(self, data=None): + """ + Create an instance from the given data. Data may be omitted to create + an instance with empty fields. + """ + assert self.MAGIC and isinstance(self.STRUCT, OrderedDict) + self.size_fields = set( + t for t in six.itervalues(self.STRUCT) if t not in JarStruct.TYPE_MAPPING + ) + self._values = {} + if data: + self._init_data(data) + else: + self._init_empty() + + def _init_data(self, data): + """ + Initialize an instance from data, following the data structure + described in self.STRUCT. The self.MAGIC signature is expected at + data[:4]. + """ + assert data is not None + self.signature, size = JarStruct.get_data("uint32", data) + if self.signature != self.MAGIC: + raise JarReaderError("Bad magic") + offset = size + # For all fields used as other fields sizes, keep track of their value + # separately. 
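+        # Editor's note: with the example STRUCT from the class docstring this
+        # starts out as {'filename_size': 0}; the real value is recorded when
+        # the uint16 field is deserialized, then consumed to slice 'filename'.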
+ sizes = dict((t, 0) for t in self.size_fields) + for name, t in six.iteritems(self.STRUCT): + if t in JarStruct.TYPE_MAPPING: + value, size = JarStruct.get_data(t, data[offset:]) + else: + size = sizes[t] + value = data[offset : offset + size] + if isinstance(value, memoryview): + value = value.tobytes() + if name not in sizes: + self._values[name] = value + else: + sizes[name] = value + offset += size + + def _init_empty(self): + """ + Initialize an instance with empty fields. + """ + self.signature = self.MAGIC + for name, t in six.iteritems(self.STRUCT): + if name in self.size_fields: + continue + self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else "" + + @staticmethod + def get_data(type, data): + """ + Deserialize a single field of given type (must be one of + JarStruct.TYPE_MAPPING) at the given offset in the given data. + """ + assert type in JarStruct.TYPE_MAPPING + assert data is not None + format, size = JarStruct.TYPE_MAPPING[type] + data = data[:size] + if isinstance(data, memoryview): + data = data.tobytes() + return struct.unpack(b"<" + format, data)[0], size + + def serialize(self): + """ + Serialize the data structure according to the data structure definition + from self.STRUCT. + """ + serialized = struct.pack(b"" % ( + self.__class__.__name__, + " ".join("%s=%s" % (n, v) for n, v in self), + ) + + +class JarCdirEnd(JarStruct): + """ + End of central directory record. + """ + + MAGIC = 0x06054B50 + STRUCT = OrderedDict( + [ + ("disk_num", "uint16"), + ("cdir_disk", "uint16"), + ("disk_entries", "uint16"), + ("cdir_entries", "uint16"), + ("cdir_size", "uint32"), + ("cdir_offset", "uint32"), + ("comment_size", "uint16"), + ("comment", "comment_size"), + ] + ) + + +CDIR_END_SIZE = JarCdirEnd().size + + +class JarCdirEntry(JarStruct): + """ + Central directory file header + """ + + MAGIC = 0x02014B50 + STRUCT = OrderedDict( + [ + ("creator_version", "uint16"), + ("min_version", "uint16"), + ("general_flag", "uint16"), + ("compression", "uint16"), + ("lastmod_time", "uint16"), + ("lastmod_date", "uint16"), + ("crc32", "uint32"), + ("compressed_size", "uint32"), + ("uncompressed_size", "uint32"), + ("filename_size", "uint16"), + ("extrafield_size", "uint16"), + ("filecomment_size", "uint16"), + ("disknum", "uint16"), + ("internal_attr", "uint16"), + ("external_attr", "uint32"), + ("offset", "uint32"), + ("filename", "filename_size"), + ("extrafield", "extrafield_size"), + ("filecomment", "filecomment_size"), + ] + ) + + +class JarLocalFileHeader(JarStruct): + """ + Local file header + """ + + MAGIC = 0x04034B50 + STRUCT = OrderedDict( + [ + ("min_version", "uint16"), + ("general_flag", "uint16"), + ("compression", "uint16"), + ("lastmod_time", "uint16"), + ("lastmod_date", "uint16"), + ("crc32", "uint32"), + ("compressed_size", "uint32"), + ("uncompressed_size", "uint32"), + ("filename_size", "uint16"), + ("extra_field_size", "uint16"), + ("filename", "filename_size"), + ("extra_field", "extra_field_size"), + ] + ) + + +class JarFileReader(object): + """ + File-like class for use by JarReader to give access to individual files + within a Jar archive. + """ + + def __init__(self, header, data): + """ + Initialize a JarFileReader. header is the local file header + corresponding to the file in the jar archive, data a buffer containing + the file data. + """ + assert header["compression"] in [JAR_DEFLATED, JAR_STORED] + self._data = data + # Copy some local file header fields. 
+ for name in ["compressed_size", "uncompressed_size", "crc32"]: + setattr(self, name, header[name]) + self.filename = six.ensure_text(header["filename"]) + self.compressed = header["compression"] != JAR_STORED + self.compress = header["compression"] + + def readable(self): + return True + + def read(self, length=-1): + """ + Read some amount of uncompressed data. + """ + return self.uncompressed_data.read(length) + + def readinto(self, b): + """ + Read bytes into a pre-allocated, writable bytes-like object `b` and return + the number of bytes read. + """ + return self.uncompressed_data.readinto(b) + + def readlines(self): + """ + Return a list containing all the lines of data in the uncompressed + data. + """ + return self.read().splitlines(True) + + def __iter__(self): + """ + Iterator, to support the "for line in fileobj" constructs. + """ + return iter(self.readlines()) + + def seek(self, pos, whence=os.SEEK_SET): + """ + Change the current position in the uncompressed data. Subsequent reads + will start from there. + """ + return self.uncompressed_data.seek(pos, whence) + + def close(self): + """ + Free the uncompressed data buffer. + """ + self.uncompressed_data.close() + + @property + def closed(self): + return self.uncompressed_data.closed + + @property + def compressed_data(self): + """ + Return the raw compressed data. + """ + return self._data[: self.compressed_size] + + @property + def uncompressed_data(self): + """ + Return the uncompressed data. + """ + if hasattr(self, "_uncompressed_data"): + return self._uncompressed_data + data = self.compressed_data + if self.compress == JAR_STORED: + data = data.tobytes() + elif self.compress == JAR_DEFLATED: + data = zlib.decompress(data.tobytes(), -MAX_WBITS) + else: + assert False # Can't be another value per __init__ + if len(data) != self.uncompressed_size: + raise JarReaderError("Corrupted file? %s" % self.filename) + self._uncompressed_data = BytesIO(data) + return self._uncompressed_data + + +class JarReader(object): + """ + Class with methods to read Jar files. Can open standard jar files as well + as Mozilla jar files (see further details in the JarWriter documentation). + """ + + def __init__(self, file=None, fileobj=None, data=None): + """ + Opens the given file as a Jar archive. Use the given file-like object + if one is given instead of opening the given file name. + """ + if fileobj: + data = fileobj.read() + elif file: + data = open(file, "rb").read() + self._data = memoryview(data) + # The End of Central Directory Record has a variable size because of + # comments it may contain, so scan for it from the end of the file. + offset = -CDIR_END_SIZE + while True: + signature = JarStruct.get_data("uint32", self._data[offset:])[0] + if signature == JarCdirEnd.MAGIC: + break + if offset == -len(self._data): + raise JarReaderError("Not a jar?") + offset -= 1 + self._cdir_end = JarCdirEnd(self._data[offset:]) + + def close(self): + """ + Free some resources associated with the Jar. + """ + del self._data + + @property + def compression(self): + entries = self.entries + if not entries: + return JAR_STORED + return max(f["compression"] for f in six.itervalues(entries)) + + @property + def entries(self): + """ + Return an ordered dict of central directory entries, indexed by + filename, in the order they appear in the Jar archive central + directory. Directory entries are skipped. 
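+
+        Example (editor's sketch; the archive name is hypothetical):
+
+        .. code-block:: python
+
+            reader = JarReader("omni.ja")
+            for name, entry in reader.entries.items():
+                print(name, entry["uncompressed_size"])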
+ """ + if hasattr(self, "_entries"): + return self._entries + preload = 0 + if self.is_optimized: + preload = JarStruct.get_data("uint32", self._data)[0] + entries = OrderedDict() + offset = self._cdir_end["cdir_offset"] + for e in six.moves.xrange(self._cdir_end["cdir_entries"]): + entry = JarCdirEntry(self._data[offset:]) + offset += entry.size + # Creator host system. 0 is MSDOS, 3 is Unix + host = entry["creator_version"] >> 8 + # External attributes values depend on host above. On Unix the + # higher bits are the stat.st_mode value. On MSDOS, the lower bits + # are the FAT attributes. + xattr = entry["external_attr"] + # Skip directories + if (host == 0 and xattr & 0x10) or (host == 3 and xattr & (0o040000 << 16)): + continue + entries[six.ensure_text(entry["filename"])] = entry + if entry["offset"] < preload: + self._last_preloaded = six.ensure_text(entry["filename"]) + self._entries = entries + return entries + + @property + def is_optimized(self): + """ + Return whether the jar archive is optimized. + """ + # In optimized jars, the central directory is at the beginning of the + # file, after a single 32-bits value, which is the length of data + # preloaded. + return self._cdir_end["cdir_offset"] == JarStruct.TYPE_MAPPING["uint32"][1] + + @property + def last_preloaded(self): + """ + Return the name of the last file that is set to be preloaded. + See JarWriter documentation for more details on preloading. + """ + if hasattr(self, "_last_preloaded"): + return self._last_preloaded + self._last_preloaded = None + self.entries + return self._last_preloaded + + def _getreader(self, entry): + """ + Helper to create a JarFileReader corresponding to the given central + directory entry. + """ + header = JarLocalFileHeader(self._data[entry["offset"] :]) + for key, value in entry: + if key in header and header[key] != value: + raise JarReaderError( + "Central directory and file header " + + "mismatch. Corrupted archive?" + ) + return JarFileReader(header, self._data[entry["offset"] + header.size :]) + + def __iter__(self): + """ + Iterate over all files in the Jar archive, in the form of + JarFileReaders. + for file in jarReader: + ... + """ + for entry in six.itervalues(self.entries): + yield self._getreader(entry) + + def __getitem__(self, name): + """ + Get a JarFileReader for the given file name. + """ + return self._getreader(self.entries[name]) + + def __contains__(self, name): + """ + Return whether the given file name appears in the Jar archive. + """ + return name in self.entries + + +class JarWriter(object): + """ + Class with methods to write Jar files. Can write more-or-less standard jar + archives as well as jar archives optimized for Gecko. See the documentation + for the close() member function for a description of both layouts. + """ + + def __init__(self, file=None, fileobj=None, compress=True, compress_level=9): + """ + Initialize a Jar archive in the given file. Use the given file-like + object if one is given instead of opening the given file name. + The compress option determines the default behavior for storing data + in the jar archive. The optimize options determines whether the jar + archive should be optimized for Gecko or not. ``compress_level`` + defines the zlib compression level. It must be a value between 0 and 9 + and defaults to 9, the highest and slowest level of compression. 
+ """ + if fileobj: + self._data = fileobj + else: + self._data = open(file, "wb") + if compress is True: + compress = JAR_DEFLATED + self._compress = compress + self._compress_level = compress_level + self._contents = OrderedDict() + self._last_preloaded = None + + def __enter__(self): + """ + Context manager __enter__ method for JarWriter. + """ + return self + + def __exit__(self, type, value, tb): + """ + Context manager __exit__ method for JarWriter. + """ + self.finish() + + def finish(self): + """ + Flush and close the Jar archive. + + Standard jar archives are laid out like the following: + - Local file header 1 + - File data 1 + - Local file header 2 + - File data 2 + - (...) + - Central directory entry pointing at Local file header 1 + - Central directory entry pointing at Local file header 2 + - (...) + - End of central directory, pointing at first central directory + entry. + + Jar archives optimized for Gecko are laid out like the following: + - 32-bits unsigned integer giving the amount of data to preload. + - Central directory entry pointing at Local file header 1 + - Central directory entry pointing at Local file header 2 + - (...) + - End of central directory, pointing at first central directory + entry. + - Local file header 1 + - File data 1 + - Local file header 2 + - File data 2 + - (...) + - End of central directory, pointing at first central directory + entry. + + The duplication of the End of central directory is to accomodate some + Zip reading tools that want an end of central directory structure to + follow the central directory entries. + """ + offset = 0 + headers = {} + preload_size = 0 + # Prepare central directory entries + for entry, content in six.itervalues(self._contents): + header = JarLocalFileHeader() + for name in entry.STRUCT: + if name in header: + header[name] = entry[name] + entry["offset"] = offset + offset += len(content) + header.size + if six.ensure_text(entry["filename"]) == self._last_preloaded: + preload_size = offset + headers[entry] = header + # Prepare end of central directory + end = JarCdirEnd() + end["disk_entries"] = len(self._contents) + end["cdir_entries"] = end["disk_entries"] + end["cdir_size"] = six.moves.reduce( + lambda x, y: x + y[0].size, self._contents.values(), 0 + ) + # On optimized archives, store the preloaded size and the central + # directory entries, followed by the first end of central directory. + if preload_size: + end["cdir_offset"] = 4 + offset = end["cdir_size"] + end["cdir_offset"] + end.size + preload_size += offset + self._data.write(struct.pack(" 0: + errors.fatal('Malformed manifest: space in component name "%s"' % name) + self._name = name + self._destdir = destdir + + def __repr__(self): + s = self.name + if self.destdir: + s += ' destdir="%s"' % self.destdir + return s + + @property + def name(self): + return self._name + + @property + def destdir(self): + return self._destdir + + @staticmethod + def _triples(lst): + """ + Split [1, 2, 3, 4, 5, 6, 7] into [(1, 2, 3), (4, 5, 6)]. + """ + return zip(*[iter(lst)] * 3) + + KEY_VALUE_RE = re.compile( + r""" + \s* # optional whitespace. + ([a-zA-Z0-9_]+) # key. + \s*=\s* # optional space around =. + "([^"]*)" # value without surrounding quotes. + (?:\s+|$) + """, + re.VERBOSE, + ) + + @staticmethod + def _split_options(string): + """ + Split 'key1="value1" key2="value2"' into + {'key1':'value1', 'key2':'value2'}. + + Returned keys and values are all strings. + + Throws ValueError if the input is malformed. 
+ """ + options = {} + splits = Component.KEY_VALUE_RE.split(string) + if len(splits) % 3 != 1: + # This should never happen -- we expect to always split + # into ['', ('key', 'val', '')*]. + raise ValueError("Bad input") + if splits[0]: + raise ValueError("Unrecognized input " + splits[0]) + for key, val, no_match in Component._triples(splits[1:]): + if no_match: + raise ValueError("Unrecognized input " + no_match) + options[key] = val + return options + + @staticmethod + def _split_component_and_options(string): + """ + Split 'name key1="value1" key2="value2"' into + ('name', {'key1':'value1', 'key2':'value2'}). + + Returned name, keys and values are all strings. + + Raises ValueError if the input is malformed. + """ + splits = string.strip().split(None, 1) + if not splits: + raise ValueError("No component found") + component = splits[0].strip() + if not component: + raise ValueError("No component found") + if not re.match("[a-zA-Z0-9_\-]+$", component): + raise ValueError("Bad component name " + component) + options = Component._split_options(splits[1]) if len(splits) > 1 else {} + return component, options + + @staticmethod + def from_string(string): + """ + Create a component from a string. + """ + try: + name, options = Component._split_component_and_options(string) + except ValueError as e: + errors.fatal("Malformed manifest: %s" % e) + return + destdir = options.pop("destdir", "") + if options: + errors.fatal( + "Malformed manifest: options %s not recognized" % options.keys() + ) + return Component(name, destdir=destdir) + + +class PackageManifestParser(object): + """ + Class for parsing of a package manifest, after preprocessing. + + A package manifest is a list of file paths, with some syntaxic sugar: + [] designates a toplevel component. Example: [xpcom] + - in front of a file specifies it to be removed + * wildcard support + ** expands to all files and zero or more directories + ; file comment + + The parser takes input from the preprocessor line by line, and pushes + parsed information to a sink object. + + The add and remove methods of the sink object are called with the + current Component instance and a path. + """ + + def __init__(self, sink): + """ + Initialize the package manifest parser with the given sink. + """ + self._component = Component("") + self._sink = sink + + def handle_line(self, str): + """ + Handle a line of input and push the parsed information to the sink + object. + """ + # Remove comments. + str = str.strip() + if not str or str.startswith(";"): + return + if str.startswith("[") and str.endswith("]"): + self._component = Component.from_string(str[1:-1]) + elif str.startswith("-"): + str = str[1:] + self._sink.remove(self._component, str) + elif "," in str: + errors.fatal("Incompatible syntax") + else: + self._sink.add(self._component, str) + + +class PreprocessorOutputWrapper(object): + """ + File-like helper to handle the preprocessor output and send it to a parser. + The parser's handle_line method is called in the relevant errors.context. + """ + + def __init__(self, preprocessor, parser): + self._parser = parser + self._pp = preprocessor + + def write(self, str): + with errors.context(self._pp.context["FILE"], self._pp.context["LINE"]): + self._parser.handle_line(str) + + +def preprocess(input, parser, defines={}): + """ + Preprocess the file-like input with the given defines, and send the + preprocessed output line by line to the given parser. 
+ """ + pp = Preprocessor() + pp.context.update(defines) + pp.do_filter("substitution") + pp.out = PreprocessorOutputWrapper(pp, parser) + pp.do_include(input) + + +def preprocess_manifest(sink, manifest, defines={}): + """ + Preprocess the given file-like manifest with the given defines, and push + the parsed information to a sink. See PackageManifestParser documentation + for more details on the sink. + """ + preprocess(manifest, PackageManifestParser(sink), defines) + + +class CallDeque(deque): + """ + Queue of function calls to make. + """ + + def append(self, function, *args): + deque.append(self, (errors.get_context(), function, args)) + + def execute(self): + while True: + try: + context, function, args = self.popleft() + except IndexError: + return + if context: + with errors.context(context[0], context[1]): + function(*args) + else: + function(*args) + + +class SimplePackager(object): + """ + Helper used to translate and buffer instructions from the + SimpleManifestSink to a formatter. Formatters expect some information to be + given first that the simple manifest contents can't guarantee before the + end of the input. + """ + + def __init__(self, formatter): + self.formatter = formatter + # Queue for formatter.add_interfaces()/add_manifest() calls. + self._queue = CallDeque() + # Queue for formatter.add_manifest() calls for ManifestChrome. + self._chrome_queue = CallDeque() + # Queue for formatter.add() calls. + self._file_queue = CallDeque() + # All paths containing addons. (key is path, value is whether it + # should be packed or unpacked) + self._addons = {} + # All manifest paths imported. + self._manifests = set() + # All manifest paths included from some other manifest. + self._included_manifests = {} + self._closed = False + + # Parsing RDF is complex, and would require an external library to do + # properly. Just go with some hackish but probably sufficient regexp + UNPACK_ADDON_RE = re.compile( + r"""(?: + true + |em:unpack=(?P["']?)true(?P=quote) + )""", + re.VERBOSE, + ) + + def add(self, path, file): + """ + Add the given BaseFile instance with the given path. + """ + assert not self._closed + if is_manifest(path): + self._add_manifest_file(path, file) + elif path.endswith(".xpt"): + self._queue.append(self.formatter.add_interfaces, path, file) + else: + self._file_queue.append(self.formatter.add, path, file) + if mozpath.basename(path) == "install.rdf": + addon = True + install_rdf = six.ensure_text(file.open().read()) + if self.UNPACK_ADDON_RE.search(install_rdf): + addon = "unpacked" + self._add_addon(mozpath.dirname(path), addon) + elif mozpath.basename(path) == "manifest.json": + manifest = six.ensure_text(file.open().read()) + try: + parsed = json.loads(manifest) + except ValueError: + pass + if isinstance(parsed, dict) and "manifest_version" in parsed: + self._add_addon(mozpath.dirname(path), True) + + def _add_addon(self, path, addon_type): + """ + Add the given BaseFile to the collection of addons if a parent + directory is not already in the collection. + """ + if mozpath.basedir(path, self._addons) is not None: + return + + for dir in self._addons: + if mozpath.basedir(dir, [path]) is not None: + del self._addons[dir] + break + + self._addons[path] = addon_type + + def _add_manifest_file(self, path, file): + """ + Add the given BaseFile with manifest file contents with the given path. + """ + self._manifests.add(path) + base = "" + if hasattr(file, "path"): + # Find the directory the given path is relative to. 
+ b = mozpath.normsep(file.path) + if b.endswith("/" + path) or b == path: + base = os.path.normpath(b[: -len(path)]) + for e in parse_manifest(base, path, codecs.getreader("utf-8")(file.open())): + # ManifestResources need to be given after ManifestChrome, so just + # put all ManifestChrome in a separate queue to make them first. + if isinstance(e, ManifestChrome): + # e.move(e.base) just returns a clone of the entry. + self._chrome_queue.append(self.formatter.add_manifest, e.move(e.base)) + elif not isinstance(e, (Manifest, ManifestInterfaces)): + self._queue.append(self.formatter.add_manifest, e.move(e.base)) + # If a binary component is added to an addon, prevent the addon + # from being packed. + if isinstance(e, ManifestBinaryComponent): + addon = mozpath.basedir(e.base, self._addons) + if addon: + self._addons[addon] = "unpacked" + if isinstance(e, Manifest): + if e.flags: + errors.fatal("Flags are not supported on " + '"manifest" entries') + self._included_manifests[e.path] = path + + def get_bases(self, addons=True): + """ + Return all paths under which root manifests have been found. Root + manifests are manifests that are included in no other manifest. + `addons` indicates whether to include addon bases as well. + """ + all_bases = set( + mozpath.dirname(m) for m in self._manifests - set(self._included_manifests) + ) + if not addons: + all_bases -= set(self._addons) + else: + # If for some reason some detected addon doesn't have a + # non-included manifest. + all_bases |= set(self._addons) + return all_bases + + def close(self): + """ + Push all instructions to the formatter. + """ + self._closed = True + + bases = self.get_bases() + broken_bases = sorted( + m + for m, includer in six.iteritems(self._included_manifests) + if mozpath.basedir(m, bases) != mozpath.basedir(includer, bases) + ) + for m in broken_bases: + errors.fatal( + '"%s" is included from "%s", which is outside "%s"' + % (m, self._included_manifests[m], mozpath.basedir(m, bases)) + ) + for base in sorted(bases): + self.formatter.add_base(base, self._addons.get(base, False)) + self._chrome_queue.execute() + self._queue.execute() + self._file_queue.execute() + + +class SimpleManifestSink(object): + """ + Parser sink for "simple" package manifests. Simple package manifests use + the format described in the PackageManifestParser documentation, but don't + support file removals, and require manifests, interfaces and chrome data to + be explicitely listed. + Entries starting with bin/ are searched under bin/ in the FileFinder, but + are packaged without the bin/ prefix. + """ + + def __init__(self, finder, formatter): + """ + Initialize the SimpleManifestSink. The given FileFinder is used to + get files matching the patterns given in the manifest. The given + formatter does the packaging job. + """ + self._finder = finder + self.packager = SimplePackager(formatter) + self._closed = False + self._manifests = set() + + @staticmethod + def normalize_path(path): + """ + Remove any bin/ prefix. + """ + if mozpath.basedir(path, ["bin"]) == "bin": + return mozpath.relpath(path, "bin") + return path + + def add(self, component, pattern): + """ + Add files with the given pattern in the given component. 
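+
+        Example (editor's sketch; the component and pattern are hypothetical):
+
+        .. code-block:: python
+
+            sink.add(Component("xpcom"), "bin/*.dll")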
+ """ + assert not self._closed + added = False + for p, f in self._finder.find(pattern): + added = True + if is_manifest(p): + self._manifests.add(p) + dest = mozpath.join(component.destdir, SimpleManifestSink.normalize_path(p)) + self.packager.add(dest, f) + if not added: + errors.error("Missing file(s): %s" % pattern) + + def remove(self, component, pattern): + """ + Remove files with the given pattern in the given component. + """ + assert not self._closed + errors.fatal("Removal is unsupported") + + def close(self, auto_root_manifest=True): + """ + Add possibly missing bits and push all instructions to the formatter. + """ + if auto_root_manifest: + # Simple package manifests don't contain the root manifests, so + # find and add them. + paths = [mozpath.dirname(m) for m in self._manifests] + path = mozpath.dirname(mozpath.commonprefix(paths)) + for p, f in self._finder.find(mozpath.join(path, "chrome.manifest")): + if p not in self._manifests: + self.packager.add(SimpleManifestSink.normalize_path(p), f) + self.packager.close() diff --git a/python/mozbuild/mozpack/packager/formats.py b/python/mozbuild/mozpack/packager/formats.py new file mode 100644 index 0000000000..95a6dee2f6 --- /dev/null +++ b/python/mozbuild/mozpack/packager/formats.py @@ -0,0 +1,354 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from six.moves.urllib.parse import urlparse + +import mozpack.path as mozpath +from mozpack.chrome.manifest import ( + Manifest, + ManifestBinaryComponent, + ManifestChrome, + ManifestInterfaces, + ManifestMultiContent, + ManifestResource, +) +from mozpack.copier import FileRegistry, FileRegistrySubtree, Jarrer +from mozpack.errors import errors +from mozpack.files import ManifestFile + +""" +Formatters are classes receiving packaging instructions and creating the +appropriate package layout. + +There are three distinct formatters, each handling one of the different chrome +formats: + - flat: essentially, copies files from the source with the same file system + layout. Manifests entries are grouped in a single manifest per directory, + as well as XPT interfaces. + - jar: chrome content is packaged in jar files. + - omni: chrome content, modules, non-binary components, and many other + elements are packaged in an omnijar file for each base directory. + +The base interface provides the following methods: + - add_base(path [, addon]) + Register a base directory for an application or GRE, or an addon. + Base directories usually contain a root manifest (manifests not + included in any other manifest) named chrome.manifest. + The optional addon argument tells whether the base directory + is that of a packed addon (True), unpacked addon ('unpacked') or + otherwise (False). + The method may only be called in sorted order of `path` (alphanumeric + order, parents before children). + - add(path, content) + Add the given content (BaseFile instance) at the given virtual path + - add_interfaces(path, content) + Add the given content (BaseFile instance) as an interface. Equivalent + to add(path, content) with the right add_manifest(). + - add_manifest(entry) + Add a ManifestEntry. + - contains(path) + Returns whether the given virtual path is known of the formatter. + +The virtual paths mentioned above are paths as they would be with a flat +chrome. + +Formatters all take a FileCopier instance they will fill with the packaged +data. 
+""" + + +class PiecemealFormatter(object): + """ + Generic formatter that dispatches across different sub-formatters + according to paths. + """ + + def __init__(self, copier): + assert isinstance(copier, (FileRegistry, FileRegistrySubtree)) + self.copier = copier + self._sub_formatter = {} + self._frozen_bases = False + + def add_base(self, base, addon=False): + # Only allow to add a base directory before calls to _get_base() + assert not self._frozen_bases + assert base not in self._sub_formatter + assert all(base > b for b in self._sub_formatter) + self._add_base(base, addon) + + def _get_base(self, path): + """ + Return the deepest base directory containing the given path. + """ + self._frozen_bases = True + base = mozpath.basedir(path, self._sub_formatter.keys()) + relpath = mozpath.relpath(path, base) if base else path + return base, relpath + + def add(self, path, content): + base, relpath = self._get_base(path) + if base is None: + return self.copier.add(relpath, content) + return self._sub_formatter[base].add(relpath, content) + + def add_manifest(self, entry): + base, relpath = self._get_base(entry.base) + assert base is not None + return self._sub_formatter[base].add_manifest(entry.move(relpath)) + + def add_interfaces(self, path, content): + base, relpath = self._get_base(path) + assert base is not None + return self._sub_formatter[base].add_interfaces(relpath, content) + + def contains(self, path): + assert "*" not in path + base, relpath = self._get_base(path) + if base is None: + return self.copier.contains(relpath) + return self._sub_formatter[base].contains(relpath) + + +class FlatFormatter(PiecemealFormatter): + """ + Formatter for the flat package format. + """ + + def _add_base(self, base, addon=False): + self._sub_formatter[base] = FlatSubFormatter( + FileRegistrySubtree(base, self.copier) + ) + + +class FlatSubFormatter(object): + """ + Sub-formatter for the flat package format. + """ + + def __init__(self, copier): + assert isinstance(copier, (FileRegistry, FileRegistrySubtree)) + self.copier = copier + self._chrome_db = {} + + def add(self, path, content): + self.copier.add(path, content) + + def add_manifest(self, entry): + # Store manifest entries in a single manifest per directory, named + # after their parent directory, except for root manifests, all named + # chrome.manifest. + if entry.base: + name = mozpath.basename(entry.base) + else: + name = "chrome" + path = mozpath.normpath(mozpath.join(entry.base, "%s.manifest" % name)) + if not self.copier.contains(path): + # Add a reference to the manifest file in the parent manifest, if + # the manifest file is not a root manifest. + if entry.base: + parent = mozpath.dirname(entry.base) + relbase = mozpath.basename(entry.base) + relpath = mozpath.join(relbase, mozpath.basename(path)) + self.add_manifest(Manifest(parent, relpath)) + self.copier.add(path, ManifestFile(entry.base)) + + if isinstance(entry, ManifestChrome): + data = self._chrome_db.setdefault(entry.name, {}) + if isinstance(entry, ManifestMultiContent): + entries = data.setdefault(entry.type, {}).setdefault(entry.id, []) + else: + entries = data.setdefault(entry.type, []) + for e in entries: + # Ideally, we'd actually check whether entry.flags are more + # specific than e.flags, but in practice the following test + # is enough for now. + if entry == e: + errors.warn('"%s" is duplicated. Skipping.' 
% entry) + return + if not entry.flags or e.flags and entry.flags == e.flags: + errors.fatal('"%s" overrides "%s"' % (entry, e)) + entries.append(entry) + + self.copier[path].add(entry) + + def add_interfaces(self, path, content): + self.copier.add(path, content) + self.add_manifest( + ManifestInterfaces(mozpath.dirname(path), mozpath.basename(path)) + ) + + def contains(self, path): + assert "*" not in path + return self.copier.contains(path) + + +class JarFormatter(PiecemealFormatter): + """ + Formatter for the jar package format. Assumes manifest entries related to + chrome are registered before the chrome data files are added. Also assumes + manifest entries for resources are registered after chrome manifest + entries. + """ + + def __init__(self, copier, compress=True): + PiecemealFormatter.__init__(self, copier) + self._compress = compress + + def _add_base(self, base, addon=False): + if addon is True: + jarrer = Jarrer(self._compress) + self.copier.add(base + ".xpi", jarrer) + self._sub_formatter[base] = FlatSubFormatter(jarrer) + else: + self._sub_formatter[base] = JarSubFormatter( + FileRegistrySubtree(base, self.copier), self._compress + ) + + +class JarSubFormatter(PiecemealFormatter): + """ + Sub-formatter for the jar package format. It is a PiecemealFormatter that + dispatches between further sub-formatter for each of the jar files it + dispatches the chrome data to, and a FlatSubFormatter for the non-chrome + files. + """ + + def __init__(self, copier, compress=True): + PiecemealFormatter.__init__(self, copier) + self._frozen_chrome = False + self._compress = compress + self._sub_formatter[""] = FlatSubFormatter(copier) + + def _jarize(self, entry, relpath): + """ + Transform a manifest entry in one pointing to chrome data in a jar. + Return the corresponding chrome path and the new entry. + """ + base = entry.base + basepath = mozpath.split(relpath)[0] + chromepath = mozpath.join(base, basepath) + entry = ( + entry.rebase(chromepath) + .move(mozpath.join(base, "jar:%s.jar!" % basepath)) + .rebase(base) + ) + return chromepath, entry + + def add_manifest(self, entry): + if isinstance(entry, ManifestChrome) and not urlparse(entry.relpath).scheme: + chromepath, entry = self._jarize(entry, entry.relpath) + assert not self._frozen_chrome + if chromepath not in self._sub_formatter: + jarrer = Jarrer(self._compress) + self.copier.add(chromepath + ".jar", jarrer) + self._sub_formatter[chromepath] = FlatSubFormatter(jarrer) + elif isinstance(entry, ManifestResource) and not urlparse(entry.target).scheme: + chromepath, new_entry = self._jarize(entry, entry.target) + if chromepath in self._sub_formatter: + entry = new_entry + PiecemealFormatter.add_manifest(self, entry) + + +class OmniJarFormatter(JarFormatter): + """ + Formatter for the omnijar package format. + """ + + def __init__(self, copier, omnijar_name, compress=True, non_resources=()): + JarFormatter.__init__(self, copier, compress) + self._omnijar_name = omnijar_name + self._non_resources = non_resources + + def _add_base(self, base, addon=False): + if addon: + # Because add_base is always called with parents before children, + # all the possible ancestry of `base` is already present in + # `_sub_formatter`. + parent_base = mozpath.basedir(base, self._sub_formatter.keys()) + rel_base = mozpath.relpath(base, parent_base) + # If the addon is under a resource directory, package it in the + # omnijar. 
+ parent_sub_formatter = self._sub_formatter[parent_base] + if parent_sub_formatter.is_resource(rel_base): + omnijar_sub_formatter = parent_sub_formatter._sub_formatter[ + self._omnijar_name + ] + self._sub_formatter[base] = FlatSubFormatter( + FileRegistrySubtree(rel_base, omnijar_sub_formatter.copier) + ) + return + JarFormatter._add_base(self, base, addon) + else: + self._sub_formatter[base] = OmniJarSubFormatter( + FileRegistrySubtree(base, self.copier), + self._omnijar_name, + self._compress, + self._non_resources, + ) + + +class OmniJarSubFormatter(PiecemealFormatter): + """ + Sub-formatter for the omnijar package format. It is a PiecemealFormatter + that dispatches between a FlatSubFormatter for the resources data and + another FlatSubFormatter for the other files. + """ + + def __init__(self, copier, omnijar_name, compress=True, non_resources=()): + PiecemealFormatter.__init__(self, copier) + self._omnijar_name = omnijar_name + self._compress = compress + self._non_resources = non_resources + self._sub_formatter[""] = FlatSubFormatter(copier) + jarrer = Jarrer(self._compress) + self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer) + + def _get_base(self, path): + base = self._omnijar_name if self.is_resource(path) else "" + # Only add the omnijar file if something ends up in it. + if base and not self.copier.contains(base): + self.copier.add(base, self._sub_formatter[base].copier) + return base, path + + def add_manifest(self, entry): + base = "" + if not isinstance(entry, ManifestBinaryComponent): + base = self._omnijar_name + formatter = self._sub_formatter[base] + return formatter.add_manifest(entry) + + def is_resource(self, path): + """ + Return whether the given path corresponds to a resource to be put in an + omnijar archive. + """ + if any(mozpath.match(path, p.replace("*", "**")) for p in self._non_resources): + return False + path = mozpath.split(path) + if path[0] == "chrome": + return len(path) == 1 or path[1] != "icons" + if path[0] == "components": + return path[-1].endswith((".js", ".xpt")) + if path[0] == "res": + return len(path) == 1 or ( + path[1] != "cursors" + and path[1] != "touchbar" + and path[1] != "MainMenu.nib" + ) + if path[0] == "defaults": + return len(path) != 3 or not ( + path[2] == "channel-prefs.js" and path[1] in ["pref", "preferences"] + ) + if len(path) <= 2 and path[-1] == "greprefs.js": + # Accommodate `greprefs.js` and `$ANDROID_CPU_ARCH/greprefs.js`. + return True + return path[0] in [ + "modules", + "actors", + "dictionaries", + "hyphenation", + "localization", + "update.locale", + "contentaccessible", + ] diff --git a/python/mozbuild/mozpack/packager/l10n.py b/python/mozbuild/mozpack/packager/l10n.py new file mode 100644 index 0000000000..76871e15cd --- /dev/null +++ b/python/mozbuild/mozpack/packager/l10n.py @@ -0,0 +1,304 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Replace localized parts of a packaged directory with data from a langpack +directory. 
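+
+Example (editor's sketch; the directory layout is hypothetical, and wiring
+_repack with UnpackFinder/FileCopier is an assumption based on the helpers
+defined in this module):
+
+.. code-block:: python
+
+    app_finder = UnpackFinder("dist/firefox")
+    l10n_finder = UnpackFinder("langpack/de")
+    copier = FileCopier()
+    _repack(app_finder, l10n_finder, copier, FlatFormatter(copier))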
+""" + +import json +import os + +import six +from createprecomplete import generate_precomplete + +import mozpack.path as mozpath +from mozpack.chrome.manifest import ( + Manifest, + ManifestChrome, + ManifestEntryWithRelPath, + ManifestLocale, + is_manifest, +) +from mozpack.copier import FileCopier, Jarrer +from mozpack.errors import errors +from mozpack.files import ComposedFinder, GeneratedFile, ManifestFile +from mozpack.mozjar import JAR_DEFLATED +from mozpack.packager import Component, SimpleManifestSink, SimplePackager +from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter +from mozpack.packager.unpack import UnpackFinder + + +class LocaleManifestFinder(object): + def __init__(self, finder): + entries = self.entries = [] + bases = self.bases = [] + + class MockFormatter(object): + def add_interfaces(self, path, content): + pass + + def add(self, path, content): + pass + + def add_manifest(self, entry): + if entry.localized: + entries.append(entry) + + def add_base(self, base, addon=False): + bases.append(base) + + # SimplePackager rejects "manifest foo.manifest" entries with + # additional flags (such as "manifest foo.manifest application=bar"). + # Those type of entries are used by language packs to work as addons, + # but are not necessary for the purpose of l10n repacking. So we wrap + # the finder in order to remove those entries. + class WrapFinder(object): + def __init__(self, finder): + self._finder = finder + + def find(self, pattern): + for p, f in self._finder.find(pattern): + if isinstance(f, ManifestFile): + unwanted = [ + e for e in f._entries if isinstance(e, Manifest) and e.flags + ] + if unwanted: + f = ManifestFile( + f._base, [e for e in f._entries if e not in unwanted] + ) + yield p, f + + sink = SimpleManifestSink(WrapFinder(finder), MockFormatter()) + sink.add(Component(""), "*") + sink.close(False) + + # Find unique locales used in these manifest entries. + self.locales = list( + set(e.id for e in self.entries if isinstance(e, ManifestLocale)) + ) + + +class L10NRepackFormatterMixin(object): + def __init__(self, *args, **kwargs): + super(L10NRepackFormatterMixin, self).__init__(*args, **kwargs) + self._dictionaries = {} + + def add(self, path, file): + base, relpath = self._get_base(path) + if path.endswith(".dic"): + if relpath.startswith("dictionaries/"): + root, ext = mozpath.splitext(mozpath.basename(path)) + self._dictionaries[root] = path + elif path.endswith("/built_in_addons.json"): + data = json.loads(six.ensure_text(file.open().read())) + data["dictionaries"] = self._dictionaries + # The GeneratedFile content is only really generated after + # all calls to formatter.add. + file = GeneratedFile(lambda: json.dumps(data)) + elif relpath.startswith("META-INF/"): + # Ignore signatures inside omnijars. We drop these items: if we + # don't treat them as omnijar resources, they will be included in + # the top-level package, and that's not how omnijars are signed (Bug + # 1750676). If we treat them as omnijar resources, they will stay + # in the omnijar, as expected -- but the signatures won't be valid + # after repacking. Therefore, drop them. 
+ return + super(L10NRepackFormatterMixin, self).add(path, file) + + +def L10NRepackFormatter(klass): + class L10NRepackFormatter(L10NRepackFormatterMixin, klass): + pass + + return L10NRepackFormatter + + +FlatFormatter = L10NRepackFormatter(FlatFormatter) +JarFormatter = L10NRepackFormatter(JarFormatter) +OmniJarFormatter = L10NRepackFormatter(OmniJarFormatter) + + +def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()): + app = LocaleManifestFinder(app_finder) + l10n = LocaleManifestFinder(l10n_finder) + + # The code further below assumes there's only one locale replaced with + # another one. + if len(app.locales) > 1: + errors.fatal("Multiple app locales aren't supported: " + ",".join(app.locales)) + if len(l10n.locales) > 1: + errors.fatal( + "Multiple l10n locales aren't supported: " + ",".join(l10n.locales) + ) + locale = app.locales[0] + l10n_locale = l10n.locales[0] + + # For each base directory, store what path a locale chrome package name + # corresponds to. + # e.g., for the following entry under app/chrome: + # locale foo en-US path/to/files + # keep track that the locale path for foo in app is + # app/chrome/path/to/files. + # As there may be multiple locale entries with the same base, but with + # different flags, that tracking takes the flags into account when there + # are some. Example: + # locale foo en-US path/to/files/win os=Win + # locale foo en-US path/to/files/mac os=Darwin + def key(entry): + if entry.flags: + return "%s %s" % (entry.name, entry.flags) + return entry.name + + l10n_paths = {} + for e in l10n.entries: + if isinstance(e, ManifestChrome): + base = mozpath.basedir(e.path, app.bases) + l10n_paths.setdefault(base, {}) + l10n_paths[base][key(e)] = e.path + + # For chrome and non chrome files or directories, store what langpack path + # corresponds to a package path. + paths = {} + for e in app.entries: + if isinstance(e, ManifestEntryWithRelPath): + base = mozpath.basedir(e.path, app.bases) + if base not in l10n_paths: + errors.fatal("Locale doesn't contain %s/" % base) + # Allow errors to accumulate + continue + if key(e) not in l10n_paths[base]: + errors.fatal("Locale doesn't have a manifest entry for '%s'" % e.name) + # Allow errors to accumulate + continue + paths[e.path] = l10n_paths[base][key(e)] + + for pattern in non_chrome: + for base in app.bases: + path = mozpath.join(base, pattern) + left = set(p for p, f in app_finder.find(path)) + right = set(p for p, f in l10n_finder.find(path)) + for p in right: + paths[p] = p + for p in left - right: + paths[p] = None + + # Create a new package, with non localized bits coming from the original + # package, and localized bits coming from the langpack. + packager = SimplePackager(formatter) + for p, f in app_finder: + if is_manifest(p): + # Remove localized manifest entries. + for e in [e for e in f if e.localized]: + f.remove(e) + # If the path is one that needs a locale replacement, use the + # corresponding file from the langpack. 
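+        # For example (hypothetical entries): if the app manifest registered
+        # "locale browser en-US en-US/locale/browser/" and the langpack
+        # registers "locale browser fr fr/locale/browser/", then paths maps
+        # the app's en-US directory to the langpack's fr directory, and every
+        # file underneath it is fetched from the langpack.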
+ path = None + if p in paths: + path = paths[p] + if not path: + continue + else: + base = mozpath.basedir(p, paths.keys()) + if base: + subpath = mozpath.relpath(p, base) + path = mozpath.normpath(mozpath.join(paths[base], subpath)) + + if path: + files = [f for p, f in l10n_finder.find(path)] + if not len(files): + if base not in non_chrome: + finderBase = "" + if hasattr(l10n_finder, "base"): + finderBase = l10n_finder.base + errors.error("Missing file: %s" % os.path.join(finderBase, path)) + else: + packager.add(path, files[0]) + else: + packager.add(p, f) + + # Add localized manifest entries from the langpack. + l10n_manifests = [] + for base in set(e.base for e in l10n.entries): + m = ManifestFile(base, [e for e in l10n.entries if e.base == base]) + path = mozpath.join(base, "chrome.%s.manifest" % l10n_locale) + l10n_manifests.append((path, m)) + bases = packager.get_bases() + for path, m in l10n_manifests: + base = mozpath.basedir(path, bases) + packager.add(path, m) + # Add a "manifest $path" entry in the top manifest under that base. + m = ManifestFile(base) + m.add(Manifest(base, mozpath.relpath(path, base))) + packager.add(mozpath.join(base, "chrome.manifest"), m) + + packager.close() + + # Add any remaining non chrome files. + for pattern in non_chrome: + for base in bases: + for p, f in l10n_finder.find(mozpath.join(base, pattern)): + if not formatter.contains(p): + formatter.add(p, f) + + # Resources in `localization` directories are packaged from the source and then + # if localized versions are present in the l10n dir, we package them as well, + # keeping the source dir resources as a runtime fallback. + for p, f in l10n_finder.find("**/localization"): + if not formatter.contains(p): + formatter.add(p, f) + + # Transplant jar preloading information. + for path, log in six.iteritems(app_finder.jarlogs): + assert isinstance(copier[path], Jarrer) + copier[path].preload([l.replace(locale, l10n_locale) for l in log]) + + +def repack( + source, l10n, extra_l10n={}, non_resources=[], non_chrome=set(), minify=False +): + """ + Replace localized data from the `source` directory with localized data + from `l10n` and `extra_l10n`. + + The `source` argument points to a directory containing a packaged + application (in omnijar, jar or flat form). + The `l10n` argument points to a directory containing the main localized + data (usually in the form of a language pack addon) to use to replace + in the packaged application. + The `extra_l10n` argument contains a dict associating relative paths in + the source to separate directories containing localized data for them. + This can be used to point at different language pack addons for different + parts of the packaged application. + The `non_resources` argument gives a list of relative paths in the source + that should not be added in an omnijar in case the packaged application + is in that format. + The `non_chrome` argument gives a list of file/directory patterns for + localized files that are not listed in a chrome.manifest. + If `minify`, `.properties` files are minified.
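+
+    A minimal sketch of a call, with hypothetical paths:
+
+        repack("dist/firefox", "fr-langpack",
+               extra_l10n={"browser": "fr-browser-langpack"},
+               minify=True)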
+ """ + app_finder = UnpackFinder(source, minify=minify) + l10n_finder = UnpackFinder(l10n, minify=minify) + if extra_l10n: + finders = { + "": l10n_finder, + } + for base, path in six.iteritems(extra_l10n): + finders[base] = UnpackFinder(path, minify=minify) + l10n_finder = ComposedFinder(finders) + copier = FileCopier() + compress = min(app_finder.compressed, JAR_DEFLATED) + if app_finder.kind == "flat": + formatter = FlatFormatter(copier) + elif app_finder.kind == "jar": + formatter = JarFormatter(copier, compress=compress) + elif app_finder.kind == "omni": + formatter = OmniJarFormatter( + copier, app_finder.omnijar, compress=compress, non_resources=non_resources + ) + + with errors.accumulate(): + _repack(app_finder, l10n_finder, copier, formatter, non_chrome) + copier.copy(source, skip_if_older=False) + generate_precomplete(source) diff --git a/python/mozbuild/mozpack/packager/unpack.py b/python/mozbuild/mozpack/packager/unpack.py new file mode 100644 index 0000000000..dff295eb9b --- /dev/null +++ b/python/mozbuild/mozpack/packager/unpack.py @@ -0,0 +1,200 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import codecs + +from six.moves.urllib.parse import urlparse + +import mozpack.path as mozpath +from mozpack.chrome.manifest import ( + ManifestEntryWithRelPath, + ManifestResource, + is_manifest, + parse_manifest, +) +from mozpack.copier import FileCopier, FileRegistry +from mozpack.files import BaseFinder, DeflatedFile, FileFinder, ManifestFile +from mozpack.mozjar import JarReader +from mozpack.packager import SimplePackager +from mozpack.packager.formats import FlatFormatter + + +class UnpackFinder(BaseFinder): + """ + Special Finder object that treats the source package directory as if it + were in the flat chrome format, whatever chrome format it actually is in. + + This means that for example, paths like chrome/browser/content/... match + files under jar:chrome/browser.jar!/content/... in case of jar chrome + format. + + The only argument to the constructor is a Finder instance or a path. + The UnpackFinder is populated with files from this Finder instance, + or with files from a FileFinder using the given path as its root. + """ + + def __init__(self, source, omnijar_name=None, unpack_xpi=True, **kwargs): + if isinstance(source, BaseFinder): + assert not kwargs + self._finder = source + else: + self._finder = FileFinder(source, **kwargs) + self.base = self._finder.base + self.files = FileRegistry() + self.kind = "flat" + if omnijar_name: + self.omnijar = omnijar_name + else: + # Can't include globally because of bootstrapping issues. + from buildconfig import substs + + self.omnijar = substs.get("OMNIJAR_NAME", "omni.ja") + self.jarlogs = {} + self.compressed = False + self._unpack_xpi = unpack_xpi + + jars = set() + + for p, f in self._finder.find("*"): + # Skip the precomplete file, which is generated at packaging time. + if p == "precomplete": + continue + base = mozpath.dirname(p) + # If the file matches the omnijar pattern, it is an omnijar. + # All the files it contains go in the directory containing the full + # pattern. Manifests are merged if there is a corresponding manifest + # in the directory. 
+ if self._maybe_zip(f) and mozpath.match(p, "**/%s" % self.omnijar): + jar = self._open_jar(p, f) + if "chrome.manifest" in jar: + self.kind = "omni" + self._fill_with_jar(p[: -len(self.omnijar) - 1], jar) + continue + # If the file is a manifest, scan its entries for ones referencing + # jar: URLs. If there are any, the files contained in the jar they + # point to go under a directory named after the jar. + if is_manifest(p): + m = self.files[p] if self.files.contains(p) else ManifestFile(base) + for e in parse_manifest( + self.base, p, codecs.getreader("utf-8")(f.open()) + ): + m.add(self._handle_manifest_entry(e, jars)) + if self.files.contains(p): + continue + f = m + # If we're unpacking packed addons and the file is a packed addon, + # unpack it under a directory named after the xpi. + if self._unpack_xpi and p.endswith(".xpi") and self._maybe_zip(f): + self._fill_with_jar(p[:-4], self._open_jar(p, f)) + continue + if p not in jars: + self.files.add(p, f) + + def _fill_with_jar(self, base, jar): + for j in jar: + path = mozpath.join(base, j.filename) + if is_manifest(j.filename): + m = ( + self.files[path] + if self.files.contains(path) + else ManifestFile(mozpath.dirname(path)) + ) + for e in parse_manifest(None, path, j): + m.add(e) + if not self.files.contains(path): + self.files.add(path, m) + continue + else: + self.files.add(path, DeflatedFile(j)) + + def _handle_manifest_entry(self, entry, jars): + jarpath = None + if ( + isinstance(entry, ManifestEntryWithRelPath) + and urlparse(entry.relpath).scheme == "jar" + ): + jarpath, entry = self._unjarize(entry, entry.relpath) + elif ( + isinstance(entry, ManifestResource) + and urlparse(entry.target).scheme == "jar" + ): + jarpath, entry = self._unjarize(entry, entry.target) + if jarpath: + # Don't defer unpacking the jar file. If we already saw + # it, take (and remove) it from the registry. If we + # haven't, try to find it now. + if self.files.contains(jarpath): + jar = self.files[jarpath] + self.files.remove(jarpath) + else: + jar = [f for p, f in self._finder.find(jarpath)] + assert len(jar) == 1 + jar = jar[0] + if jarpath not in jars: + base = mozpath.splitext(jarpath)[0] + for j in self._open_jar(jarpath, jar): + self.files.add(mozpath.join(base, j.filename), DeflatedFile(j)) + jars.add(jarpath) + self.kind = "jar" + return entry + + def _open_jar(self, path, file): + """ + Return a JarReader for the given BaseFile instance, keeping a log of + the preloaded entries it has. + """ + jar = JarReader(fileobj=file.open()) + self.compressed = max(self.compressed, jar.compression) + if jar.last_preloaded: + jarlog = list(jar.entries.keys()) + self.jarlogs[path] = jarlog[: jarlog.index(jar.last_preloaded) + 1] + return jar + + def find(self, path): + for p in self.files.match(path): + yield p, self.files[p] + + def _maybe_zip(self, file): + """ + Return whether the given BaseFile looks like a ZIP/Jar. + """ + header = file.open().read(8) + return len(header) == 8 and (header[0:2] == b"PK" or header[4:6] == b"PK") + + def _unjarize(self, entry, relpath): + """ + Transform a manifest entry pointing to chrome data in a jar into one + pointing to the corresponding unpacked path. Return the jar path and + the new entry. + """ + base = entry.base + jar, relpath = urlparse(relpath).path.split("!", 1) + entry = ( + entry.rebase(mozpath.join(base, "jar:%s!"
% jar)) + .move(mozpath.join(base, mozpath.splitext(jar)[0])) + .rebase(base) + ) + return mozpath.join(base, jar), entry + + +def unpack_to_registry(source, registry, omnijar_name=None): + """ + Transform a jar chrome or omnijar packaged directory into a flat package. + + The given registry is filled with the flat package. + """ + finder = UnpackFinder(source, omnijar_name) + packager = SimplePackager(FlatFormatter(registry)) + for p, f in finder.find("*"): + packager.add(p, f) + packager.close() + + +def unpack(source, omnijar_name=None): + """ + Transform a jar chrome or omnijar packaged directory into a flat package. + """ + copier = FileCopier() + unpack_to_registry(source, copier, omnijar_name) + copier.copy(source, skip_if_older=False) diff --git a/python/mozbuild/mozpack/path.py b/python/mozbuild/mozpack/path.py new file mode 100644 index 0000000000..3e5af0a06b --- /dev/null +++ b/python/mozbuild/mozpack/path.py @@ -0,0 +1,246 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Like :py:mod:`os.path`, with a reduced set of functions, and with normalized path +separators (always use forward slashes). +Also contains a few additional utilities not found in :py:mod:`os.path`. +""" + +import ctypes +import os +import posixpath +import re +import sys + + +def normsep(path): + """ + Normalize path separators, by using forward slashes instead of whatever + :py:const:`os.sep` is. + """ + if os.sep != "/": + # Python 2 is happy to do things like byte_string.replace(u'foo', + # u'bar'), but not Python 3. + if isinstance(path, bytes): + path = path.replace(os.sep.encode("ascii"), b"/") + else: + path = path.replace(os.sep, "/") + if os.altsep and os.altsep != "/": + if isinstance(path, bytes): + path = path.replace(os.altsep.encode("ascii"), b"/") + else: + path = path.replace(os.altsep, "/") + return path + + +def cargo_workaround(path): + unc = "//?/" + if path.startswith(unc): + return path[len(unc) :] + return path + + +def relpath(path, start): + path = normsep(path) + start = normsep(start) + if sys.platform == "win32": + # os.path.relpath can't handle relative paths between UNC and non-UNC + # paths, so strip a //?/ prefix if present (bug 1581248) + path = cargo_workaround(path) + start = cargo_workaround(start) + try: + rel = os.path.relpath(path, start) + except ValueError: + # On Windows this can throw a ValueError if the two paths are on + # different drives. In that case, just return the path. + return abspath(path) + rel = normsep(rel) + return "" if rel == "." else rel + + +def realpath(path): + return normsep(os.path.realpath(path)) + + +def abspath(path): + return normsep(os.path.abspath(path)) + + +def join(*paths): + return normsep(os.path.join(*paths)) + + +def normpath(path): + return posixpath.normpath(normsep(path)) + + +def dirname(path): + return posixpath.dirname(normsep(path)) + + +def commonprefix(paths): + return posixpath.commonprefix([normsep(path) for path in paths]) + + +def basename(path): + return os.path.basename(path) + + +def splitext(path): + return posixpath.splitext(normsep(path)) + + +def split(path): + """ + Return the normalized path as a list of its components. + + ``split('foo/bar/baz')`` returns ``['foo', 'bar', 'baz']`` + """ + return normsep(path).split("/") + + +def basedir(path, bases): + """ + Given a list of directories (`bases`), return which one contains the given + path. 
If several matches are found, the deepest base directory is returned. + + ``basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar'])`` returns ``'foo/bar'`` + (`'foo'` and `'foo/bar'` both match, but `'foo/bar'` is the deepest match) + """ + path = normsep(path) + bases = [normsep(b) for b in bases] + if path in bases: + return path + for b in sorted(bases, reverse=True): + if b == "" or path.startswith(b + "/"): + return b + + +re_cache = {} +# Python versions < 3.7 return r'\/' for re.escape('/'). +if re.escape("/") == "/": + MATCH_STAR_STAR_RE = re.compile(r"(^|/)\\\*\\\*/") + MATCH_STAR_STAR_END_RE = re.compile(r"(^|/)\\\*\\\*$") +else: + MATCH_STAR_STAR_RE = re.compile(r"(^|\\\/)\\\*\\\*\\\/") + MATCH_STAR_STAR_END_RE = re.compile(r"(^|\\\/)\\\*\\\*$") + + +def match(path, pattern): + """ + Return whether the given path matches the given pattern. + An asterisk can be used to match any string, including the null string, in + one part of the path: + + ``foo`` matches ``*``, ``f*`` or ``fo*o`` + + However, an asterisk matching a subdirectory may not match the null string: + + ``foo/bar`` does *not* match ``foo/*/bar`` + + If the pattern matches one of the ancestor directories of the path, the + path is considered matching: + + ``foo/bar`` matches ``foo`` + + Two adjacent asterisks can be used to match files and zero or more + directories and subdirectories. + + ``foo/bar`` matches ``foo/**/bar``, or ``**/bar`` + """ + if not pattern: + return True + if pattern not in re_cache: + p = re.escape(pattern) + p = MATCH_STAR_STAR_RE.sub(r"\1(?:.+/)?", p) + p = MATCH_STAR_STAR_END_RE.sub(r"(?:\1.+)?", p) + p = p.replace(r"\*", "[^/]*") + "(?:/.*)?$" + re_cache[pattern] = re.compile(p) + return re_cache[pattern].match(path) is not None + + +def rebase(oldbase, base, relativepath): + """ + Return `relativepath` relative to `base` instead of `oldbase`. + """ + if base == oldbase: + return relativepath + if len(base) < len(oldbase): + assert basedir(oldbase, [base]) == base + relbase = relpath(oldbase, base) + result = join(relbase, relativepath) + else: + assert basedir(base, [oldbase]) == oldbase + relbase = relpath(base, oldbase) + result = relpath(relativepath, relbase) + result = normpath(result) + if relativepath.endswith("/") and not result.endswith("/"): + result += "/" + return result + + +def readlink(path): + if hasattr(os, "readlink"): + return normsep(os.readlink(path)) + + # Unfortunately os.path.realpath doesn't support symlinks on Windows, and os.readlink + # is only available on Windows with Python 3.2+. We have to resort to ctypes...
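+    # The ctypes path below opens the file with FILE_FLAG_BACKUP_SEMANTICS
+    # (required to be able to open directories) and then asks the kernel for
+    # the handle's final, symlink-resolved path via GetFinalPathNameByHandleW.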
+ + assert sys.platform == "win32" + + CreateFileW = ctypes.windll.kernel32.CreateFileW + CreateFileW.argtypes = [ + ctypes.wintypes.LPCWSTR, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.LPVOID, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ctypes.wintypes.HANDLE, + ] + CreateFileW.restype = ctypes.wintypes.HANDLE + + GENERIC_READ = 0x80000000 + FILE_SHARE_READ = 0x00000001 + OPEN_EXISTING = 3 + FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 + + handle = CreateFileW( + path, + GENERIC_READ, + FILE_SHARE_READ, + 0, + OPEN_EXISTING, + FILE_FLAG_BACKUP_SEMANTICS, + 0, + ) + assert handle != 1, "Failed getting a handle to: {}".format(path) + + MAX_PATH = 260 + + buf = ctypes.create_unicode_buffer(MAX_PATH) + GetFinalPathNameByHandleW = ctypes.windll.kernel32.GetFinalPathNameByHandleW + GetFinalPathNameByHandleW.argtypes = [ + ctypes.wintypes.HANDLE, + ctypes.wintypes.LPWSTR, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ] + GetFinalPathNameByHandleW.restype = ctypes.wintypes.DWORD + + FILE_NAME_NORMALIZED = 0x0 + + rv = GetFinalPathNameByHandleW(handle, buf, MAX_PATH, FILE_NAME_NORMALIZED) + assert rv != 0 and rv <= MAX_PATH, "Failed getting final path for: {}".format(path) + + CloseHandle = ctypes.windll.kernel32.CloseHandle + CloseHandle.argtypes = [ctypes.wintypes.HANDLE] + CloseHandle.restype = ctypes.wintypes.BOOL + + rv = CloseHandle(handle) + assert rv != 0, "Failed closing handle" + + # Remove leading '\\?\' from the result. + return normsep(buf.value[4:]) diff --git a/python/mozbuild/mozpack/pkg.py b/python/mozbuild/mozpack/pkg.py new file mode 100644 index 0000000000..75a63b9746 --- /dev/null +++ b/python/mozbuild/mozpack/pkg.py @@ -0,0 +1,299 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import concurrent.futures +import lzma +import os +import plistlib +import struct +import subprocess +from pathlib import Path +from string import Template +from typing import List +from urllib.parse import quote + +import mozfile + +TEMPLATE_DIRECTORY = Path(__file__).parent / "apple_pkg" +PBZX_CHUNK_SIZE = 16 * 1024 * 1024  # 16MB chunks + + +def get_apple_template(name: str) -> Template: + """ + Given <name>, open file at <TEMPLATE_DIRECTORY>/<name>, read contents and + return as a Template + + Args: + name: str, Filename for the template + + Returns: + Template, loaded from file + """ + tmpl_path = TEMPLATE_DIRECTORY / name + if not tmpl_path.is_file(): + raise Exception(f"Could not find template: {tmpl_path}") + with tmpl_path.open("r") as tmpl: + contents = tmpl.read() + return Template(contents) + + +def save_text_file(content: str, destination: Path): + """ + Saves a text file to <destination> with provided <content> + Note: Overwrites contents + + Args: + content: str, The desired contents of the file + destination: Path, The file path + """ + with destination.open("w") as out_fd: + out_fd.write(content) + print(f"Created text file at {destination}") + print(f"Created text file size: {destination.stat().st_size} bytes") + + +def get_app_info_plist(app_path: Path) -> dict: + """ + Retrieve most information from Info.plist file of an app.
+ The Info.plist file should be located in ?.app/Contents/Info.plist + + Note: Ignores properties that are not <string> type + + Args: + app_path: Path, the .app file/directory path + + Returns: + dict, the dictionary of properties found in Info.plist + """ + info_plist = app_path / "Contents/Info.plist" + if not info_plist.is_file(): + raise Exception(f"Could not find Info.plist in {info_plist}") + + print(f"Reading app Info.plist from: {info_plist}") + + with info_plist.open("rb") as plist_fd: + data = plistlib.load(plist_fd) + + return data + + +def create_payload(destination: Path, root_path: Path, cpio_tool: str): + """ + Creates a payload at <destination> based on <root_path> + + Args: + destination: Path, the destination Path + root_path: Path, the root directory Path + cpio_tool: str, + """ + # Files to be cpio'd are root folder + contents + file_list = ["./"] + get_relative_glob_list(root_path, "**/*") + + with mozfile.TemporaryDirectory() as tmp_dir: + tmp_payload_path = Path(tmp_dir) / "Payload" + print(f"Creating Payload with cpio from {root_path} to {tmp_payload_path}") + print(f"Found {len(file_list)} files") + with tmp_payload_path.open("wb") as tmp_payload: + process = subprocess.run( + [ + cpio_tool, + "-o",  # copy-out mode + "--format", + "odc",  # old POSIX .1 portable format + "--owner", + "0:80",  # clean ownership + ], + stdout=tmp_payload, + stderr=subprocess.PIPE, + input="\n".join(file_list) + "\n", + encoding="ascii", + cwd=root_path, + ) + # cpio outputs number of blocks to stderr + print(f"[CPIO]: {process.stderr}") + if process.returncode: + raise Exception(f"CPIO error {process.returncode}") + + tmp_payload_size = tmp_payload_path.stat().st_size + print(f"Uncompressed Payload size: {tmp_payload_size // 1024}kb") + + def compress_chunk(chunk): + compressed_chunk = lzma.compress(chunk) + return len(chunk), compressed_chunk + + def chunker(fileobj, chunk_size): + while True: + chunk = fileobj.read(chunk_size) + if not chunk: + break + yield chunk + + with tmp_payload_path.open("rb") as f_in, destination.open( + "wb" + ) as f_out, concurrent.futures.ThreadPoolExecutor( + max_workers=os.cpu_count() + ) as executor: + f_out.write(b"pbzx") + f_out.write(struct.pack(">Q", PBZX_CHUNK_SIZE)) + chunks = chunker(f_in, PBZX_CHUNK_SIZE) + for uncompressed_size, compressed_chunk in executor.map( + compress_chunk, chunks + ): + f_out.write(struct.pack(">Q", uncompressed_size)) + if len(compressed_chunk) < uncompressed_size: + f_out.write(struct.pack(">Q", len(compressed_chunk))) + f_out.write(compressed_chunk) + else: + # Considering how unlikely this is, we prefer to just decompress + # here than to keep the original uncompressed chunk around + f_out.write(struct.pack(">Q", uncompressed_size)) + f_out.write(lzma.decompress(compressed_chunk)) + + print(f"Compressed Payload file to {destination}") + print(f"Compressed Payload size: {destination.stat().st_size // 1024}kb") + + +def create_bom(bom_path: Path, root_path: Path, mkbom_tool: Path): + """ + Creates a Bill Of Materials file at <bom_path> based on <root_path> + + Args: + bom_path: Path, destination Path for the BOM file + root_path: Path, root directory Path + mkbom_tool: Path, mkbom tool Path + """ + print(f"Creating BOM file from {root_path} to {bom_path}") + subprocess.check_call( + [ + mkbom_tool, + "-u", + "0", + "-g", + "80", + str(root_path), + str(bom_path), + ] + ) + print(f"Created BOM File size: {bom_path.stat().st_size // 1024}kb") + + +def get_relative_glob_list(source: Path, glob: str) -> List[str]: + """ + Given a source path, return a list of relative paths based on <glob> + + Args: + source: Path, source directory Path + glob: str, unix style glob + + Returns: + list[str], paths found in source directory + """ + return [f"./{c.relative_to(source)}" for c in source.glob(glob)] + + +def xar_package_folder(source_path: Path, destination: Path, xar_tool: Path): + """ + Create a pkg from <source_path> to <destination> + The command is issued with <source_path> as cwd + + Args: + source_path: Path, source absolute Path + destination: Path, destination absolute Path + xar_tool: Path, xar tool Path + """ + if not source_path.is_absolute() or not destination.is_absolute(): + raise Exception("Source and destination should be absolute.") + + print(f"Creating pkg from {source_path} to {destination}") + # Create a list of ./<file> entries - noting xar takes care of <file>/** + file_list = get_relative_glob_list(source_path, "*") + + subprocess.check_call( + [ + xar_tool, + "--compression", + "none", + "-vcf", + destination, + *file_list, + ], + cwd=source_path, + ) + print(f"Created PKG file to {destination}") + print(f"Created PKG size: {destination.stat().st_size // 1024}kb") + + +def create_pkg( + source_app: Path, + output_pkg: Path, + mkbom_tool: Path, + xar_tool: Path, + cpio_tool: Path, +): + """ + Create a mac PKG installer from <source_app> to <output_pkg> + + Args: + source_app: Path, source .app file/directory Path + output_pkg: Path, destination .pkg file + mkbom_tool: Path, mkbom tool Path + xar_tool: Path, xar tool Path + cpio_tool: Path, cpio tool Path + """ + + app_name = source_app.name.rsplit(".", maxsplit=1)[0] + + with mozfile.TemporaryDirectory() as tmpdir: + root_path = Path(tmpdir) / "darwin/root" + flat_path = Path(tmpdir) / "darwin/flat" + + # Create required directories + # TODO: Investigate Resources folder contents for other lproj? + (flat_path / "Resources/en.lproj").mkdir(parents=True, exist_ok=True) + (flat_path / f"{app_name}.pkg").mkdir(parents=True, exist_ok=True) + root_path.mkdir(parents=True, exist_ok=True) + + # Copy files over + subprocess.check_call( + [ + "cp", + "-R", + str(source_app), + str(root_path), + ] + ) + + # Count all files (innards + itself) + file_count = len(list(source_app.glob("**/*"))) + 1 + print(f"Calculated source files count: {file_count}") + # Get package contents size + package_size = sum(f.stat().st_size for f in source_app.glob("**/*")) // 1024 + print(f"Calculated source package size: {package_size}kb") + + app_info = get_app_info_plist(source_app) + app_info["numberOfFiles"] = file_count + app_info["installKBytes"] = package_size + app_info["app_name"] = app_name + app_info["app_name_url_encoded"] = quote(app_name) + + # This seems arbitrary, there might be another way of doing it, + # but Info.plist doesn't provide the simple version we need + major_version = app_info["CFBundleShortVersionString"].split(".")[0] + app_info["simple_version"] = f"{major_version}.0.0" + + pkg_info_tmpl = get_apple_template("PackageInfo.template") + pkg_info = pkg_info_tmpl.substitute(app_info) + save_text_file(pkg_info, flat_path / f"{app_name}.pkg/PackageInfo") + + distribution_tmp = get_apple_template("Distribution.template") + distribution = distribution_tmp.substitute(app_info) + save_text_file(distribution, flat_path / "Distribution") + + payload_path = flat_path / f"{app_name}.pkg/Payload" + create_payload(payload_path, root_path, cpio_tool) + + bom_path = flat_path / f"{app_name}.pkg/Bom" + create_bom(bom_path, root_path, mkbom_tool) + + xar_package_folder(flat_path, output_pkg, xar_tool) diff --git a/python/mozbuild/mozpack/test/__init__.py b/python/mozbuild/mozpack/test/__init__.py new file mode
100644 index 0000000000..e69de29bb2 diff --git a/python/mozbuild/mozpack/test/data/test_data b/python/mozbuild/mozpack/test/data/test_data new file mode 100644 index 0000000000..fb7f0c4fc2 --- /dev/null +++ b/python/mozbuild/mozpack/test/data/test_data @@ -0,0 +1 @@ +test_data \ No newline at end of file diff --git a/python/mozbuild/mozpack/test/python.ini b/python/mozbuild/mozpack/test/python.ini new file mode 100644 index 0000000000..2b229de945 --- /dev/null +++ b/python/mozbuild/mozpack/test/python.ini @@ -0,0 +1,18 @@ +[DEFAULT] +subsuite = mozbuild + +[test_archive.py] +[test_chrome_flags.py] +[test_chrome_manifest.py] +[test_copier.py] +[test_errors.py] +[test_files.py] +[test_manifests.py] +[test_mozjar.py] +[test_packager.py] +[test_packager_formats.py] +[test_packager_l10n.py] +[test_packager_unpack.py] +[test_path.py] +[test_pkg.py] +[test_unify.py] diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py new file mode 100644 index 0000000000..88cc0ece0c --- /dev/null +++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py @@ -0,0 +1,15 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import sys + +if len(sys.argv) != 4: + raise Exception("Usage: minify_js_verify <retcode> <orig> <minified>") + +retcode = int(sys.argv[1]) + +if retcode: + print("Error message", file=sys.stderr) + +sys.exit(retcode) diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py new file mode 100644 index 0000000000..3417f279df --- /dev/null +++ b/python/mozbuild/mozpack/test/test_archive.py @@ -0,0 +1,197 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import hashlib +import os +import shutil +import stat +import tarfile +import tempfile +import unittest + +import pytest +from mozunit import main + +from mozpack.archive import ( + DEFAULT_MTIME, + create_tar_bz2_from_files, + create_tar_from_files, + create_tar_gz_from_files, +) +from mozpack.files import GeneratedFile + +MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH + + +def file_hash(path): + h = hashlib.sha1() + with open(path, "rb") as fh: + while True: + data = fh.read(8192) + if not data: + break + h.update(data) + + return h.hexdigest() + + +class TestArchive(unittest.TestCase): + def _create_files(self, root): + files = {} + for i in range(10): + p = os.path.join(root, "file%02d" % i) + with open(p, "wb") as fh: + fh.write(b"file%02d" % i) + # Need to set permissions or umask may influence testing. + os.chmod(p, MODE_STANDARD) + files["file%02d" % i] = p + + for i in range(10): + files["file%02d" % (i + 10)] = GeneratedFile(b"file%02d" % (i + 10)) + + return files + + def _verify_basic_tarfile(self, tf): + self.assertEqual(len(tf.getmembers()), 20) + + names = ["file%02d" % i for i in range(20)] + self.assertEqual(tf.getnames(), names) + + for ti in tf.getmembers(): + self.assertEqual(ti.uid, 0) + self.assertEqual(ti.gid, 0) + self.assertEqual(ti.uname, "") + self.assertEqual(ti.gname, "") + self.assertEqual(ti.mode, MODE_STANDARD) + self.assertEqual(ti.mtime, DEFAULT_MTIME) + + @pytest.mark.xfail( + reason="ValueError is not thrown despite being provided directory."
+ ) + def test_dirs_refused(self): + d = tempfile.mkdtemp() + try: + tp = os.path.join(d, "test.tar") + with open(tp, "wb") as fh: + with self.assertRaisesRegexp(ValueError, "not a regular"): + create_tar_from_files(fh, {"test": d}) + finally: + shutil.rmtree(d) + + @pytest.mark.xfail(reason="ValueError is not thrown despite uid/gid being set.") + def test_setuid_setgid_refused(self): + d = tempfile.mkdtemp() + try: + uid = os.path.join(d, "setuid") + gid = os.path.join(d, "setgid") + with open(uid, "a"): + pass + with open(gid, "a"): + pass + + os.chmod(uid, MODE_STANDARD | stat.S_ISUID) + os.chmod(gid, MODE_STANDARD | stat.S_ISGID) + + tp = os.path.join(d, "test.tar") + with open(tp, "wb") as fh: + with self.assertRaisesRegexp(ValueError, "cannot add file with setuid"): + create_tar_from_files(fh, {"test": uid}) + with self.assertRaisesRegexp(ValueError, "cannot add file with setuid"): + create_tar_from_files(fh, {"test": gid}) + finally: + shutil.rmtree(d) + + def test_create_tar_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + tp = os.path.join(d, "test.tar") + with open(tp, "wb") as fh: + create_tar_from_files(fh, files) + + # Output should be deterministic. + self.assertEqual(file_hash(tp), "01cd314e277f060e98c7de6c8ea57f96b3a2065c") + + with tarfile.open(tp, "r") as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + @pytest.mark.xfail(reason="hash mismatch") + def test_executable_preserved(self): + d = tempfile.mkdtemp() + try: + p = os.path.join(d, "exec") + with open(p, "wb") as fh: + fh.write(b"#!/bin/bash\n") + os.chmod(p, MODE_STANDARD | stat.S_IXUSR) + + tp = os.path.join(d, "test.tar") + with open(tp, "wb") as fh: + create_tar_from_files(fh, {"exec": p}) + + self.assertEqual(file_hash(tp), "357e1b81c0b6cfdfa5d2d118d420025c3c76ee93") + + with tarfile.open(tp, "r") as tf: + m = tf.getmember("exec") + self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR) + + finally: + shutil.rmtree(d) + + def test_create_tar_gz_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + gp = os.path.join(d, "test.tar.gz") + with open(gp, "wb") as fh: + create_tar_gz_from_files(fh, files) + + self.assertEqual(file_hash(gp), "7c4da5adc5088cdf00911d5daf9a67b15de714b7") + + with tarfile.open(gp, "r:gz") as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_tar_gz_name(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + gp = os.path.join(d, "test.tar.gz") + with open(gp, "wb") as fh: + create_tar_gz_from_files(fh, files, filename="foobar") + + self.assertEqual(file_hash(gp), "721e00083c17d16df2edbddf40136298c06d0c49") + + with tarfile.open(gp, "r:gz") as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_create_tar_bz2_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + bp = os.path.join(d, "test.tar.bz2") + with open(bp, "wb") as fh: + create_tar_bz2_from_files(fh, files) + + self.assertEqual(file_hash(bp), "eb5096d2fbb71df7b3d690001a6f2e82a5aad6a7") + + with tarfile.open(bp, "r:bz2") as tf: + self._verify_basic_tarfile(tf) + finally: + shutil.rmtree(d) + + +if __name__ == "__main__": + main() diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py new file mode 100644 index 0000000000..4f1a968dc2 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_chrome_flags.py @@ -0,0 +1,150 @@ +# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest + +import mozunit + +from mozpack.chrome.flags import Flag, Flags, StringFlag, VersionFlag +from mozpack.errors import ErrorMessage + + +class TestFlag(unittest.TestCase): + def test_flag(self): + flag = Flag("flag") + self.assertEqual(str(flag), "") + self.assertTrue(flag.matches(False)) + self.assertTrue(flag.matches("false")) + self.assertFalse(flag.matches("true")) + self.assertRaises(ErrorMessage, flag.add_definition, "flag=") + self.assertRaises(ErrorMessage, flag.add_definition, "flag=42") + self.assertRaises(ErrorMessage, flag.add_definition, "flag!=false") + + flag.add_definition("flag=1") + self.assertEqual(str(flag), "flag=1") + self.assertTrue(flag.matches(True)) + self.assertTrue(flag.matches("1")) + self.assertFalse(flag.matches("no")) + + flag.add_definition("flag=true") + self.assertEqual(str(flag), "flag=true") + self.assertTrue(flag.matches(True)) + self.assertTrue(flag.matches("true")) + self.assertFalse(flag.matches("0")) + + flag.add_definition("flag=no") + self.assertEqual(str(flag), "flag=no") + self.assertTrue(flag.matches("false")) + self.assertFalse(flag.matches("1")) + + flag.add_definition("flag") + self.assertEqual(str(flag), "flag") + self.assertFalse(flag.matches("false")) + self.assertTrue(flag.matches("true")) + self.assertFalse(flag.matches(False)) + + def test_string_flag(self): + flag = StringFlag("flag") + self.assertEqual(str(flag), "") + self.assertTrue(flag.matches("foo")) + self.assertRaises(ErrorMessage, flag.add_definition, "flag>=2") + + flag.add_definition("flag=foo") + self.assertEqual(str(flag), "flag=foo") + self.assertTrue(flag.matches("foo")) + self.assertFalse(flag.matches("bar")) + + flag.add_definition("flag=bar") + self.assertEqual(str(flag), "flag=foo flag=bar") + self.assertTrue(flag.matches("foo")) + self.assertTrue(flag.matches("bar")) + self.assertFalse(flag.matches("baz")) + + flag = StringFlag("flag") + flag.add_definition("flag!=bar") + self.assertEqual(str(flag), "flag!=bar") + self.assertTrue(flag.matches("foo")) + self.assertFalse(flag.matches("bar")) + + def test_version_flag(self): + flag = VersionFlag("flag") + self.assertEqual(str(flag), "") + self.assertTrue(flag.matches("1.0")) + self.assertRaises(ErrorMessage, flag.add_definition, "flag!=2") + + flag.add_definition("flag=1.0") + self.assertEqual(str(flag), "flag=1.0") + self.assertTrue(flag.matches("1.0")) + self.assertFalse(flag.matches("2.0")) + + flag.add_definition("flag=2.0") + self.assertEqual(str(flag), "flag=1.0 flag=2.0") + self.assertTrue(flag.matches("1.0")) + self.assertTrue(flag.matches("2.0")) + self.assertFalse(flag.matches("3.0")) + + flag = VersionFlag("flag") + flag.add_definition("flag>=2.0") + self.assertEqual(str(flag), "flag>=2.0") + self.assertFalse(flag.matches("1.0")) + self.assertTrue(flag.matches("2.0")) + self.assertTrue(flag.matches("3.0")) + + flag.add_definition("flag<1.10") + self.assertEqual(str(flag), "flag>=2.0 flag<1.10") + self.assertTrue(flag.matches("1.0")) + self.assertTrue(flag.matches("1.9")) + self.assertFalse(flag.matches("1.10")) + self.assertFalse(flag.matches("1.20")) + self.assertTrue(flag.matches("2.0")) + self.assertTrue(flag.matches("3.0")) + self.assertRaises(Exception, flag.add_definition, "flag<") + self.assertRaises(Exception, flag.add_definition, "flag>") + self.assertRaises(Exception, flag.add_definition, "flag>=") + self.assertRaises(Exception, flag.add_definition, 
"flag<=") + self.assertRaises(Exception, flag.add_definition, "flag!=1.0") + + +class TestFlags(unittest.TestCase): + def setUp(self): + self.flags = Flags( + "contentaccessible=yes", + "appversion>=3.5", + "application=foo", + "application=bar", + "appversion<2.0", + "platform", + "abi!=Linux_x86-gcc3", + ) + + def test_flags_str(self): + self.assertEqual( + str(self.flags), + "contentaccessible=yes " + + "appversion>=3.5 appversion<2.0 application=foo " + + "application=bar platform abi!=Linux_x86-gcc3", + ) + + def test_flags_match_unset(self): + self.assertTrue(self.flags.match(os="WINNT")) + + def test_flags_match(self): + self.assertTrue(self.flags.match(application="foo")) + self.assertFalse(self.flags.match(application="qux")) + + def test_flags_match_different(self): + self.assertTrue(self.flags.match(abi="WINNT_x86-MSVC")) + self.assertFalse(self.flags.match(abi="Linux_x86-gcc3")) + + def test_flags_match_version(self): + self.assertTrue(self.flags.match(appversion="1.0")) + self.assertTrue(self.flags.match(appversion="1.5")) + self.assertFalse(self.flags.match(appversion="2.0")) + self.assertFalse(self.flags.match(appversion="3.0")) + self.assertTrue(self.flags.match(appversion="3.5")) + self.assertTrue(self.flags.match(appversion="3.10")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py new file mode 100644 index 0000000000..c1d5826bbc --- /dev/null +++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py @@ -0,0 +1,176 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import unittest + +import mozunit + +from mozpack.chrome.manifest import ( + MANIFESTS_TYPES, + Manifest, + ManifestBinaryComponent, + ManifestCategory, + ManifestComponent, + ManifestContent, + ManifestContract, + ManifestInterfaces, + ManifestLocale, + ManifestOverlay, + ManifestOverride, + ManifestResource, + ManifestSkin, + ManifestStyle, + parse_manifest, + parse_manifest_line, +) +from mozpack.errors import AccumulatedErrors, errors +from test_errors import TestErrors + + +class TestManifest(unittest.TestCase): + def test_parse_manifest(self): + manifest = [ + "content global content/global/", + "content global content/global/ application=foo application=bar" + + " platform", + "locale global en-US content/en-US/", + "locale global en-US content/en-US/ application=foo", + "skin global classic/1.0 content/skin/classic/", + "skin global classic/1.0 content/skin/classic/ application=foo" + + " os=WINNT", + "", + "manifest pdfjs/chrome.manifest", + "resource gre-resources toolkit/res/", + "override chrome://global/locale/netError.dtd" + + " chrome://browser/locale/netError.dtd", + "# Comment", + "component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js", + "contract @mozilla.org/foo;1" + " {b2bba4df-057d-41ea-b6b1-94a10a8ede68}", + "interfaces foo.xpt", + "binary-component bar.so", + "category command-line-handler m-browser" + + " @mozilla.org/browser/clh;1" + + " application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}", + "style chrome://global/content/viewSource.xul" + " chrome://browser/skin/", + "overlay chrome://global/content/viewSource.xul" + + " chrome://browser/content/viewSourceOverlay.xul", + ] + other_manifest = ["content global content/global/"] + expected_result = [ + ManifestContent("", "global", "content/global/"), + ManifestContent( + "", + "global", + "content/global/", + "application=foo", + "application=bar", + "platform", + ), + ManifestLocale("", "global", "en-US", "content/en-US/"), + ManifestLocale("", "global", "en-US", "content/en-US/", "application=foo"), + ManifestSkin("", "global", "classic/1.0", "content/skin/classic/"), + ManifestSkin( + "", + "global", + "classic/1.0", + "content/skin/classic/", + "application=foo", + "os=WINNT", + ), + Manifest("", "pdfjs/chrome.manifest"), + ManifestResource("", "gre-resources", "toolkit/res/"), + ManifestOverride( + "", + "chrome://global/locale/netError.dtd", + "chrome://browser/locale/netError.dtd", + ), + ManifestComponent("", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}", "foo.js"), + ManifestContract( + "", "@mozilla.org/foo;1", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}" + ), + ManifestInterfaces("", "foo.xpt"), + ManifestBinaryComponent("", "bar.so"), + ManifestCategory( + "", + "command-line-handler", + "m-browser", + "@mozilla.org/browser/clh;1", + "application=" + "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}", + ), + ManifestStyle( + "", "chrome://global/content/viewSource.xul", "chrome://browser/skin/" + ), + ManifestOverlay( + "", + "chrome://global/content/viewSource.xul", + "chrome://browser/content/viewSourceOverlay.xul", + ), + ] + with mozunit.MockedOpen( + { + "manifest": "\n".join(manifest), + "other/manifest": "\n".join(other_manifest), + } + ): + # Ensure we have tests for all types of manifests. 
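+            # (Each manifest line is parsed into the ManifestEntry subclass
+            # keyed by its first token; e.g. "locale global en-US ..." maps
+            # to ManifestLocale, per MANIFESTS_TYPES.)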
+ self.assertEqual( + set(type(e) for e in expected_result), set(MANIFESTS_TYPES.values()) + ) + self.assertEqual( + list(parse_manifest(os.curdir, "manifest")), expected_result + ) + self.assertEqual( + list(parse_manifest(os.curdir, "other/manifest")), + [ManifestContent("other", "global", "content/global/")], + ) + + def test_manifest_rebase(self): + m = parse_manifest_line("chrome", "content global content/global/") + m = m.rebase("") + self.assertEqual(str(m), "content global chrome/content/global/") + m = m.rebase("chrome") + self.assertEqual(str(m), "content global content/global/") + + m = parse_manifest_line("chrome/foo", "content global content/global/") + m = m.rebase("chrome") + self.assertEqual(str(m), "content global foo/content/global/") + m = m.rebase("chrome/foo") + self.assertEqual(str(m), "content global content/global/") + + m = parse_manifest_line("modules/foo", "resource foo ./") + m = m.rebase("modules") + self.assertEqual(str(m), "resource foo foo/") + m = m.rebase("modules/foo") + self.assertEqual(str(m), "resource foo ./") + + m = parse_manifest_line("chrome", "content browser browser/content/") + m = m.rebase("chrome/browser").move("jar:browser.jar!").rebase("") + self.assertEqual(str(m), "content browser jar:browser.jar!/content/") + + +class TestManifestErrors(TestErrors, unittest.TestCase): + def test_parse_manifest_errors(self): + manifest = [ + "skin global classic/1.0 content/skin/classic/ platform", + "", + "binary-component bar.so", + "unsupported foo", + ] + with mozunit.MockedOpen({"manifest": "\n".join(manifest)}): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + list(parse_manifest(os.curdir, "manifest")) + out = self.get_output() + # Expecting 2 errors + self.assertEqual(len(out), 2) + path = os.path.abspath("manifest") + # First on line 1 + self.assertTrue(out[0].startswith("error: %s:1: " % path)) + # Second on line 4 + self.assertTrue(out[1].startswith("error: %s:4: " % path)) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py new file mode 100644 index 0000000000..60ebd2c1e9 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_copier.py @@ -0,0 +1,548 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import stat +import unittest + +import mozunit +import six + +import mozpack.path as mozpath +from mozpack.copier import FileCopier, FileRegistry, FileRegistrySubtree, Jarrer +from mozpack.errors import ErrorMessage +from mozpack.files import ExistingFile, GeneratedFile +from mozpack.mozjar import JarReader +from mozpack.test.test_files import MatchTestTemplate, MockDest, TestWithTmpDir + + +class BaseTestFileRegistry(MatchTestTemplate): + def add(self, path): + self.registry.add(path, GeneratedFile(path)) + + def do_check(self, pattern, result): + self.checked = True + if result: + self.assertTrue(self.registry.contains(pattern)) + else: + self.assertFalse(self.registry.contains(pattern)) + self.assertEqual(self.registry.match(pattern), result) + + def do_test_file_registry(self, registry): + self.registry = registry + self.registry.add("foo", GeneratedFile(b"foo")) + bar = GeneratedFile(b"bar") + self.registry.add("bar", bar) + self.assertEqual(self.registry.paths(), ["foo", "bar"]) + self.assertEqual(self.registry["bar"], bar) + + self.assertRaises( + ErrorMessage, self.registry.add, "foo", GeneratedFile(b"foo2") + ) + + self.assertRaises(ErrorMessage, self.registry.remove, "qux") + + self.assertRaises( + ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar") + ) + self.assertRaises( + ErrorMessage, self.registry.add, "foo/bar/baz", GeneratedFile(b"foobar") + ) + + self.assertEqual(self.registry.paths(), ["foo", "bar"]) + + self.registry.remove("foo") + self.assertEqual(self.registry.paths(), ["bar"]) + self.registry.remove("bar") + self.assertEqual(self.registry.paths(), []) + + self.prepare_match_test() + self.do_match_test() + self.assertTrue(self.checked) + self.assertEqual( + self.registry.paths(), + [ + "bar", + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + + self.registry.remove("foo/qux") + self.assertEqual(self.registry.paths(), ["bar", "foo/bar", "foo/baz"]) + + self.registry.add("foo/qux", GeneratedFile(b"fooqux")) + self.assertEqual( + self.registry.paths(), ["bar", "foo/bar", "foo/baz", "foo/qux"] + ) + self.registry.remove("foo/b*") + self.assertEqual(self.registry.paths(), ["bar", "foo/qux"]) + + self.assertEqual([f for f, c in self.registry], ["bar", "foo/qux"]) + self.assertEqual(len(self.registry), 2) + + self.add("foo/.foo") + self.assertTrue(self.registry.contains("foo/.foo")) + + def do_test_registry_paths(self, registry): + self.registry = registry + + # Can't add a file if it requires a directory in place of a + # file we also require. + self.registry.add("foo", GeneratedFile(b"foo")) + self.assertRaises( + ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar") + ) + + # Can't add a file if we already have a directory there. + self.registry.add("bar/baz", GeneratedFile(b"barbaz")) + self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar")) + + # Bump the count of things that require bar/ to 2. + self.registry.add("bar/zot", GeneratedFile(b"barzot")) + self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar")) + + # Drop the count of things that require bar/ to 1. + self.registry.remove("bar/baz") + self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar")) + + # Drop the count of things that require bar/ to 0. 
+ self.registry.remove("bar/zot") + self.registry.add("bar/zot", GeneratedFile(b"barzot")) + + +class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase): + def test_partial_paths(self): + cases = { + "foo/bar/baz/zot": ["foo/bar/baz", "foo/bar", "foo"], + "foo/bar": ["foo"], + "bar": [], + } + reg = FileRegistry() + for path, parts in six.iteritems(cases): + self.assertEqual(reg._partial_paths(path), parts) + + def test_file_registry(self): + self.do_test_file_registry(FileRegistry()) + + def test_registry_paths(self): + self.do_test_registry_paths(FileRegistry()) + + def test_required_directories(self): + self.registry = FileRegistry() + + self.registry.add("foo", GeneratedFile(b"foo")) + self.assertEqual(self.registry.required_directories(), set()) + + self.registry.add("bar/baz", GeneratedFile(b"barbaz")) + self.assertEqual(self.registry.required_directories(), {"bar"}) + + self.registry.add("bar/zot", GeneratedFile(b"barzot")) + self.assertEqual(self.registry.required_directories(), {"bar"}) + + self.registry.add("bar/zap/zot", GeneratedFile(b"barzapzot")) + self.assertEqual(self.registry.required_directories(), {"bar", "bar/zap"}) + + self.registry.remove("bar/zap/zot") + self.assertEqual(self.registry.required_directories(), {"bar"}) + + self.registry.remove("bar/baz") + self.assertEqual(self.registry.required_directories(), {"bar"}) + + self.registry.remove("bar/zot") + self.assertEqual(self.registry.required_directories(), set()) + + self.registry.add("x/y/z", GeneratedFile(b"xyz")) + self.assertEqual(self.registry.required_directories(), {"x", "x/y"}) + + +class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase): + def test_file_registry_subtree_base(self): + registry = FileRegistry() + self.assertEqual(registry, FileRegistrySubtree("", registry)) + self.assertNotEqual(registry, FileRegistrySubtree("base", registry)) + + def create_registry(self): + registry = FileRegistry() + registry.add("foo/bar", GeneratedFile(b"foo/bar")) + registry.add("baz/qux", GeneratedFile(b"baz/qux")) + return FileRegistrySubtree("base/root", registry) + + def test_file_registry_subtree(self): + self.do_test_file_registry(self.create_registry()) + + def test_registry_paths_subtree(self): + FileRegistry() + self.do_test_registry_paths(self.create_registry()) + + +class TestFileCopier(TestWithTmpDir): + def all_dirs(self, base): + all_dirs = set() + for root, dirs, files in os.walk(base): + if not dirs: + all_dirs.add(mozpath.relpath(root, base)) + return all_dirs + + def all_files(self, base): + all_files = set() + for root, dirs, files in os.walk(base): + for f in files: + all_files.add(mozpath.join(mozpath.relpath(root, base), f)) + return all_files + + def test_file_copier(self): + copier = FileCopier() + copier.add("foo/bar", GeneratedFile(b"foobar")) + copier.add("foo/qux", GeneratedFile(b"fooqux")) + copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz")) + copier.add("bar", GeneratedFile(b"bar")) + copier.add("qux/foo", GeneratedFile(b"quxfoo")) + copier.add("qux/bar", GeneratedFile(b"")) + + result = copier.copy(self.tmpdir) + self.assertEqual(self.all_files(self.tmpdir), set(copier.paths())) + self.assertEqual( + self.all_dirs(self.tmpdir), set(["foo/deep/nested/directory", "qux"]) + ) + + self.assertEqual( + result.updated_files, + set(self.tmppath(p) for p in self.all_files(self.tmpdir)), + ) + self.assertEqual(result.existing_files, set()) + self.assertEqual(result.removed_files, set()) + self.assertEqual(result.removed_directories, set()) + + 
copier.remove("foo") + copier.add("test", GeneratedFile(b"test")) + result = copier.copy(self.tmpdir) + self.assertEqual(self.all_files(self.tmpdir), set(copier.paths())) + self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"])) + self.assertEqual( + result.removed_files, + set( + self.tmppath(p) + for p in ("foo/bar", "foo/qux", "foo/deep/nested/directory/file") + ), + ) + + def test_symlink_directory_replaced(self): + """Directory symlinks in destination are replaced if they need to be + real directories.""" + if not self.symlink_supported: + return + + dest = self.tmppath("dest") + + copier = FileCopier() + copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz")) + + os.makedirs(self.tmppath("dest/foo")) + dummy = self.tmppath("dummy") + os.mkdir(dummy) + link = self.tmppath("dest/foo/bar") + os.symlink(dummy, link) + + result = copier.copy(dest) + + st = os.lstat(link) + self.assertFalse(stat.S_ISLNK(st.st_mode)) + self.assertTrue(stat.S_ISDIR(st.st_mode)) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + + self.assertEqual(result.removed_directories, set()) + self.assertEqual(len(result.updated_files), 1) + + def test_remove_unaccounted_directory_symlinks(self): + """Directory symlinks in destination that are not in the way are + deleted according to remove_unaccounted and + remove_all_directory_symlinks. + """ + if not self.symlink_supported: + return + + dest = self.tmppath("dest") + + copier = FileCopier() + copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz")) + + os.makedirs(self.tmppath("dest/foo")) + dummy = self.tmppath("dummy") + os.mkdir(dummy) + + os.mkdir(self.tmppath("dest/zot")) + link = self.tmppath("dest/zot/zap") + os.symlink(dummy, link) + + # If not remove_unaccounted but remove_empty_directories, then + # the symlinked directory remains (as does its containing + # directory). + result = copier.copy( + dest, + remove_unaccounted=False, + remove_empty_directories=True, + remove_all_directory_symlinks=False, + ) + + st = os.lstat(link) + self.assertTrue(stat.S_ISLNK(st.st_mode)) + self.assertFalse(stat.S_ISDIR(st.st_mode)) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + self.assertEqual(self.all_dirs(dest), set(["foo/bar"])) + + self.assertEqual(result.removed_directories, set()) + self.assertEqual(len(result.updated_files), 1) + + # If remove_unaccounted but not remove_empty_directories, then + # only the symlinked directory is removed. + result = copier.copy( + dest, + remove_unaccounted=True, + remove_empty_directories=False, + remove_all_directory_symlinks=False, + ) + + st = os.lstat(self.tmppath("dest/zot")) + self.assertFalse(stat.S_ISLNK(st.st_mode)) + self.assertTrue(stat.S_ISDIR(st.st_mode)) + + self.assertEqual(result.removed_files, set([link])) + self.assertEqual(result.removed_directories, set()) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + self.assertEqual(self.all_dirs(dest), set(["foo/bar", "zot"])) + + # If remove_unaccounted and remove_empty_directories, then + # both the symlink and its containing directory are removed. 
+ link = self.tmppath("dest/zot/zap") + os.symlink(dummy, link) + + result = copier.copy( + dest, + remove_unaccounted=True, + remove_empty_directories=True, + remove_all_directory_symlinks=False, + ) + + self.assertEqual(result.removed_files, set([link])) + self.assertEqual(result.removed_directories, set([self.tmppath("dest/zot")])) + + self.assertEqual(self.all_files(dest), set(copier.paths())) + self.assertEqual(self.all_dirs(dest), set(["foo/bar"])) + + def test_permissions(self): + """Ensure files without write permission can be deleted.""" + with open(self.tmppath("dummy"), "a"): + pass + + p = self.tmppath("no_perms") + with open(p, "a"): + pass + + # Make file and directory unwritable. Reminder: making a directory + # unwritable prevents modifications (including deletes) from the list + # of files in that directory. + os.chmod(p, 0o400) + os.chmod(self.tmpdir, 0o400) + + copier = FileCopier() + copier.add("dummy", GeneratedFile(b"content")) + result = copier.copy(self.tmpdir) + self.assertEqual(result.removed_files_count, 1) + self.assertFalse(os.path.exists(p)) + + def test_no_remove(self): + copier = FileCopier() + copier.add("foo", GeneratedFile(b"foo")) + + with open(self.tmppath("bar"), "a"): + pass + + os.mkdir(self.tmppath("emptydir")) + d = self.tmppath("populateddir") + os.mkdir(d) + + with open(self.tmppath("populateddir/foo"), "a"): + pass + + result = copier.copy(self.tmpdir, remove_unaccounted=False) + + self.assertEqual( + self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"]) + ) + self.assertEqual(self.all_dirs(self.tmpdir), set(["populateddir"])) + self.assertEqual(result.removed_files, set()) + self.assertEqual(result.removed_directories, set([self.tmppath("emptydir")])) + + def test_no_remove_empty_directories(self): + copier = FileCopier() + copier.add("foo", GeneratedFile(b"foo")) + + with open(self.tmppath("bar"), "a"): + pass + + os.mkdir(self.tmppath("emptydir")) + d = self.tmppath("populateddir") + os.mkdir(d) + + with open(self.tmppath("populateddir/foo"), "a"): + pass + + result = copier.copy( + self.tmpdir, remove_unaccounted=False, remove_empty_directories=False + ) + + self.assertEqual( + self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"]) + ) + self.assertEqual(self.all_dirs(self.tmpdir), set(["emptydir", "populateddir"])) + self.assertEqual(result.removed_files, set()) + self.assertEqual(result.removed_directories, set()) + + def test_optional_exists_creates_unneeded_directory(self): + """Demonstrate that a directory not strictly required, but specified + as the path to an optional file, will be unnecessarily created. + + This behaviour is wrong; fixing it is tracked by Bug 972432; + and this test exists to guard against unexpected changes in + behaviour. + """ + + dest = self.tmppath("dest") + + copier = FileCopier() + copier.add("foo/bar", ExistingFile(required=False)) + + result = copier.copy(dest) + + st = os.lstat(self.tmppath("dest/foo")) + self.assertFalse(stat.S_ISLNK(st.st_mode)) + self.assertTrue(stat.S_ISDIR(st.st_mode)) + + # What's worse, we have no record that dest was created. + self.assertEqual(len(result.updated_files), 0) + + # But we do have an erroneous record of an optional file + # existing when it does not. 
+ self.assertIn(self.tmppath("dest/foo/bar"), result.existing_files) + + def test_remove_unaccounted_file_registry(self): + """Test FileCopier.copy(remove_unaccounted=FileRegistry())""" + + dest = self.tmppath("dest") + + copier = FileCopier() + copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz")) + copier.add("foo/bar/qux", GeneratedFile(b"foobarqux")) + copier.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga")) + copier.add("foo/toto/tata", GeneratedFile(b"footototata")) + + os.makedirs(os.path.join(dest, "bar")) + with open(os.path.join(dest, "bar", "bar"), "w") as fh: + fh.write("barbar") + os.makedirs(os.path.join(dest, "foo", "toto")) + with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh: + fh.write("foototototo") + + result = copier.copy(dest, remove_unaccounted=False) + + self.assertEqual( + self.all_files(dest), set(copier.paths()) | {"foo/toto/toto", "bar/bar"} + ) + self.assertEqual( + self.all_dirs(dest), {"foo/bar", "foo/hoge", "foo/toto", "bar"} + ) + + copier2 = FileCopier() + copier2.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga")) + + # We expect only files copied from the first copier to be removed, + # not the extra file that was there beforehand. + result = copier2.copy(dest, remove_unaccounted=copier) + + self.assertEqual( + self.all_files(dest), set(copier2.paths()) | {"foo/toto/toto", "bar/bar"} + ) + self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"}) + self.assertEqual(result.updated_files, {self.tmppath("dest/foo/hoge/fuga")}) + self.assertEqual(result.existing_files, set()) + self.assertEqual( + result.removed_files, + { + self.tmppath(p) + for p in ("dest/foo/bar/baz", "dest/foo/bar/qux", "dest/foo/toto/tata") + }, + ) + self.assertEqual(result.removed_directories, {self.tmppath("dest/foo/bar")}) + + +class TestJarrer(unittest.TestCase): + def check_jar(self, dest, copier): + jar = JarReader(fileobj=dest) + self.assertEqual([f.filename for f in jar], copier.paths()) + for f in jar: + self.assertEqual(f.uncompressed_data.read(), copier[f.filename].content) + + def test_jarrer(self): + copier = Jarrer() + copier.add("foo/bar", GeneratedFile(b"foobar")) + copier.add("foo/qux", GeneratedFile(b"fooqux")) + copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz")) + copier.add("bar", GeneratedFile(b"bar")) + copier.add("qux/foo", GeneratedFile(b"quxfoo")) + copier.add("qux/bar", GeneratedFile(b"")) + + dest = MockDest() + copier.copy(dest) + self.check_jar(dest, copier) + + copier.remove("foo") + copier.add("test", GeneratedFile(b"test")) + copier.copy(dest) + self.check_jar(dest, copier) + + copier.remove("test") + copier.add("test", GeneratedFile(b"replaced-content")) + copier.copy(dest) + self.check_jar(dest, copier) + + copier.copy(dest) + self.check_jar(dest, copier) + + preloaded = ["qux/bar", "bar"] + copier.preload(preloaded) + copier.copy(dest) + + dest.seek(0) + jar = JarReader(fileobj=dest) + self.assertEqual( + [f.filename for f in jar], + preloaded + [p for p in copier.paths() if p not in preloaded], + ) + self.assertEqual(jar.last_preloaded, preloaded[-1]) + + def test_jarrer_compress(self): + copier = Jarrer() + copier.add("foo/bar", GeneratedFile(b"ffffff")) + copier.add("foo/qux", GeneratedFile(b"ffffff"), compress=False) + + dest = MockDest() + copier.copy(dest) + self.check_jar(dest, copier) + + dest.seek(0) + jar = JarReader(fileobj=dest) + self.assertTrue(jar["foo/bar"].compressed) + self.assertFalse(jar["foo/qux"].compressed) + + +if __name__ == "__main__": + mozunit.main() diff --git 
a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py new file mode 100644 index 0000000000..411b1b54c3 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_errors.py @@ -0,0 +1,95 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import sys +import unittest + +import mozunit +import six + +from mozpack.errors import AccumulatedErrors, ErrorMessage, errors + + +class TestErrors(object): + def setUp(self): + errors.out = six.moves.cStringIO() + errors.ignore_errors(False) + + def tearDown(self): + errors.out = sys.stderr + + def get_output(self): + return [l.strip() for l in errors.out.getvalue().splitlines()] + + +class TestErrorsImpl(TestErrors, unittest.TestCase): + def test_plain_error(self): + errors.warn("foo") + self.assertRaises(ErrorMessage, errors.error, "foo") + self.assertRaises(ErrorMessage, errors.fatal, "foo") + self.assertEqual(self.get_output(), ["warning: foo"]) + + def test_ignore_errors(self): + errors.ignore_errors() + errors.warn("foo") + errors.error("bar") + self.assertRaises(ErrorMessage, errors.fatal, "foo") + self.assertEqual(self.get_output(), ["warning: foo", "warning: bar"]) + + def test_no_error(self): + with errors.accumulate(): + errors.warn("1") + + def test_simple_error(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + errors.error("1") + self.assertEqual(self.get_output(), ["error: 1"]) + + def test_error_loop(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + for i in range(3): + errors.error("%d" % i) + self.assertEqual(self.get_output(), ["error: 0", "error: 1", "error: 2"]) + + def test_multiple_errors(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + errors.error("foo") + for i in range(3): + if i == 2: + errors.warn("%d" % i) + else: + errors.error("%d" % i) + errors.error("bar") + self.assertEqual( + self.get_output(), + ["error: foo", "error: 0", "error: 1", "warning: 2", "error: bar"], + ) + + def test_errors_context(self): + with self.assertRaises(AccumulatedErrors): + with errors.accumulate(): + self.assertEqual(errors.get_context(), None) + with errors.context("foo", 1): + self.assertEqual(errors.get_context(), ("foo", 1)) + errors.error("a") + with errors.context("bar", 2): + self.assertEqual(errors.get_context(), ("bar", 2)) + errors.error("b") + self.assertEqual(errors.get_context(), ("foo", 1)) + errors.error("c") + self.assertEqual( + self.get_output(), + [ + "error: foo:1: a", + "error: bar:2: b", + "error: foo:1: c", + ], + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py new file mode 100644 index 0000000000..1c86f2e0cc --- /dev/null +++ b/python/mozbuild/mozpack/test/test_files.py @@ -0,0 +1,1362 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
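Taken together, the assertions in test_errors.py pin down a small contract for the errors singleton: warn() only prints, error() is fatal immediately unless an accumulate() block is active (in which case failures are batched and surface as a single AccumulatedErrors when the block exits), and context() pushes a file:line prefix onto every message it wraps. A minimal sketch of that contract in use; the validate() helper and the "sample.manifest" name are invented purely for illustration:

    from mozpack.errors import AccumulatedErrors, errors

    def validate(lines):
        # Collect every problem in one pass instead of dying on the first.
        with errors.accumulate():
            for lineno, line in enumerate(lines, start=1):
                # context() makes each message read "sample.manifest:N: ...".
                with errors.context("sample.manifest", lineno):
                    if not line.strip():
                        errors.warn("blank line")  # printed, never fatal
                    elif " " not in line:
                        errors.error("missing field")  # deferred to block exit

    try:
        validate(["good entry", "bad-entry", ""])
    except AccumulatedErrors:
        pass  # at least one error() fired; details already went to errors.out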
+
+from mozbuild.util import ensure_bytes, ensureParentDir
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import (
+    AbsoluteSymlinkFile,
+    ComposedFinder,
+    DeflatedFile,
+    Dest,
+    ExistingFile,
+    ExtractedTarFile,
+    File,
+    FileFinder,
+    GeneratedFile,
+    HardlinkFile,
+    JarFinder,
+    ManifestFile,
+    MercurialFile,
+    MercurialRevisionFinder,
+    MinifiedCommentStripped,
+    MinifiedJavaScript,
+    PreprocessedFile,
+    TarFinder,
+)
+
+# We don't have hglib installed everywhere.
+try:
+    import hglib
+except ImportError:
+    hglib = None
+
+import os
+import platform
+import random
+import sys
+import tarfile
+import unittest
+from io import BytesIO
+from tempfile import mkdtemp
+
+import mozfile
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+    ManifestContent,
+    ManifestLocale,
+    ManifestOverride,
+    ManifestResource,
+)
+from mozpack.mozjar import JarReader, JarWriter
+
+
+class TestWithTmpDir(unittest.TestCase):
+    def setUp(self):
+        self.tmpdir = mkdtemp()
+
+        self.symlink_supported = False
+        self.hardlink_supported = False
+
+        # See comment in mozpack.files.AbsoluteSymlinkFile
+        if hasattr(os, "symlink") and platform.system() != "Windows":
+            dummy_path = self.tmppath("dummy_file")
+            with open(dummy_path, "a"):
+                pass
+
+            try:
+                os.symlink(dummy_path, self.tmppath("dummy_symlink"))
+                os.remove(self.tmppath("dummy_symlink"))
+                # Only flag support once the symlink was actually created;
+                # setting the flag after the try block would report symlinks
+                # as supported even when os.symlink failed.
+                self.symlink_supported = True
+            except EnvironmentError:
+                pass
+            finally:
+                os.remove(dummy_path)
+
+        if hasattr(os, "link"):
+            dummy_path = self.tmppath("dummy_file")
+            with open(dummy_path, "a"):
+                pass
+
+            try:
+                os.link(dummy_path, self.tmppath("dummy_hardlink"))
+                os.remove(self.tmppath("dummy_hardlink"))
+                # Same reasoning as for symlinks above.
+                self.hardlink_supported = True
+            except EnvironmentError:
+                pass
+            finally:
+                os.remove(dummy_path)
+
+    def tearDown(self):
+        mozfile.rmtree(self.tmpdir)
+
+    def tmppath(self, relpath):
+        return os.path.normpath(os.path.join(self.tmpdir, relpath))
+
+
+class MockDest(BytesIO, Dest):
+    def __init__(self):
+        BytesIO.__init__(self)
+        self.mode = None
+
+    def read(self, length=-1):
+        if self.mode != "r":
+            self.seek(0)
+            self.mode = "r"
+        return BytesIO.read(self, length)
+
+    def write(self, data):
+        if self.mode != "w":
+            self.seek(0)
+            self.truncate(0)
+            self.mode = "w"
+        return BytesIO.write(self, data)
+
+    def exists(self):
+        return True
+
+    def close(self):
+        if self.mode:
+            self.mode = None
+
+
+class DestNoWrite(Dest):
+    def write(self, data):
+        raise RuntimeError
+
+
+class TestDest(TestWithTmpDir):
+    def test_dest(self):
+        dest = Dest(self.tmppath("dest"))
+        self.assertFalse(dest.exists())
+        dest.write(b"foo")
+        self.assertTrue(dest.exists())
+        dest.write(b"foo")
+        self.assertEqual(dest.read(4), b"foof")
+        self.assertEqual(dest.read(), b"oo")
+        self.assertEqual(dest.read(), b"")
+        dest.write(b"bar")
+        self.assertEqual(dest.read(4), b"bar")
+        dest.close()
+        self.assertEqual(dest.read(), b"bar")
+        dest.write(b"foo")
+        dest.close()
+        dest.write(b"qux")
+        self.assertEqual(dest.read(), b"qux")
+
+
+rand = bytes(
+    random.choice(b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
+    for i in six.moves.xrange(131597)
+)
+samples = [
+    b"",
+    b"test",
+    b"fooo",
+    b"same",
+    b"same",
+    b"Different and longer",
+    rand,
+    rand,
+    rand[:-1] + b"_",
+    b"test",
+]
+
+
+class TestFile(TestWithTmpDir):
+    def test_file(self):
+        """
+        Check that File.copy yields the proper content in the destination file
+        in all situations that trigger different code paths:
+        - different content
+        - different
content of the same size + - same content + - long content + """ + src = self.tmppath("src") + dest = self.tmppath("dest") + + for content in samples: + with open(src, "wb") as tmp: + tmp.write(content) + # Ensure the destination file, when it exists, is older than the + # source + if os.path.exists(dest): + time = os.path.getmtime(src) - 1 + os.utime(dest, (time, time)) + f = File(src) + f.copy(dest) + self.assertEqual(content, open(dest, "rb").read()) + self.assertEqual(content, f.open().read()) + self.assertEqual(content, f.open().read()) + + def test_file_dest(self): + """ + Similar to test_file, but for a destination object instead of + a destination file. This ensures the destination object is being + used properly by File.copy, ensuring that other subclasses of Dest + will work. + """ + src = self.tmppath("src") + dest = MockDest() + + for content in samples: + with open(src, "wb") as tmp: + tmp.write(content) + f = File(src) + f.copy(dest) + self.assertEqual(content, dest.getvalue()) + + def test_file_open(self): + """ + Test whether File.open returns an appropriately reset file object. + """ + src = self.tmppath("src") + content = b"".join(samples) + with open(src, "wb") as tmp: + tmp.write(content) + + f = File(src) + self.assertEqual(content[:42], f.open().read(42)) + self.assertEqual(content, f.open().read()) + + def test_file_no_write(self): + """ + Test various conditions where File.copy is expected not to write + in the destination file. + """ + src = self.tmppath("src") + dest = self.tmppath("dest") + + with open(src, "wb") as tmp: + tmp.write(b"test") + + # Initial copy + f = File(src) + f.copy(dest) + + # Ensure subsequent copies won't trigger writes + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # When the source file is newer, but with the same content, no copy + # should occur + time = os.path.getmtime(src) - 1 + os.utime(dest, (time, time)) + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # When the source file is older than the destination file, even with + # different content, no copy should occur. + with open(src, "wb") as tmp: + tmp.write(b"fooo") + time = os.path.getmtime(dest) - 1 + os.utime(src, (time, time)) + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # Double check that under conditions where a copy occurs, we would get + # an exception. + time = os.path.getmtime(src) - 1 + os.utime(dest, (time, time)) + self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest)) + + # skip_if_older=False is expected to force a copy in this situation. + f.copy(dest, skip_if_older=False) + self.assertEqual(b"fooo", open(dest, "rb").read()) + + +class TestAbsoluteSymlinkFile(TestWithTmpDir): + def test_absolute_relative(self): + AbsoluteSymlinkFile("/foo") + + with self.assertRaisesRegexp(ValueError, "Symlink target not absolute"): + AbsoluteSymlinkFile("./foo") + + def test_symlink_file(self): + source = self.tmppath("test_path") + with open(source, "wt") as fh: + fh.write("Hello world") + + s = AbsoluteSymlinkFile(source) + dest = self.tmppath("symlink") + self.assertTrue(s.copy(dest)) + + if self.symlink_supported: + self.assertTrue(os.path.islink(dest)) + link = os.readlink(dest) + self.assertEqual(link, source) + else: + self.assertTrue(os.path.isfile(dest)) + content = open(dest).read() + self.assertEqual(content, "Hello world") + + def test_replace_file_with_symlink(self): + # If symlinks are supported, an existing file should be replaced by a + # symlink. 
+        source = self.tmppath("test_path")
+        with open(source, "wt") as fh:
+            fh.write("source")
+
+        dest = self.tmppath("dest")
+        with open(dest, "a"):
+            pass
+
+        s = AbsoluteSymlinkFile(source)
+        s.copy(dest, skip_if_older=False)
+
+        if self.symlink_supported:
+            self.assertTrue(os.path.islink(dest))
+            link = os.readlink(dest)
+            self.assertEqual(link, source)
+        else:
+            self.assertTrue(os.path.isfile(dest))
+            content = open(dest).read()
+            self.assertEqual(content, "source")
+
+    def test_replace_symlink(self):
+        if not self.symlink_supported:
+            return
+
+        source = self.tmppath("source")
+        with open(source, "a"):
+            pass
+
+        dest = self.tmppath("dest")
+
+        os.symlink(self.tmppath("bad"), dest)
+        self.assertTrue(os.path.islink(dest))
+
+        s = AbsoluteSymlinkFile(source)
+        self.assertTrue(s.copy(dest))
+
+        self.assertTrue(os.path.islink(dest))
+        link = os.readlink(dest)
+        self.assertEqual(link, source)
+
+    def test_noop(self):
+        if not hasattr(os, "symlink") or sys.platform == "win32":
+            return
+
+        source = self.tmppath("source")
+        dest = self.tmppath("dest")
+
+        with open(source, "a"):
+            pass
+
+        os.symlink(source, dest)
+        link = os.readlink(dest)
+        self.assertEqual(link, source)
+
+        s = AbsoluteSymlinkFile(source)
+        self.assertFalse(s.copy(dest))
+
+        link = os.readlink(dest)
+        self.assertEqual(link, source)
+
+
+class TestHardlinkFile(TestWithTmpDir):
+    def test_absolute_relative(self):
+        HardlinkFile("/foo")
+        HardlinkFile("./foo")
+
+    def test_hardlink_file(self):
+        source = self.tmppath("test_path")
+        with open(source, "wt") as fh:
+            fh.write("Hello world")
+
+        s = HardlinkFile(source)
+        dest = self.tmppath("hardlink")
+        self.assertTrue(s.copy(dest))
+
+        if self.hardlink_supported:
+            source_stat = os.stat(source)
+            dest_stat = os.stat(dest)
+            self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+            self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+        else:
+            self.assertTrue(os.path.isfile(dest))
+            with open(dest) as f:
+                content = f.read()
+            self.assertEqual(content, "Hello world")
+
+    def test_replace_file_with_hardlink(self):
+        # If hardlinks are supported, an existing file should be replaced by
+        # a hardlink.
+ source = self.tmppath("test_path") + with open(source, "wt") as fh: + fh.write("source") + + dest = self.tmppath("dest") + with open(dest, "a"): + pass + + s = HardlinkFile(source) + s.copy(dest, skip_if_older=False) + + if self.hardlink_supported: + source_stat = os.stat(source) + dest_stat = os.stat(dest) + self.assertEqual(source_stat.st_dev, dest_stat.st_dev) + self.assertEqual(source_stat.st_ino, dest_stat.st_ino) + else: + self.assertTrue(os.path.isfile(dest)) + with open(dest) as f: + content = f.read() + self.assertEqual(content, "source") + + def test_replace_hardlink(self): + if not self.hardlink_supported: + raise unittest.SkipTest("hardlink not supported") + + source = self.tmppath("source") + with open(source, "a"): + pass + + dest = self.tmppath("dest") + + os.link(source, dest) + + s = HardlinkFile(source) + self.assertFalse(s.copy(dest)) + + source_stat = os.lstat(source) + dest_stat = os.lstat(dest) + self.assertEqual(source_stat.st_dev, dest_stat.st_dev) + self.assertEqual(source_stat.st_ino, dest_stat.st_ino) + + def test_noop(self): + if not self.hardlink_supported: + raise unittest.SkipTest("hardlink not supported") + + source = self.tmppath("source") + dest = self.tmppath("dest") + + with open(source, "a"): + pass + + os.link(source, dest) + + s = HardlinkFile(source) + self.assertFalse(s.copy(dest)) + + source_stat = os.lstat(source) + dest_stat = os.lstat(dest) + self.assertEqual(source_stat.st_dev, dest_stat.st_dev) + self.assertEqual(source_stat.st_ino, dest_stat.st_ino) + + +class TestPreprocessedFile(TestWithTmpDir): + def test_preprocess(self): + """ + Test that copying the file invokes the preprocessor + """ + src = self.tmppath("src") + dest = self.tmppath("dest") + + with open(src, "wb") as tmp: + tmp.write(b"#ifdef FOO\ntest\n#endif") + + f = PreprocessedFile(src, depfile_path=None, marker="#", defines={"FOO": True}) + self.assertTrue(f.copy(dest)) + + self.assertEqual(b"test\n", open(dest, "rb").read()) + + def test_preprocess_file_no_write(self): + """ + Test various conditions where PreprocessedFile.copy is expected not to + write in the destination file. + """ + src = self.tmppath("src") + dest = self.tmppath("dest") + depfile = self.tmppath("depfile") + + with open(src, "wb") as tmp: + tmp.write(b"#ifdef FOO\ntest\n#endif") + + # Initial copy + f = PreprocessedFile( + src, depfile_path=depfile, marker="#", defines={"FOO": True} + ) + self.assertTrue(f.copy(dest)) + + # Ensure subsequent copies won't trigger writes + self.assertFalse(f.copy(DestNoWrite(dest))) + self.assertEqual(b"test\n", open(dest, "rb").read()) + + # When the source file is older than the destination file, even with + # different content, no copy should occur. + with open(src, "wb") as tmp: + tmp.write(b"#ifdef FOO\nfooo\n#endif") + time = os.path.getmtime(dest) - 1 + os.utime(src, (time, time)) + self.assertFalse(f.copy(DestNoWrite(dest))) + self.assertEqual(b"test\n", open(dest, "rb").read()) + + # skip_if_older=False is expected to force a copy in this situation. 
+ self.assertTrue(f.copy(dest, skip_if_older=False)) + self.assertEqual(b"fooo\n", open(dest, "rb").read()) + + def test_preprocess_file_dependencies(self): + """ + Test that the preprocess runs if the dependencies of the source change + """ + src = self.tmppath("src") + dest = self.tmppath("dest") + incl = self.tmppath("incl") + deps = self.tmppath("src.pp") + + with open(src, "wb") as tmp: + tmp.write(b"#ifdef FOO\ntest\n#endif") + + with open(incl, "wb") as tmp: + tmp.write(b"foo bar") + + # Initial copy + f = PreprocessedFile(src, depfile_path=deps, marker="#", defines={"FOO": True}) + self.assertTrue(f.copy(dest)) + + # Update the source so it #includes the include file. + with open(src, "wb") as tmp: + tmp.write(b"#include incl\n") + time = os.path.getmtime(dest) + 1 + os.utime(src, (time, time)) + self.assertTrue(f.copy(dest)) + self.assertEqual(b"foo bar", open(dest, "rb").read()) + + # If one of the dependencies changes, the file should be updated. The + # mtime of the dependency is set after the destination file, to avoid + # both files having the same time. + with open(incl, "wb") as tmp: + tmp.write(b"quux") + time = os.path.getmtime(dest) + 1 + os.utime(incl, (time, time)) + self.assertTrue(f.copy(dest)) + self.assertEqual(b"quux", open(dest, "rb").read()) + + # Perform one final copy to confirm that we don't run the preprocessor + # again. We update the mtime of the destination so it's newer than the + # input files. This would "just work" if we weren't changing + time = os.path.getmtime(incl) + 1 + os.utime(dest, (time, time)) + self.assertFalse(f.copy(DestNoWrite(dest))) + + def test_replace_symlink(self): + """ + Test that if the destination exists, and is a symlink, the target of + the symlink is not overwritten by the preprocessor output. 
+ """ + if not self.symlink_supported: + return + + source = self.tmppath("source") + dest = self.tmppath("dest") + pp_source = self.tmppath("pp_in") + deps = self.tmppath("deps") + + with open(source, "a"): + pass + + os.symlink(source, dest) + self.assertTrue(os.path.islink(dest)) + + with open(pp_source, "wb") as tmp: + tmp.write(b"#define FOO\nPREPROCESSED") + + f = PreprocessedFile( + pp_source, depfile_path=deps, marker="#", defines={"FOO": True} + ) + self.assertTrue(f.copy(dest)) + + self.assertEqual(b"PREPROCESSED", open(dest, "rb").read()) + self.assertFalse(os.path.islink(dest)) + self.assertEqual(b"", open(source, "rb").read()) + + +class TestExistingFile(TestWithTmpDir): + def test_required_missing_dest(self): + with self.assertRaisesRegexp(ErrorMessage, "Required existing file"): + f = ExistingFile(required=True) + f.copy(self.tmppath("dest")) + + def test_required_existing_dest(self): + p = self.tmppath("dest") + with open(p, "a"): + pass + + f = ExistingFile(required=True) + f.copy(p) + + def test_optional_missing_dest(self): + f = ExistingFile(required=False) + f.copy(self.tmppath("dest")) + + def test_optional_existing_dest(self): + p = self.tmppath("dest") + with open(p, "a"): + pass + + f = ExistingFile(required=False) + f.copy(p) + + +class TestGeneratedFile(TestWithTmpDir): + def test_generated_file(self): + """ + Check that GeneratedFile.copy yields the proper content in the + destination file in all situations that trigger different code paths + (see TestFile.test_file) + """ + dest = self.tmppath("dest") + + for content in samples: + f = GeneratedFile(content) + f.copy(dest) + self.assertEqual(content, open(dest, "rb").read()) + + def test_generated_file_open(self): + """ + Test whether GeneratedFile.open returns an appropriately reset file + object. + """ + content = b"".join(samples) + f = GeneratedFile(content) + self.assertEqual(content[:42], f.open().read(42)) + self.assertEqual(content, f.open().read()) + + def test_generated_file_no_write(self): + """ + Test various conditions where GeneratedFile.copy is expected not to + write in the destination file. + """ + dest = self.tmppath("dest") + + # Initial copy + f = GeneratedFile(b"test") + f.copy(dest) + + # Ensure subsequent copies won't trigger writes + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # When using a new instance with the same content, no copy should occur + f = GeneratedFile(b"test") + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # Double check that under conditions where a copy occurs, we would get + # an exception. + f = GeneratedFile(b"fooo") + self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest)) + + def test_generated_file_function(self): + """ + Test GeneratedFile behavior with functions. 
+ """ + dest = self.tmppath("dest") + data = { + "num_calls": 0, + } + + def content(): + data["num_calls"] += 1 + return b"content" + + f = GeneratedFile(content) + self.assertEqual(data["num_calls"], 0) + f.copy(dest) + self.assertEqual(data["num_calls"], 1) + self.assertEqual(b"content", open(dest, "rb").read()) + self.assertEqual(b"content", f.open().read()) + self.assertEqual(b"content", f.read()) + self.assertEqual(len(b"content"), f.size()) + self.assertEqual(data["num_calls"], 1) + + f.content = b"modified" + f.copy(dest) + self.assertEqual(data["num_calls"], 1) + self.assertEqual(b"modified", open(dest, "rb").read()) + self.assertEqual(b"modified", f.open().read()) + self.assertEqual(b"modified", f.read()) + self.assertEqual(len(b"modified"), f.size()) + + f.content = content + self.assertEqual(data["num_calls"], 1) + self.assertEqual(b"content", f.read()) + self.assertEqual(data["num_calls"], 2) + + +class TestDeflatedFile(TestWithTmpDir): + def test_deflated_file(self): + """ + Check that DeflatedFile.copy yields the proper content in the + destination file in all situations that trigger different code paths + (see TestFile.test_file) + """ + src = self.tmppath("src.jar") + dest = self.tmppath("dest") + + contents = {} + with JarWriter(src) as jar: + for content in samples: + name = "".join( + random.choice( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + ) + for i in range(8) + ) + jar.add(name, content, compress=True) + contents[name] = content + + for j in JarReader(src): + f = DeflatedFile(j) + f.copy(dest) + self.assertEqual(contents[j.filename], open(dest, "rb").read()) + + def test_deflated_file_open(self): + """ + Test whether DeflatedFile.open returns an appropriately reset file + object. + """ + src = self.tmppath("src.jar") + content = b"".join(samples) + with JarWriter(src) as jar: + jar.add("content", content) + + f = DeflatedFile(JarReader(src)["content"]) + self.assertEqual(content[:42], f.open().read(42)) + self.assertEqual(content, f.open().read()) + + def test_deflated_file_no_write(self): + """ + Test various conditions where DeflatedFile.copy is expected not to + write in the destination file. + """ + src = self.tmppath("src.jar") + dest = self.tmppath("dest") + + with JarWriter(src) as jar: + jar.add("test", b"test") + jar.add("test2", b"test") + jar.add("fooo", b"fooo") + + jar = JarReader(src) + # Initial copy + f = DeflatedFile(jar["test"]) + f.copy(dest) + + # Ensure subsequent copies won't trigger writes + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # When using a different file with the same content, no copy should + # occur + f = DeflatedFile(jar["test2"]) + f.copy(DestNoWrite(dest)) + self.assertEqual(b"test", open(dest, "rb").read()) + + # Double check that under conditions where a copy occurs, we would get + # an exception. 
+ f = DeflatedFile(jar["fooo"]) + self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest)) + + +class TestManifestFile(TestWithTmpDir): + def test_manifest_file(self): + f = ManifestFile("chrome") + f.add(ManifestContent("chrome", "global", "toolkit/content/global/")) + f.add(ManifestResource("chrome", "gre-resources", "toolkit/res/")) + f.add(ManifestResource("chrome/pdfjs", "pdfjs", "./")) + f.add(ManifestContent("chrome/pdfjs", "pdfjs", "pdfjs")) + f.add(ManifestLocale("chrome", "browser", "en-US", "en-US/locale/browser/")) + + f.copy(self.tmppath("chrome.manifest")) + self.assertEqual( + open(self.tmppath("chrome.manifest")).readlines(), + [ + "content global toolkit/content/global/\n", + "resource gre-resources toolkit/res/\n", + "resource pdfjs pdfjs/\n", + "content pdfjs pdfjs/pdfjs\n", + "locale browser en-US en-US/locale/browser/\n", + ], + ) + + self.assertRaises( + ValueError, + f.remove, + ManifestContent("", "global", "toolkit/content/global/"), + ) + self.assertRaises( + ValueError, + f.remove, + ManifestOverride( + "chrome", + "chrome://global/locale/netError.dtd", + "chrome://browser/locale/netError.dtd", + ), + ) + + f.remove(ManifestContent("chrome", "global", "toolkit/content/global/")) + self.assertRaises( + ValueError, + f.remove, + ManifestContent("chrome", "global", "toolkit/content/global/"), + ) + + f.copy(self.tmppath("chrome.manifest")) + content = open(self.tmppath("chrome.manifest"), "rb").read() + self.assertEqual(content[:42], f.open().read(42)) + self.assertEqual(content, f.open().read()) + + +# Compiled typelib for the following IDL: +# interface foo; +# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)] +# interface bar { +# void bar(in foo f); +# }; +# We need to make this [scriptable] so it doesn't get deleted from the +# typelib. We don't need to make the foo interfaces below [scriptable], +# because they will be automatically included by virtue of being an +# argument to a method of |bar|. 
+bar_xpt = GeneratedFile( + b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A" + + b"\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C" + + b"\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + + b"\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F" + + b"\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00" + + b"\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00" + + b"\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00" + + b"\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80" +) + +# Compiled typelib for the following IDL: +# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)] +# interface foo { +# void foo(); +# }; +foo_xpt = GeneratedFile( + b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A" + + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40" + + b"\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA" + + b"\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00" + + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00" + + b"\x05\x00\x80\x06\x00\x00\x00" +) + +# Compiled typelib for the following IDL: +# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)] +# interface foo { +# void foo(); +# }; +foo2_xpt = GeneratedFile( + b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A" + + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40" + + b"\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39" + + b"\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00" + + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00" + + b"\x05\x00\x80\x06\x00\x00\x00" +) + + +class TestMinifiedCommentStripped(TestWithTmpDir): + def test_minified_comment_stripped(self): + propLines = [ + "# Comments are removed", + "foo = bar", + "", + "# Another comment", + ] + prop = GeneratedFile("\n".join(propLines)) + self.assertEqual( + MinifiedCommentStripped(prop).open().readlines(), [b"foo = bar\n", b"\n"] + ) + open(self.tmppath("prop"), "w").write("\n".join(propLines)) + MinifiedCommentStripped(File(self.tmppath("prop"))).copy(self.tmppath("prop2")) + self.assertEqual(open(self.tmppath("prop2")).readlines(), ["foo = bar\n", "\n"]) + + +class TestMinifiedJavaScript(TestWithTmpDir): + orig_lines = [ + "// Comment line", + 'let foo = "bar";', + "var bar = true;", + "", + "// Another comment", + ] + + def test_minified_javascript(self): + orig_f = GeneratedFile("\n".join(self.orig_lines)) + min_f = MinifiedJavaScript(orig_f) + + mini_lines = min_f.open().readlines() + self.assertTrue(mini_lines) + self.assertTrue(len(mini_lines) < len(self.orig_lines)) + + def _verify_command(self, code): + our_dir = os.path.abspath(os.path.dirname(__file__)) + return [ + sys.executable, + os.path.join(our_dir, "support", "minify_js_verify.py"), + code, + ] + + def test_minified_verify_success(self): + orig_f = GeneratedFile("\n".join(self.orig_lines)) + min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("0")) + + mini_lines = [six.ensure_text(s) for s in min_f.open().readlines()] + self.assertTrue(mini_lines) + self.assertTrue(len(mini_lines) < len(self.orig_lines)) + + def test_minified_verify_failure(self): + orig_f = GeneratedFile("\n".join(self.orig_lines)) + errors.out = six.StringIO() + min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("1")) + + mini_lines = min_f.open().readlines() + output = errors.out.getvalue() + errors.out = sys.stderr + self.assertEqual( + output, + 
"warning: JS minification verification failed for :\n" + "warning: Error message\n", + ) + self.assertEqual(mini_lines, orig_f.open().readlines()) + + +class MatchTestTemplate(object): + def prepare_match_test(self, with_dotfiles=False): + self.add("bar") + self.add("foo/bar") + self.add("foo/baz") + self.add("foo/qux/1") + self.add("foo/qux/bar") + self.add("foo/qux/2/test") + self.add("foo/qux/2/test2") + if with_dotfiles: + self.add("foo/.foo") + self.add("foo/.bar/foo") + + def do_match_test(self): + self.do_check( + "", + [ + "bar", + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + self.do_check( + "*", + [ + "bar", + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + self.do_check( + "foo/qux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"] + ) + self.do_check("foo/b*", ["foo/bar", "foo/baz"]) + self.do_check("baz", []) + self.do_check("foo/foo", []) + self.do_check("foo/*ar", ["foo/bar"]) + self.do_check("*ar", ["bar"]) + self.do_check("*/bar", ["foo/bar"]) + self.do_check( + "foo/*ux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"] + ) + self.do_check( + "foo/q*ux", + ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"], + ) + self.do_check("foo/*/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"]) + self.do_check("**/bar", ["bar", "foo/bar", "foo/qux/bar"]) + self.do_check("foo/**/test", ["foo/qux/2/test"]) + self.do_check( + "foo", + [ + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + self.do_check( + "foo/**", + [ + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + self.do_check("**/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"]) + self.do_check( + "**/foo", + [ + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + self.do_check("**/barbaz", []) + self.do_check("f**/bar", ["foo/bar"]) + + def do_finder_test(self, finder): + self.assertTrue(finder.contains("foo/.foo")) + self.assertTrue(finder.contains("foo/.bar")) + self.assertTrue("foo/.foo" in [f for f, c in finder.find("foo/.foo")]) + self.assertTrue("foo/.bar/foo" in [f for f, c in finder.find("foo/.bar")]) + self.assertEqual( + sorted([f for f, c in finder.find("foo/.*")]), ["foo/.bar/foo", "foo/.foo"] + ) + for pattern in ["foo", "**", "**/*", "**/foo", "foo/*"]: + self.assertFalse("foo/.foo" in [f for f, c in finder.find(pattern)]) + self.assertFalse("foo/.bar/foo" in [f for f, c in finder.find(pattern)]) + self.assertEqual( + sorted([f for f, c in finder.find(pattern)]), + sorted([f for f, c in finder if mozpath.match(f, pattern)]), + ) + + +def do_check(test, finder, pattern, result): + if result: + test.assertTrue(finder.contains(pattern)) + else: + test.assertFalse(finder.contains(pattern)) + test.assertEqual(sorted(list(f for f, c in finder.find(pattern))), sorted(result)) + + +class TestFileFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path): + ensureParentDir(self.tmppath(path)) + open(self.tmppath(path), "wb").write(six.ensure_binary(path)) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def test_file_finder(self): + self.prepare_match_test(with_dotfiles=True) + self.finder = FileFinder(self.tmpdir) + self.do_match_test() + self.do_finder_test(self.finder) + + def test_get(self): + self.prepare_match_test() + finder = 
FileFinder(self.tmpdir) + + self.assertIsNone(finder.get("does-not-exist")) + res = finder.get("bar") + self.assertIsInstance(res, File) + self.assertEqual(mozpath.normpath(res.path), mozpath.join(self.tmpdir, "bar")) + + def test_ignored_dirs(self): + """Ignored directories should not have results returned.""" + self.prepare_match_test() + self.add("fooz") + + # Present to ensure prefix matching doesn't exclude. + self.add("foo/quxz") + + self.finder = FileFinder(self.tmpdir, ignore=["foo/qux"]) + + self.do_check("**", ["bar", "foo/bar", "foo/baz", "foo/quxz", "fooz"]) + self.do_check("foo/*", ["foo/bar", "foo/baz", "foo/quxz"]) + self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"]) + self.do_check("foo/qux/**", []) + self.do_check("foo/qux/*", []) + self.do_check("foo/qux/bar", []) + self.do_check("foo/quxz", ["foo/quxz"]) + self.do_check("fooz", ["fooz"]) + + def test_ignored_files(self): + """Ignored files should not have results returned.""" + self.prepare_match_test() + + # Be sure prefix match doesn't get ignored. + self.add("barz") + + self.finder = FileFinder(self.tmpdir, ignore=["foo/bar", "bar"]) + self.do_check( + "**", + [ + "barz", + "foo/baz", + "foo/qux/1", + "foo/qux/2/test", + "foo/qux/2/test2", + "foo/qux/bar", + ], + ) + self.do_check( + "foo/**", + [ + "foo/baz", + "foo/qux/1", + "foo/qux/2/test", + "foo/qux/2/test2", + "foo/qux/bar", + ], + ) + + def test_ignored_patterns(self): + """Ignore entries with patterns should be honored.""" + self.prepare_match_test() + + self.add("foo/quxz") + + self.finder = FileFinder(self.tmpdir, ignore=["foo/qux/*"]) + self.do_check("**", ["foo/bar", "foo/baz", "foo/quxz", "bar"]) + self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"]) + + def test_dotfiles(self): + """Finder can find files beginning with . 
is configured.""" + self.prepare_match_test(with_dotfiles=True) + self.finder = FileFinder(self.tmpdir, find_dotfiles=True) + self.do_check( + "**", + [ + "bar", + "foo/.foo", + "foo/.bar/foo", + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + + def test_dotfiles_plus_ignore(self): + self.prepare_match_test(with_dotfiles=True) + self.finder = FileFinder( + self.tmpdir, find_dotfiles=True, ignore=["foo/.bar/**"] + ) + self.do_check( + "foo/**", + [ + "foo/.foo", + "foo/bar", + "foo/baz", + "foo/qux/1", + "foo/qux/bar", + "foo/qux/2/test", + "foo/qux/2/test2", + ], + ) + + +class TestJarFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path): + self.jar.add(path, ensure_bytes(path), compress=True) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def test_jar_finder(self): + self.jar = JarWriter(file=self.tmppath("test.jar")) + self.prepare_match_test() + self.jar.finish() + reader = JarReader(file=self.tmppath("test.jar")) + self.finder = JarFinder(self.tmppath("test.jar"), reader) + self.do_match_test() + + self.assertIsNone(self.finder.get("does-not-exist")) + self.assertIsInstance(self.finder.get("bar"), DeflatedFile) + + +class TestTarFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path): + self.tar.addfile(tarfile.TarInfo(name=path)) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def test_tar_finder(self): + self.tar = tarfile.open(name=self.tmppath("test.tar.bz2"), mode="w:bz2") + self.prepare_match_test() + self.tar.close() + with tarfile.open(name=self.tmppath("test.tar.bz2"), mode="r:bz2") as tarreader: + self.finder = TarFinder(self.tmppath("test.tar.bz2"), tarreader) + self.do_match_test() + + self.assertIsNone(self.finder.get("does-not-exist")) + self.assertIsInstance(self.finder.get("bar"), ExtractedTarFile) + + +class TestComposedFinder(MatchTestTemplate, TestWithTmpDir): + def add(self, path, content=None): + # Put foo/qux files under $tmp/b. + if path.startswith("foo/qux/"): + real_path = mozpath.join("b", path[8:]) + else: + real_path = mozpath.join("a", path) + ensureParentDir(self.tmppath(real_path)) + if not content: + content = six.ensure_binary(path) + open(self.tmppath(real_path), "wb").write(content) + + def do_check(self, pattern, result): + if "*" in pattern: + return + do_check(self, self.finder, pattern, result) + + def test_composed_finder(self): + self.prepare_match_test() + # Also add files in $tmp/a/foo/qux because ComposedFinder is + # expected to mask foo/qux entirely with content from $tmp/b. + ensureParentDir(self.tmppath("a/foo/qux/hoge")) + open(self.tmppath("a/foo/qux/hoge"), "wb").write(b"hoge") + open(self.tmppath("a/foo/qux/bar"), "wb").write(b"not the right content") + self.finder = ComposedFinder( + { + "": FileFinder(self.tmppath("a")), + "foo/qux": FileFinder(self.tmppath("b")), + } + ) + self.do_match_test() + + self.assertIsNone(self.finder.get("does-not-exist")) + self.assertIsInstance(self.finder.get("bar"), File) + + +@unittest.skipUnless(hglib, "hglib not available") +@unittest.skipIf( + six.PY3 and os.name == "nt", "Does not currently work in Python3 on Windows" +) +class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir): + def setUp(self): + super(TestMercurialRevisionFinder, self).setUp() + hglib.init(self.tmpdir) + self._clients = [] + + def tearDown(self): + # Ensure the hg client process is closed. 
Otherwise, Windows + # may have trouble removing the repo directory because the process + # has an open handle on it. + for client in getattr(self, "_clients", []): + if client.server: + client.close() + + self._clients[:] = [] + + super(TestMercurialRevisionFinder, self).tearDown() + + def _client(self): + configs = ( + # b'' because py2 needs !unicode + b'ui.username="Dummy User "', + ) + client = hglib.open( + six.ensure_binary(self.tmpdir), + encoding=b"UTF-8", # b'' because py2 needs !unicode + configs=configs, + ) + self._clients.append(client) + return client + + def add(self, path): + with self._client() as c: + ensureParentDir(self.tmppath(path)) + with open(self.tmppath(path), "wb") as fh: + fh.write(six.ensure_binary(path)) + c.add(six.ensure_binary(self.tmppath(path))) + + def do_check(self, pattern, result): + do_check(self, self.finder, pattern, result) + + def _get_finder(self, *args, **kwargs): + f = MercurialRevisionFinder(*args, **kwargs) + self._clients.append(f._client) + return f + + def test_default_revision(self): + self.prepare_match_test() + with self._client() as c: + c.commit("initial commit") + + self.finder = self._get_finder(self.tmpdir) + self.do_match_test() + + self.assertIsNone(self.finder.get("does-not-exist")) + self.assertIsInstance(self.finder.get("bar"), MercurialFile) + + def test_old_revision(self): + with self._client() as c: + with open(self.tmppath("foo"), "wb") as fh: + fh.write(b"foo initial") + c.add(six.ensure_binary(self.tmppath("foo"))) + c.commit("initial") + + with open(self.tmppath("foo"), "wb") as fh: + fh.write(b"foo second") + with open(self.tmppath("bar"), "wb") as fh: + fh.write(b"bar second") + c.add(six.ensure_binary(self.tmppath("bar"))) + c.commit("second") + # This wipes out the working directory, ensuring the finder isn't + # finding anything from the filesystem. + c.rawcommand([b"update", b"null"]) + + finder = self._get_finder(self.tmpdir, "0") + f = finder.get("foo") + self.assertEqual(f.read(), b"foo initial") + self.assertEqual(f.read(), b"foo initial", "read again for good measure") + self.assertIsNone(finder.get("bar")) + + finder = self._get_finder(self.tmpdir, rev="1") + f = finder.get("foo") + self.assertEqual(f.read(), b"foo second") + f = finder.get("bar") + self.assertEqual(f.read(), b"bar second") + f = None + + def test_recognize_repo_paths(self): + with self._client() as c: + with open(self.tmppath("foo"), "wb") as fh: + fh.write(b"initial") + c.add(six.ensure_binary(self.tmppath("foo"))) + c.commit("initial") + c.rawcommand([b"update", b"null"]) + + finder = self._get_finder(self.tmpdir, "0", recognize_repo_paths=True) + with self.assertRaises(NotImplementedError): + list(finder.find("")) + + with self.assertRaises(ValueError): + finder.get("foo") + with self.assertRaises(ValueError): + finder.get("") + + f = finder.get(self.tmppath("foo")) + self.assertIsInstance(f, MercurialFile) + self.assertEqual(f.read(), b"initial") + f = None + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py new file mode 100644 index 0000000000..a5db53b58c --- /dev/null +++ b/python/mozbuild/mozpack/test/test_manifests.py @@ -0,0 +1,465 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
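All of the finders exercised in test_files.py — FileFinder, JarFinder, TarFinder, ComposedFinder and MercurialRevisionFinder — share the same duck-typed surface, which is what lets a single do_check() drive them interchangeably: find(pattern) yields (path, file) pairs, get(path) returns a file object or None, and contains(pattern) reports whether anything matches. A hedged sketch against that shared surface, using FileFinder; the "." directory and the glob are placeholders chosen for illustration:

    from mozpack.files import FileFinder

    finder = FileFinder(".")  # any existing directory works for this sketch

    # find() takes the same globs the match tests cover: "*", "**", and a
    # bare directory name, which matches everything beneath it.
    for path, f in finder.find("**/*.py"):
        print(path, len(f.open().read()))

    # get() is the point lookup; per the tests it returns None rather than
    # raising when nothing exists at that path.
    assert finder.get("no/such/path") is None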
+ +import os + +import mozunit + +from mozpack.copier import FileCopier, FileRegistry +from mozpack.manifests import InstallManifest, UnreadableInstallManifest +from mozpack.test.test_files import TestWithTmpDir + + +class TestInstallManifest(TestWithTmpDir): + def test_construct(self): + m = InstallManifest() + self.assertEqual(len(m), 0) + + def test_malformed(self): + f = self.tmppath("manifest") + open(f, "wt").write("junk\n") + with self.assertRaises(UnreadableInstallManifest): + InstallManifest(f) + + def test_adds(self): + m = InstallManifest() + m.add_link("s_source", "s_dest") + m.add_copy("c_source", "c_dest") + m.add_required_exists("e_dest") + m.add_optional_exists("o_dest") + m.add_pattern_link("ps_base", "ps/*", "ps_dest") + m.add_pattern_copy("pc_base", "pc/**", "pc_dest") + m.add_preprocess("p_source", "p_dest", "p_source.pp") + m.add_content("content", "content") + + self.assertEqual(len(m), 8) + self.assertIn("s_dest", m) + self.assertIn("c_dest", m) + self.assertIn("p_dest", m) + self.assertIn("e_dest", m) + self.assertIn("o_dest", m) + self.assertIn("content", m) + + with self.assertRaises(ValueError): + m.add_link("s_other", "s_dest") + + with self.assertRaises(ValueError): + m.add_copy("c_other", "c_dest") + + with self.assertRaises(ValueError): + m.add_preprocess("p_other", "p_dest", "p_other.pp") + + with self.assertRaises(ValueError): + m.add_required_exists("e_dest") + + with self.assertRaises(ValueError): + m.add_optional_exists("o_dest") + + with self.assertRaises(ValueError): + m.add_pattern_link("ps_base", "ps/*", "ps_dest") + + with self.assertRaises(ValueError): + m.add_pattern_copy("pc_base", "pc/**", "pc_dest") + + with self.assertRaises(ValueError): + m.add_content("content", "content") + + def _get_test_manifest(self): + m = InstallManifest() + m.add_link(self.tmppath("s_source"), "s_dest") + m.add_copy(self.tmppath("c_source"), "c_dest") + m.add_preprocess( + self.tmppath("p_source"), + "p_dest", + self.tmppath("p_source.pp"), + "#", + {"FOO": "BAR", "BAZ": "QUX"}, + ) + m.add_required_exists("e_dest") + m.add_optional_exists("o_dest") + m.add_pattern_link("ps_base", "*", "ps_dest") + m.add_pattern_copy("pc_base", "**", "pc_dest") + m.add_content("the content\non\nmultiple lines", "content") + + return m + + def test_serialization(self): + m = self._get_test_manifest() + + p = self.tmppath("m") + m.write(path=p) + self.assertTrue(os.path.isfile(p)) + + with open(p, "r") as fh: + c = fh.read() + + self.assertEqual(c.count("\n"), 9) + + lines = c.splitlines() + self.assertEqual(len(lines), 9) + + self.assertEqual(lines[0], "5") + + m2 = InstallManifest(path=p) + self.assertEqual(m, m2) + p2 = self.tmppath("m2") + m2.write(path=p2) + + with open(p2, "r") as fh: + c2 = fh.read() + + self.assertEqual(c, c2) + + def test_populate_registry(self): + m = self._get_test_manifest() + r = FileRegistry() + m.populate_registry(r) + + self.assertEqual(len(r), 6) + self.assertEqual( + r.paths(), ["c_dest", "content", "e_dest", "o_dest", "p_dest", "s_dest"] + ) + + def test_pattern_expansion(self): + source = self.tmppath("source") + os.mkdir(source) + os.mkdir("%s/base" % source) + os.mkdir("%s/base/foo" % source) + + with open("%s/base/foo/file1" % source, "a"): + pass + + with open("%s/base/foo/file2" % source, "a"): + pass + + m = InstallManifest() + m.add_pattern_link("%s/base" % source, "**", "dest") + + c = FileCopier() + m.populate_registry(c) + self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"]) + + def test_write_expand_pattern(self): + source = 
self.tmppath("source") + os.mkdir(source) + os.mkdir("%s/base" % source) + os.mkdir("%s/base/foo" % source) + + with open("%s/base/foo/file1" % source, "a"): + pass + + with open("%s/base/foo/file2" % source, "a"): + pass + + m = InstallManifest() + m.add_pattern_link("%s/base" % source, "**", "dest") + + track = self.tmppath("track") + m.write(path=track, expand_pattern=True) + + m = InstallManifest(path=track) + self.assertEqual( + sorted(dest for dest in m._dests), ["dest/foo/file1", "dest/foo/file2"] + ) + + def test_or(self): + m1 = self._get_test_manifest() + orig_length = len(m1) + m2 = InstallManifest() + m2.add_link("s_source2", "s_dest2") + m2.add_copy("c_source2", "c_dest2") + + m1 |= m2 + + self.assertEqual(len(m2), 2) + self.assertEqual(len(m1), orig_length + 2) + + self.assertIn("s_dest2", m1) + self.assertIn("c_dest2", m1) + + def test_copier_application(self): + dest = self.tmppath("dest") + os.mkdir(dest) + + to_delete = self.tmppath("dest/to_delete") + with open(to_delete, "a"): + pass + + with open(self.tmppath("s_source"), "wt") as fh: + fh.write("symlink!") + + with open(self.tmppath("c_source"), "wt") as fh: + fh.write("copy!") + + with open(self.tmppath("p_source"), "wt") as fh: + fh.write("#define FOO 1\npreprocess!") + + with open(self.tmppath("dest/e_dest"), "a"): + pass + + with open(self.tmppath("dest/o_dest"), "a"): + pass + + m = self._get_test_manifest() + c = FileCopier() + m.populate_registry(c) + result = c.copy(dest) + + self.assertTrue(os.path.exists(self.tmppath("dest/s_dest"))) + self.assertTrue(os.path.exists(self.tmppath("dest/c_dest"))) + self.assertTrue(os.path.exists(self.tmppath("dest/p_dest"))) + self.assertTrue(os.path.exists(self.tmppath("dest/e_dest"))) + self.assertTrue(os.path.exists(self.tmppath("dest/o_dest"))) + self.assertTrue(os.path.exists(self.tmppath("dest/content"))) + self.assertFalse(os.path.exists(to_delete)) + + with open(self.tmppath("dest/s_dest"), "rt") as fh: + self.assertEqual(fh.read(), "symlink!") + + with open(self.tmppath("dest/c_dest"), "rt") as fh: + self.assertEqual(fh.read(), "copy!") + + with open(self.tmppath("dest/p_dest"), "rt") as fh: + self.assertEqual(fh.read(), "preprocess!") + + self.assertEqual( + result.updated_files, + set( + self.tmppath(p) + for p in ("dest/s_dest", "dest/c_dest", "dest/p_dest", "dest/content") + ), + ) + self.assertEqual( + result.existing_files, + set([self.tmppath("dest/e_dest"), self.tmppath("dest/o_dest")]), + ) + self.assertEqual(result.removed_files, {to_delete}) + self.assertEqual(result.removed_directories, set()) + + def test_preprocessor(self): + manifest = self.tmppath("m") + deps = self.tmppath("m.pp") + dest = self.tmppath("dest") + include = self.tmppath("p_incl") + + with open(include, "wt") as fh: + fh.write("#define INCL\n") + time = os.path.getmtime(include) - 3 + os.utime(include, (time, time)) + + with open(self.tmppath("p_source"), "wt") as fh: + fh.write("#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n") + fh.write("#ifdef DEPTEST\nPASS2\n#endif\n") + fh.write("#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n") + time = os.path.getmtime(self.tmppath("p_source")) - 3 + os.utime(self.tmppath("p_source"), (time, time)) + + # Create and write a manifest with the preprocessed file, then apply it. + # This should write out our preprocessed file. 
+ m = InstallManifest() + m.add_preprocess( + self.tmppath("p_source"), "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"} + ) + m.write(path=manifest) + + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + c.copy(dest) + + self.assertTrue(os.path.exists(self.tmppath("dest/p_dest"))) + + with open(self.tmppath("dest/p_dest"), "rt") as fh: + self.assertEqual(fh.read(), "PASS1\n") + + # Create a second manifest with the preprocessed file, then apply it. + # Since this manifest does not exist on the disk, there should not be a + # dependency on it, and the preprocessed file should not be modified. + m2 = InstallManifest() + m2.add_preprocess( + self.tmppath("p_source"), "p_dest", deps, "#", {"DEPTEST": True} + ) + c = FileCopier() + m2.populate_registry(c) + result = c.copy(dest) + + self.assertFalse(self.tmppath("dest/p_dest") in result.updated_files) + self.assertTrue(self.tmppath("dest/p_dest") in result.existing_files) + + # Write out the second manifest, then load it back in from the disk. + # This should add the dependency on the manifest file, so our + # preprocessed file should be regenerated with the new defines. + # We also set the mtime on the destination file back, so it will be + # older than the manifest file. + m2.write(path=manifest) + time = os.path.getmtime(manifest) - 1 + os.utime(self.tmppath("dest/p_dest"), (time, time)) + m2 = InstallManifest(path=manifest) + c = FileCopier() + m2.populate_registry(c) + self.assertTrue(c.copy(dest)) + + with open(self.tmppath("dest/p_dest"), "rt") as fh: + self.assertEqual(fh.read(), "PASS2\n") + + # Set the time on the manifest back, so it won't be picked up as + # modified in the next test + time = os.path.getmtime(manifest) - 1 + os.utime(manifest, (time, time)) + + # Update the contents of a file included by the source file. This should + # cause the destination to be regenerated. + with open(include, "wt") as fh: + fh.write("#define INCLTEST\n") + + time = os.path.getmtime(include) - 1 + os.utime(self.tmppath("dest/p_dest"), (time, time)) + c = FileCopier() + m2.populate_registry(c) + self.assertTrue(c.copy(dest)) + + with open(self.tmppath("dest/p_dest"), "rt") as fh: + self.assertEqual(fh.read(), "PASS2\nPASS3\n") + + def test_preprocessor_dependencies(self): + manifest = self.tmppath("m") + deps = self.tmppath("m.pp") + dest = self.tmppath("dest") + source = self.tmppath("p_source") + destfile = self.tmppath("dest/p_dest") + include = self.tmppath("p_incl") + os.mkdir(dest) + + with open(source, "wt") as fh: + fh.write("#define SRC\nSOURCE\n") + time = os.path.getmtime(source) - 3 + os.utime(source, (time, time)) + + with open(include, "wt") as fh: + fh.write("INCLUDE\n") + time = os.path.getmtime(source) - 3 + os.utime(include, (time, time)) + + # Create and write a manifest with the preprocessed file. + m = InstallManifest() + m.add_preprocess(source, "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"}) + m.write(path=manifest) + + time = os.path.getmtime(source) - 5 + os.utime(manifest, (time, time)) + + # Now read the manifest back in, and apply it. This should write out + # our preprocessed file. + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + self.assertTrue(c.copy(dest)) + + with open(destfile, "rt") as fh: + self.assertEqual(fh.read(), "SOURCE\n") + + # Next, modify the source to #INCLUDE another file. 
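+        # The destination's mtime is pushed behind the source's so the
+        # copier sees the output as stale and reruns the preprocessor.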
+ with open(source, "wt") as fh: + fh.write("SOURCE\n#include p_incl\n") + time = os.path.getmtime(source) - 1 + os.utime(destfile, (time, time)) + + # Apply the manifest, and confirm that it also reads the newly included + # file. + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + c.copy(dest) + + with open(destfile, "rt") as fh: + self.assertEqual(fh.read(), "SOURCE\nINCLUDE\n") + + # Set the time on the source file back, so it won't be picked up as + # modified in the next test. + time = os.path.getmtime(source) - 1 + os.utime(source, (time, time)) + + # Now, modify the include file (but not the original source). + with open(include, "wt") as fh: + fh.write("INCLUDE MODIFIED\n") + time = os.path.getmtime(include) - 1 + os.utime(destfile, (time, time)) + + # Apply the manifest, and confirm that the change to the include file + # is detected. That should cause the preprocessor to run again. + m = InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + c.copy(dest) + + with open(destfile, "rt") as fh: + self.assertEqual(fh.read(), "SOURCE\nINCLUDE MODIFIED\n") + + # ORing an InstallManifest should copy file dependencies + m = InstallManifest() + m |= InstallManifest(path=manifest) + c = FileCopier() + m.populate_registry(c) + e = c._files["p_dest"] + self.assertEqual(e.extra_depends, [manifest]) + + def test_add_entries_from(self): + source = self.tmppath("source") + os.mkdir(source) + os.mkdir("%s/base" % source) + os.mkdir("%s/base/foo" % source) + + with open("%s/base/foo/file1" % source, "a"): + pass + + with open("%s/base/foo/file2" % source, "a"): + pass + + m = InstallManifest() + m.add_pattern_link("%s/base" % source, "**", "dest") + + p = InstallManifest() + p.add_entries_from(m) + self.assertEqual(len(p), 1) + + c = FileCopier() + p.populate_registry(c) + self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"]) + + q = InstallManifest() + q.add_entries_from(m, base="target") + self.assertEqual(len(q), 1) + + d = FileCopier() + q.populate_registry(d) + self.assertEqual(d.paths(), ["target/dest/foo/file1", "target/dest/foo/file2"]) + + # Some of the values in an InstallManifest include destination + # information that is present in the keys. Verify that we can + # round-trip serialization. + r = InstallManifest() + r.add_entries_from(m) + r.add_entries_from(m, base="target") + self.assertEqual(len(r), 2) + + temp_path = self.tmppath("temp_path") + r.write(path=temp_path) + + s = InstallManifest(path=temp_path) + e = FileCopier() + s.populate_registry(e) + + self.assertEqual( + e.paths(), + [ + "dest/foo/file1", + "dest/foo/file2", + "target/dest/foo/file1", + "target/dest/foo/file2", + ], + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py new file mode 100644 index 0000000000..e96c59238f --- /dev/null +++ b/python/mozbuild/mozpack/test/test_mozjar.py @@ -0,0 +1,350 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import unittest +from collections import OrderedDict + +import mozunit +import six + +import mozpack.path as mozpath +from mozpack.files import FileFinder +from mozpack.mozjar import ( + Deflater, + JarLog, + JarReader, + JarReaderError, + JarStruct, + JarWriter, + JarWriterError, +) +from mozpack.test.test_files import MockDest + +test_data_path = mozpath.abspath(mozpath.dirname(__file__)) +test_data_path = mozpath.join(test_data_path, "data") + + +class TestJarStruct(unittest.TestCase): + class Foo(JarStruct): + MAGIC = 0x01020304 + STRUCT = OrderedDict( + [ + ("foo", "uint32"), + ("bar", "uint16"), + ("qux", "uint16"), + ("length", "uint16"), + ("length2", "uint16"), + ("string", "length"), + ("string2", "length2"), + ] + ) + + def test_jar_struct(self): + foo = TestJarStruct.Foo() + self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC) + self.assertEqual(foo["foo"], 0) + self.assertEqual(foo["bar"], 0) + self.assertEqual(foo["qux"], 0) + self.assertFalse("length" in foo) + self.assertFalse("length2" in foo) + self.assertEqual(foo["string"], "") + self.assertEqual(foo["string2"], "") + + self.assertEqual(foo.size, 16) + + foo["foo"] = 0x42434445 + foo["bar"] = 0xABCD + foo["qux"] = 0xEF01 + foo["string"] = "abcde" + foo["string2"] = "Arbitrarily long string" + + serialized = ( + b"\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef" + + b"\x05\x00\x17\x00abcdeArbitrarily long string" + ) + self.assertEqual(foo.size, len(serialized)) + foo_serialized = foo.serialize() + self.assertEqual(foo_serialized, serialized) + + def do_test_read_jar_struct(self, data): + self.assertRaises(JarReaderError, TestJarStruct.Foo, data) + self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:]) + + foo = TestJarStruct.Foo(data[1:]) + self.assertEqual(foo["foo"], 0x45444342) + self.assertEqual(foo["bar"], 0xCDAB) + self.assertEqual(foo["qux"], 0x01EF) + self.assertFalse("length" in foo) + self.assertFalse("length2" in foo) + self.assertEqual(foo["string"], b"012345") + self.assertEqual(foo["string2"], b"67") + + def test_read_jar_struct(self): + data = ( + b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef" + + b"\x01\x06\x00\x02\x0001234567890" + ) + self.do_test_read_jar_struct(data) + + def test_read_jar_struct_memoryview(self): + data = ( + b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef" + + b"\x01\x06\x00\x02\x0001234567890" + ) + self.do_test_read_jar_struct(memoryview(data)) + + +class TestDeflater(unittest.TestCase): + def wrap(self, data): + return data + + def test_deflater_no_compress(self): + deflater = Deflater(False) + deflater.write(self.wrap(b"abc")) + self.assertFalse(deflater.compressed) + self.assertEqual(deflater.uncompressed_size, 3) + self.assertEqual(deflater.compressed_size, deflater.uncompressed_size) + self.assertEqual(deflater.compressed_data, b"abc") + self.assertEqual(deflater.crc32, 0x352441C2) + + def test_deflater_compress_no_gain(self): + deflater = Deflater(True) + deflater.write(self.wrap(b"abc")) + self.assertFalse(deflater.compressed) + self.assertEqual(deflater.uncompressed_size, 3) + self.assertEqual(deflater.compressed_size, deflater.uncompressed_size) + self.assertEqual(deflater.compressed_data, b"abc") + self.assertEqual(deflater.crc32, 0x352441C2) + + def test_deflater_compress(self): + deflater = Deflater(True) + deflater.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz")) + self.assertTrue(deflater.compressed) + self.assertEqual(deflater.uncompressed_size, 26) + self.assertNotEqual(deflater.compressed_size, deflater.uncompressed_size) + 
self.assertEqual(deflater.crc32, 0xD46B97ED)
+        # The CRC is the same as when not compressed
+        deflater = Deflater(False)
+        self.assertFalse(deflater.compressed)
+        deflater.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
+        self.assertEqual(deflater.crc32, 0xD46B97ED)
+
+    def test_deflater_empty(self):
+        deflater = Deflater(False)
+        self.assertFalse(deflater.compressed)
+        self.assertEqual(deflater.uncompressed_size, 0)
+        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
+        self.assertEqual(deflater.compressed_data, b"")
+        self.assertEqual(deflater.crc32, 0)
+
+
+class TestDeflaterMemoryView(TestDeflater):
+    def wrap(self, data):
+        return memoryview(data)
+
+
+class TestJar(unittest.TestCase):
+    def test_jar(self):
+        s = MockDest()
+        with JarWriter(fileobj=s) as jar:
+            jar.add("foo", b"foo")
+            self.assertRaises(JarWriterError, jar.add, "foo", b"bar")
+            jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+            jar.add("baz\\backslash", b"aaaaaaaaaaaaaaa")
+
+        files = [j for j in JarReader(fileobj=s)]
+
+        self.assertEqual(files[0].filename, "foo")
+        self.assertFalse(files[0].compressed)
+        self.assertEqual(files[0].read(), b"foo")
+
+        self.assertEqual(files[1].filename, "bar")
+        self.assertTrue(files[1].compressed)
+        self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        self.assertEqual(files[2].filename, "baz/qux")
+        self.assertFalse(files[2].compressed)
+        self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        if os.sep == "\\":
+            self.assertEqual(
+                files[3].filename,
+                "baz/backslash",
+                "backslashes in filenames on Windows should get normalized",
+            )
+        else:
+            self.assertEqual(
+                files[3].filename,
+                "baz\\backslash",
+                "backslashes in filenames on POSIX platforms are untouched",
+            )
+
+        s = MockDest()
+        with JarWriter(fileobj=s, compress=False) as jar:
+            jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+            jar.add("foo", b"foo")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", True)
+
+        jar = JarReader(fileobj=s)
+        files = [j for j in jar]
+
+        self.assertEqual(files[0].filename, "bar")
+        self.assertFalse(files[0].compressed)
+        self.assertEqual(files[0].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        self.assertEqual(files[1].filename, "foo")
+        self.assertFalse(files[1].compressed)
+        self.assertEqual(files[1].read(), b"foo")
+
+        self.assertEqual(files[2].filename, "baz/qux")
+        self.assertTrue(files[2].compressed)
+        self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        self.assertTrue("bar" in jar)
+        self.assertTrue("foo" in jar)
+        self.assertFalse("baz" in jar)
+        self.assertTrue("baz/qux" in jar)
+        # Entries fetched by name should match the iteration order above.
+        self.assertEqual(jar["bar"].filename, files[0].filename)
+        self.assertEqual(jar["foo"].filename, files[1].filename)
+        self.assertEqual(jar["baz/qux"].filename, files[2].filename)
+
+        s.seek(0)
+        jar = JarReader(fileobj=s)
+        self.assertTrue("bar" in jar)
+        self.assertTrue("foo" in jar)
+        self.assertFalse("baz" in jar)
+        self.assertTrue("baz/qux" in jar)
+
+        files[0].seek(0)
+        self.assertEqual(jar["bar"].filename, files[0].filename)
+        self.assertEqual(jar["bar"].compressed, files[0].compressed)
+        self.assertEqual(jar["bar"].read(), files[0].read())
+
+        files[1].seek(0)
+        self.assertEqual(jar["foo"].filename, files[1].filename)
+        self.assertEqual(jar["foo"].compressed, files[1].compressed)
+        self.assertEqual(jar["foo"].read(), files[1].read())
+
+        files[2].seek(0)
+        self.assertEqual(jar["baz/qux"].filename, files[2].filename)
+        self.assertEqual(jar["baz/qux"].compressed, files[2].compressed)
+        self.assertEqual(jar["baz/qux"].read(), files[2].read())
+
+    def test_rejar(self):
+        s = MockDest()
+        with JarWriter(fileobj=s) as jar:
+            jar.add("foo", b"foo")
+            jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+
+        new = MockDest()
+        with JarWriter(fileobj=new) as jar:
+            for j in JarReader(fileobj=s):
+                jar.add(j.filename, j)
+
+        jar = JarReader(fileobj=new)
+        files = [j for j in jar]
+
+        self.assertEqual(files[0].filename, "foo")
+        self.assertFalse(files[0].compressed)
+        self.assertEqual(files[0].read(), b"foo")
+
+        self.assertEqual(files[1].filename, "bar")
+        self.assertTrue(files[1].compressed)
+        self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        self.assertEqual(files[2].filename, "baz/qux")
+        self.assertTrue(files[2].compressed)
+        self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+    def test_add_from_finder(self):
+        s = MockDest()
+        with JarWriter(fileobj=s) as jar:
+            finder = FileFinder(test_data_path)
+            for p, f in finder.find("test_data"):
+                jar.add("test_data", f)
+
+        jar = JarReader(fileobj=s)
+        files = [j for j in jar]
+
+        self.assertEqual(files[0].filename, "test_data")
+        self.assertFalse(files[0].compressed)
+        self.assertEqual(files[0].read(), b"test_data")
+
+
+class TestPreload(unittest.TestCase):
+    def test_preload(self):
+        s = MockDest()
+        with JarWriter(fileobj=s) as jar:
+            jar.add("foo", b"foo")
+            jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+        jar = JarReader(fileobj=s)
+        self.assertEqual(jar.last_preloaded, None)
+
+        with JarWriter(fileobj=s) as jar:
+            jar.add("foo", b"foo")
+            jar.add("bar", b"abcdefghijklmnopqrstuvwxyz")
+            jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
+            jar.preload(["baz/qux", "bar"])
+
+        jar = JarReader(fileobj=s)
+        self.assertEqual(jar.last_preloaded, "bar")
+        files = [j for j in jar]
+
+        self.assertEqual(files[0].filename, "baz/qux")
+        self.assertEqual(files[1].filename, "bar")
+        self.assertEqual(files[2].filename, "foo")
+
+
+class TestJarLog(unittest.TestCase):
+    def test_jarlog(self):
+        s = six.moves.cStringIO(
+            "\n".join(
+                [
+                    "bar/baz.jar first",
+                    "bar/baz.jar second",
+                    "bar/baz.jar third",
+                    "bar/baz.jar second",
+                    "bar/baz.jar second",
+                    "omni.ja stuff",
+                    "bar/baz.jar first",
+                    "omni.ja other/stuff",
+                    "omni.ja stuff",
+                    "bar/baz.jar third",
+                ]
+            )
+        )
+        log = JarLog(fileobj=s)
+        self.assertEqual(
+            set(log.keys()),
+            set(
+                [
+                    "bar/baz.jar",
+                    "omni.ja",
+                ]
+            ),
+        )
+        self.assertEqual(
+            log["bar/baz.jar"],
+            [
+                "first",
+                "second",
+                "third",
+            ],
+        )
+        self.assertEqual(
+            log["omni.ja"],
+            [
+                "stuff",
+                "other/stuff",
+            ],
+        )
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager.py b/python/mozbuild/mozpack/test/test_packager.py
new file mode 100644
index 0000000000..266902ebb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager.py
@@ -0,0 +1,630 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ +import os +import unittest + +import mozunit +from buildconfig import topobjdir +from mozunit import MockedOpen + +import mozpack.path as mozpath +from mozbuild.preprocessor import Preprocessor +from mozpack.chrome.manifest import ( + ManifestBinaryComponent, + ManifestContent, + ManifestResource, +) +from mozpack.errors import ErrorMessage, errors +from mozpack.files import GeneratedFile +from mozpack.packager import ( + CallDeque, + Component, + SimpleManifestSink, + SimplePackager, + preprocess_manifest, +) + +MANIFEST = """ +bar/* +[foo] +foo/* +-foo/bar +chrome.manifest +[zot destdir="destdir"] +foo/zot +; comment +#ifdef baz +[baz] +baz@SUFFIX@ +#endif +""" + + +class TestPreprocessManifest(unittest.TestCase): + MANIFEST_PATH = mozpath.join("$OBJDIR", "manifest") + + EXPECTED_LOG = [ + ((MANIFEST_PATH, 2), "add", "", "bar/*"), + ((MANIFEST_PATH, 4), "add", "foo", "foo/*"), + ((MANIFEST_PATH, 5), "remove", "foo", "foo/bar"), + ((MANIFEST_PATH, 6), "add", "foo", "chrome.manifest"), + ((MANIFEST_PATH, 8), "add", 'zot destdir="destdir"', "foo/zot"), + ] + + def setUp(self): + class MockSink(object): + def __init__(self): + self.log = [] + + def add(self, component, path): + self._log(errors.get_context(), "add", repr(component), path) + + def remove(self, component, path): + self._log(errors.get_context(), "remove", repr(component), path) + + def _log(self, *args): + self.log.append(args) + + self.sink = MockSink() + self.cwd = os.getcwd() + os.chdir(topobjdir) + + def tearDown(self): + os.chdir(self.cwd) + + def test_preprocess_manifest(self): + with MockedOpen({"manifest": MANIFEST}): + preprocess_manifest(self.sink, "manifest") + self.assertEqual(self.sink.log, self.EXPECTED_LOG) + + def test_preprocess_manifest_missing_define(self): + with MockedOpen({"manifest": MANIFEST}): + self.assertRaises( + Preprocessor.Error, + preprocess_manifest, + self.sink, + "manifest", + {"baz": 1}, + ) + + def test_preprocess_manifest_defines(self): + with MockedOpen({"manifest": MANIFEST}): + preprocess_manifest(self.sink, "manifest", {"baz": 1, "SUFFIX": ".exe"}) + self.assertEqual( + self.sink.log, + self.EXPECTED_LOG + [((self.MANIFEST_PATH, 12), "add", "baz", "baz.exe")], + ) + + +class MockFinder(object): + def __init__(self, files): + self.files = files + self.log = [] + + def find(self, path): + self.log.append(path) + for f in sorted(self.files): + if mozpath.match(f, path): + yield f, self.files[f] + + def __iter__(self): + return self.find("") + + +class MockFormatter(object): + def __init__(self): + self.log = [] + + def add_base(self, *args): + self._log(errors.get_context(), "add_base", *args) + + def add_manifest(self, *args): + self._log(errors.get_context(), "add_manifest", *args) + + def add_interfaces(self, *args): + self._log(errors.get_context(), "add_interfaces", *args) + + def add(self, *args): + self._log(errors.get_context(), "add", *args) + + def _log(self, *args): + self.log.append(args) + + +class TestSimplePackager(unittest.TestCase): + def test_simple_packager(self): + class GeneratedFileWithPath(GeneratedFile): + def __init__(self, path, content): + GeneratedFile.__init__(self, content) + self.path = path + + formatter = MockFormatter() + packager = SimplePackager(formatter) + curdir = os.path.abspath(os.curdir) + file = GeneratedFileWithPath( + os.path.join(curdir, "foo", "bar.manifest"), + b"resource bar bar/\ncontent bar bar/", + ) + with errors.context("manifest", 1): + packager.add("foo/bar.manifest", file) + + file = GeneratedFileWithPath( + os.path.join(curdir, 
"foo", "baz.manifest"), b"resource baz baz/" + ) + with errors.context("manifest", 2): + packager.add("bar/baz.manifest", file) + + with errors.context("manifest", 3): + packager.add( + "qux/qux.manifest", + GeneratedFile( + b"".join( + [ + b"resource qux qux/\n", + b"binary-component qux.so\n", + ] + ) + ), + ) + bar_xpt = GeneratedFile(b"bar.xpt") + qux_xpt = GeneratedFile(b"qux.xpt") + foo_html = GeneratedFile(b"foo_html") + bar_html = GeneratedFile(b"bar_html") + with errors.context("manifest", 4): + packager.add("foo/bar.xpt", bar_xpt) + with errors.context("manifest", 5): + packager.add("foo/bar/foo.html", foo_html) + packager.add("foo/bar/bar.html", bar_html) + + file = GeneratedFileWithPath( + os.path.join(curdir, "foo.manifest"), + b"".join( + [ + b"manifest foo/bar.manifest\n", + b"manifest bar/baz.manifest\n", + ] + ), + ) + with errors.context("manifest", 6): + packager.add("foo.manifest", file) + with errors.context("manifest", 7): + packager.add("foo/qux.xpt", qux_xpt) + + file = GeneratedFileWithPath( + os.path.join(curdir, "addon", "chrome.manifest"), b"resource hoge hoge/" + ) + with errors.context("manifest", 8): + packager.add("addon/chrome.manifest", file) + + install_rdf = GeneratedFile(b"") + with errors.context("manifest", 9): + packager.add("addon/install.rdf", install_rdf) + + with errors.context("manifest", 10): + packager.add("addon2/install.rdf", install_rdf) + packager.add( + "addon2/chrome.manifest", GeneratedFile(b"binary-component addon2.so") + ) + + with errors.context("manifest", 11): + packager.add("addon3/install.rdf", install_rdf) + packager.add( + "addon3/chrome.manifest", + GeneratedFile(b"manifest components/components.manifest"), + ) + packager.add( + "addon3/components/components.manifest", + GeneratedFile(b"binary-component addon3.so"), + ) + + with errors.context("manifest", 12): + install_rdf_addon4 = GeneratedFile( + b"\n<...>\ntrue\n<...>\n" + ) + packager.add("addon4/install.rdf", install_rdf_addon4) + + with errors.context("manifest", 13): + install_rdf_addon5 = GeneratedFile( + b"\n<...>\nfalse\n<...>\n" + ) + packager.add("addon5/install.rdf", install_rdf_addon5) + + with errors.context("manifest", 14): + install_rdf_addon6 = GeneratedFile( + b"\n<... em:unpack=true>\n<...>\n" + ) + packager.add("addon6/install.rdf", install_rdf_addon6) + + with errors.context("manifest", 15): + install_rdf_addon7 = GeneratedFile( + b"\n<... em:unpack=false>\n<...>\n" + ) + packager.add("addon7/install.rdf", install_rdf_addon7) + + with errors.context("manifest", 16): + install_rdf_addon8 = GeneratedFile( + b'\n<... em:unpack="true">\n<...>\n' + ) + packager.add("addon8/install.rdf", install_rdf_addon8) + + with errors.context("manifest", 17): + install_rdf_addon9 = GeneratedFile( + b'\n<... em:unpack="false">\n<...>\n' + ) + packager.add("addon9/install.rdf", install_rdf_addon9) + + with errors.context("manifest", 18): + install_rdf_addon10 = GeneratedFile( + b"\n<... em:unpack='true'>\n<...>\n" + ) + packager.add("addon10/install.rdf", install_rdf_addon10) + + with errors.context("manifest", 19): + install_rdf_addon11 = GeneratedFile( + b"\n<... em:unpack='false'>\n<...>\n" + ) + packager.add("addon11/install.rdf", install_rdf_addon11) + + we_manifest = GeneratedFile( + b'{"manifest_version": 2, "name": "Test WebExtension", "version": "1.0"}' + ) + # hybrid and hybrid2 are both bootstrapped extensions with + # embedded webextensions, they differ in the order in which + # the manifests are added to the packager. 
+ with errors.context("manifest", 20): + packager.add("hybrid/install.rdf", install_rdf) + + with errors.context("manifest", 21): + packager.add("hybrid/webextension/manifest.json", we_manifest) + + with errors.context("manifest", 22): + packager.add("hybrid2/webextension/manifest.json", we_manifest) + + with errors.context("manifest", 23): + packager.add("hybrid2/install.rdf", install_rdf) + + with errors.context("manifest", 24): + packager.add("webextension/manifest.json", we_manifest) + + non_we_manifest = GeneratedFile(b'{"not a webextension": true}') + with errors.context("manifest", 25): + packager.add("nonwebextension/manifest.json", non_we_manifest) + + self.assertEqual(formatter.log, []) + + with errors.context("dummy", 1): + packager.close() + self.maxDiff = None + # The formatter is expected to reorder the manifest entries so that + # chrome entries appear before the others. + self.assertEqual( + formatter.log, + [ + (("dummy", 1), "add_base", "", False), + (("dummy", 1), "add_base", "addon", True), + (("dummy", 1), "add_base", "addon10", "unpacked"), + (("dummy", 1), "add_base", "addon11", True), + (("dummy", 1), "add_base", "addon2", "unpacked"), + (("dummy", 1), "add_base", "addon3", "unpacked"), + (("dummy", 1), "add_base", "addon4", "unpacked"), + (("dummy", 1), "add_base", "addon5", True), + (("dummy", 1), "add_base", "addon6", "unpacked"), + (("dummy", 1), "add_base", "addon7", True), + (("dummy", 1), "add_base", "addon8", "unpacked"), + (("dummy", 1), "add_base", "addon9", True), + (("dummy", 1), "add_base", "hybrid", True), + (("dummy", 1), "add_base", "hybrid2", True), + (("dummy", 1), "add_base", "qux", False), + (("dummy", 1), "add_base", "webextension", True), + ( + (os.path.join(curdir, "foo", "bar.manifest"), 2), + "add_manifest", + ManifestContent("foo", "bar", "bar/"), + ), + ( + (os.path.join(curdir, "foo", "bar.manifest"), 1), + "add_manifest", + ManifestResource("foo", "bar", "bar/"), + ), + ( + ("bar/baz.manifest", 1), + "add_manifest", + ManifestResource("bar", "baz", "baz/"), + ), + ( + ("qux/qux.manifest", 1), + "add_manifest", + ManifestResource("qux", "qux", "qux/"), + ), + ( + ("qux/qux.manifest", 2), + "add_manifest", + ManifestBinaryComponent("qux", "qux.so"), + ), + (("manifest", 4), "add_interfaces", "foo/bar.xpt", bar_xpt), + (("manifest", 7), "add_interfaces", "foo/qux.xpt", qux_xpt), + ( + (os.path.join(curdir, "addon", "chrome.manifest"), 1), + "add_manifest", + ManifestResource("addon", "hoge", "hoge/"), + ), + ( + ("addon2/chrome.manifest", 1), + "add_manifest", + ManifestBinaryComponent("addon2", "addon2.so"), + ), + ( + ("addon3/components/components.manifest", 1), + "add_manifest", + ManifestBinaryComponent("addon3/components", "addon3.so"), + ), + (("manifest", 5), "add", "foo/bar/foo.html", foo_html), + (("manifest", 5), "add", "foo/bar/bar.html", bar_html), + (("manifest", 9), "add", "addon/install.rdf", install_rdf), + (("manifest", 10), "add", "addon2/install.rdf", install_rdf), + (("manifest", 11), "add", "addon3/install.rdf", install_rdf), + (("manifest", 12), "add", "addon4/install.rdf", install_rdf_addon4), + (("manifest", 13), "add", "addon5/install.rdf", install_rdf_addon5), + (("manifest", 14), "add", "addon6/install.rdf", install_rdf_addon6), + (("manifest", 15), "add", "addon7/install.rdf", install_rdf_addon7), + (("manifest", 16), "add", "addon8/install.rdf", install_rdf_addon8), + (("manifest", 17), "add", "addon9/install.rdf", install_rdf_addon9), + (("manifest", 18), "add", "addon10/install.rdf", install_rdf_addon10), + 
(("manifest", 19), "add", "addon11/install.rdf", install_rdf_addon11), + (("manifest", 20), "add", "hybrid/install.rdf", install_rdf), + ( + ("manifest", 21), + "add", + "hybrid/webextension/manifest.json", + we_manifest, + ), + ( + ("manifest", 22), + "add", + "hybrid2/webextension/manifest.json", + we_manifest, + ), + (("manifest", 23), "add", "hybrid2/install.rdf", install_rdf), + (("manifest", 24), "add", "webextension/manifest.json", we_manifest), + ( + ("manifest", 25), + "add", + "nonwebextension/manifest.json", + non_we_manifest, + ), + ], + ) + + self.assertEqual( + packager.get_bases(), + set( + [ + "", + "addon", + "addon2", + "addon3", + "addon4", + "addon5", + "addon6", + "addon7", + "addon8", + "addon9", + "addon10", + "addon11", + "qux", + "hybrid", + "hybrid2", + "webextension", + ] + ), + ) + self.assertEqual(packager.get_bases(addons=False), set(["", "qux"])) + + def test_simple_packager_manifest_consistency(self): + formatter = MockFormatter() + # bar/ is detected as an addon because of install.rdf, but top-level + # includes a manifest inside bar/. + packager = SimplePackager(formatter) + packager.add( + "base.manifest", + GeneratedFile( + b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n" + ), + ) + packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar")) + packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz")) + packager.add("bar/install.rdf", GeneratedFile(b"")) + + with self.assertRaises(ErrorMessage) as e: + packager.close() + + self.assertEqual( + str(e.exception), + 'error: "bar/baz.manifest" is included from "base.manifest", ' + 'which is outside "bar"', + ) + + # bar/ is detected as a separate base because of chrome.manifest that + # is included nowhere, but top-level includes another manifest inside + # bar/. + packager = SimplePackager(formatter) + packager.add( + "base.manifest", + GeneratedFile( + b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n" + ), + ) + packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar")) + packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz")) + packager.add("bar/chrome.manifest", GeneratedFile(b"resource baz baz")) + + with self.assertRaises(ErrorMessage) as e: + packager.close() + + self.assertEqual( + str(e.exception), + 'error: "bar/baz.manifest" is included from "base.manifest", ' + 'which is outside "bar"', + ) + + # bar/ is detected as a separate base because of chrome.manifest that + # is included nowhere, but chrome.manifest includes baz.manifest from + # the same directory. This shouldn't error out. 
+ packager = SimplePackager(formatter) + packager.add("base.manifest", GeneratedFile(b"manifest foo/bar.manifest\n")) + packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar")) + packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz")) + packager.add("bar/chrome.manifest", GeneratedFile(b"manifest baz.manifest")) + packager.close() + + +class TestSimpleManifestSink(unittest.TestCase): + def test_simple_manifest_parser(self): + formatter = MockFormatter() + foobar = GeneratedFile(b"foobar") + foobaz = GeneratedFile(b"foobaz") + fooqux = GeneratedFile(b"fooqux") + foozot = GeneratedFile(b"foozot") + finder = MockFinder( + { + "bin/foo/bar": foobar, + "bin/foo/baz": foobaz, + "bin/foo/qux": fooqux, + "bin/foo/zot": foozot, + "bin/foo/chrome.manifest": GeneratedFile(b"resource foo foo/"), + "bin/chrome.manifest": GeneratedFile(b"manifest foo/chrome.manifest"), + } + ) + parser = SimpleManifestSink(finder, formatter) + component0 = Component("component0") + component1 = Component("component1") + component2 = Component("component2", destdir="destdir") + parser.add(component0, "bin/foo/b*") + parser.add(component1, "bin/foo/qux") + parser.add(component1, "bin/foo/chrome.manifest") + parser.add(component2, "bin/foo/zot") + self.assertRaises(ErrorMessage, parser.add, "component1", "bin/bar") + + self.assertEqual(formatter.log, []) + parser.close() + self.assertEqual( + formatter.log, + [ + (None, "add_base", "", False), + ( + ("foo/chrome.manifest", 1), + "add_manifest", + ManifestResource("foo", "foo", "foo/"), + ), + (None, "add", "foo/bar", foobar), + (None, "add", "foo/baz", foobaz), + (None, "add", "foo/qux", fooqux), + (None, "add", "destdir/foo/zot", foozot), + ], + ) + + self.assertEqual( + finder.log, + [ + "bin/foo/b*", + "bin/foo/qux", + "bin/foo/chrome.manifest", + "bin/foo/zot", + "bin/bar", + "bin/chrome.manifest", + ], + ) + + +class TestCallDeque(unittest.TestCase): + def test_call_deque(self): + class Logger(object): + def __init__(self): + self._log = [] + + def log(self, str): + self._log.append(str) + + @staticmethod + def staticlog(logger, str): + logger.log(str) + + def do_log(logger, str): + logger.log(str) + + logger = Logger() + d = CallDeque() + d.append(logger.log, "foo") + d.append(logger.log, "bar") + d.append(logger.staticlog, logger, "baz") + d.append(do_log, logger, "qux") + self.assertEqual(logger._log, []) + d.execute() + self.assertEqual(logger._log, ["foo", "bar", "baz", "qux"]) + + +class TestComponent(unittest.TestCase): + def do_split(self, string, name, options): + n, o = Component._split_component_and_options(string) + self.assertEqual(name, n) + self.assertEqual(options, o) + + def test_component_split_component_and_options(self): + self.do_split("component", "component", {}) + self.do_split("trailingspace ", "trailingspace", {}) + self.do_split(" leadingspace", "leadingspace", {}) + self.do_split(" trim ", "trim", {}) + self.do_split(' trim key="value"', "trim", {"key": "value"}) + self.do_split(' trim empty=""', "trim", {"empty": ""}) + self.do_split(' trim space=" "', "trim", {"space": " "}) + self.do_split( + 'component key="value" key2="second" ', + "component", + {"key": "value", "key2": "second"}, + ) + self.do_split( + 'trim key=" value with spaces " key2="spaces again"', + "trim", + {"key": " value with spaces ", "key2": "spaces again"}, + ) + + def do_split_error(self, string): + self.assertRaises(ValueError, Component._split_component_and_options, string) + + def test_component_split_component_and_options_errors(self): + 
self.do_split_error('"component') + self.do_split_error('comp"onent') + self.do_split_error('component"') + self.do_split_error('"component"') + self.do_split_error("=component") + self.do_split_error("comp=onent") + self.do_split_error("component=") + self.do_split_error('key="val"') + self.do_split_error("component key=") + self.do_split_error('component key="val') + self.do_split_error('component key=val"') + self.do_split_error('component key="val" x') + self.do_split_error('component x key="val"') + self.do_split_error('component key1="val" x key2="val"') + + def do_from_string(self, string, name, destdir=""): + component = Component.from_string(string) + self.assertEqual(name, component.name) + self.assertEqual(destdir, component.destdir) + + def test_component_from_string(self): + self.do_from_string("component", "component") + self.do_from_string("component-with-hyphen", "component-with-hyphen") + self.do_from_string('component destdir="foo/bar"', "component", "foo/bar") + self.do_from_string('component destdir="bar spc"', "component", "bar spc") + self.assertRaises(ErrorMessage, Component.from_string, "") + self.assertRaises(ErrorMessage, Component.from_string, "component novalue=") + self.assertRaises( + ErrorMessage, Component.from_string, "component badoption=badvalue" + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py new file mode 100644 index 0000000000..b09971a102 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_packager_formats.py @@ -0,0 +1,537 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest +from itertools import chain + +import mozunit +import six + +import mozpack.path as mozpath +from mozpack.chrome.manifest import ( + ManifestBinaryComponent, + ManifestComponent, + ManifestContent, + ManifestLocale, + ManifestResource, + ManifestSkin, +) +from mozpack.copier import FileRegistry +from mozpack.errors import ErrorMessage +from mozpack.files import GeneratedFile, ManifestFile +from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter +from mozpack.test.test_files import bar_xpt, foo2_xpt, foo_xpt +from test_errors import TestErrors + +CONTENTS = { + "bases": { + # base_path: is_addon? 
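+        # False: part of the application; True: packed addon (.xpi);
+        # "unpacked": addon left as a directory on disk.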
+ "": False, + "app": False, + "addon0": "unpacked", + "addon1": True, + "app/chrome/addons/addon2": True, + }, + "manifests": [ + ManifestContent("chrome/f", "oo", "oo/"), + ManifestContent("chrome/f", "bar", "oo/bar/"), + ManifestResource("chrome/f", "foo", "resource://bar/"), + ManifestBinaryComponent("components", "foo.so"), + ManifestContent("app/chrome", "content", "foo/"), + ManifestComponent("app/components", "{foo-id}", "foo.js"), + ManifestContent("addon0/chrome", "addon0", "foo/bar/"), + ManifestContent("addon1/chrome", "addon1", "foo/bar/"), + ManifestContent("app/chrome/addons/addon2/chrome", "addon2", "foo/bar/"), + ], + "files": { + "chrome/f/oo/bar/baz": GeneratedFile(b"foobarbaz"), + "chrome/f/oo/baz": GeneratedFile(b"foobaz"), + "chrome/f/oo/qux": GeneratedFile(b"fooqux"), + "components/foo.so": GeneratedFile(b"foo.so"), + "components/foo.xpt": foo_xpt, + "components/bar.xpt": bar_xpt, + "foo": GeneratedFile(b"foo"), + "app/chrome/foo/foo": GeneratedFile(b"appfoo"), + "app/components/foo.js": GeneratedFile(b"foo.js"), + "addon0/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"), + "addon0/components/foo.xpt": foo2_xpt, + "addon0/components/bar.xpt": bar_xpt, + "addon1/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"), + "addon1/components/foo.xpt": foo2_xpt, + "addon1/components/bar.xpt": bar_xpt, + "app/chrome/addons/addon2/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"), + "app/chrome/addons/addon2/components/foo.xpt": foo2_xpt, + "app/chrome/addons/addon2/components/bar.xpt": bar_xpt, + }, +} + +FILES = CONTENTS["files"] + +RESULT_FLAT = { + "chrome.manifest": [ + "manifest chrome/chrome.manifest", + "manifest components/components.manifest", + ], + "chrome/chrome.manifest": [ + "manifest f/f.manifest", + ], + "chrome/f/f.manifest": [ + "content oo oo/", + "content bar oo/bar/", + "resource foo resource://bar/", + ], + "chrome/f/oo/bar/baz": FILES["chrome/f/oo/bar/baz"], + "chrome/f/oo/baz": FILES["chrome/f/oo/baz"], + "chrome/f/oo/qux": FILES["chrome/f/oo/qux"], + "components/components.manifest": [ + "binary-component foo.so", + "interfaces bar.xpt", + "interfaces foo.xpt", + ], + "components/foo.so": FILES["components/foo.so"], + "components/foo.xpt": foo_xpt, + "components/bar.xpt": bar_xpt, + "foo": FILES["foo"], + "app/chrome.manifest": [ + "manifest chrome/chrome.manifest", + "manifest components/components.manifest", + ], + "app/chrome/chrome.manifest": [ + "content content foo/", + ], + "app/chrome/foo/foo": FILES["app/chrome/foo/foo"], + "app/components/components.manifest": [ + "component {foo-id} foo.js", + ], + "app/components/foo.js": FILES["app/components/foo.js"], +} + +for addon in ("addon0", "addon1", "app/chrome/addons/addon2"): + RESULT_FLAT.update( + { + mozpath.join(addon, p): f + for p, f in six.iteritems( + { + "chrome.manifest": [ + "manifest chrome/chrome.manifest", + "manifest components/components.manifest", + ], + "chrome/chrome.manifest": [ + "content %s foo/bar/" % mozpath.basename(addon), + ], + "chrome/foo/bar/baz": FILES[ + mozpath.join(addon, "chrome/foo/bar/baz") + ], + "components/components.manifest": [ + "interfaces bar.xpt", + "interfaces foo.xpt", + ], + "components/bar.xpt": bar_xpt, + "components/foo.xpt": foo2_xpt, + } + ) + } + ) + +RESULT_JAR = { + p: RESULT_FLAT[p] + for p in ( + "chrome.manifest", + "chrome/chrome.manifest", + "components/components.manifest", + "components/foo.so", + "components/foo.xpt", + "components/bar.xpt", + "foo", + "app/chrome.manifest", + "app/components/components.manifest", + 
"app/components/foo.js", + "addon0/chrome.manifest", + "addon0/components/components.manifest", + "addon0/components/foo.xpt", + "addon0/components/bar.xpt", + ) +} + +RESULT_JAR.update( + { + "chrome/f/f.manifest": [ + "content oo jar:oo.jar!/", + "content bar jar:oo.jar!/bar/", + "resource foo resource://bar/", + ], + "chrome/f/oo.jar": { + "bar/baz": FILES["chrome/f/oo/bar/baz"], + "baz": FILES["chrome/f/oo/baz"], + "qux": FILES["chrome/f/oo/qux"], + }, + "app/chrome/chrome.manifest": [ + "content content jar:foo.jar!/", + ], + "app/chrome/foo.jar": { + "foo": FILES["app/chrome/foo/foo"], + }, + "addon0/chrome/chrome.manifest": [ + "content addon0 jar:foo.jar!/bar/", + ], + "addon0/chrome/foo.jar": { + "bar/baz": FILES["addon0/chrome/foo/bar/baz"], + }, + "addon1.xpi": { + mozpath.relpath(p, "addon1"): f + for p, f in six.iteritems(RESULT_FLAT) + if p.startswith("addon1/") + }, + "app/chrome/addons/addon2.xpi": { + mozpath.relpath(p, "app/chrome/addons/addon2"): f + for p, f in six.iteritems(RESULT_FLAT) + if p.startswith("app/chrome/addons/addon2/") + }, + } +) + +RESULT_OMNIJAR = { + p: RESULT_FLAT[p] + for p in ( + "components/foo.so", + "foo", + ) +} + +RESULT_OMNIJAR.update({p: RESULT_JAR[p] for p in RESULT_JAR if p.startswith("addon")}) + +RESULT_OMNIJAR.update( + { + "omni.foo": { + "components/components.manifest": [ + "interfaces bar.xpt", + "interfaces foo.xpt", + ], + }, + "chrome.manifest": [ + "manifest components/components.manifest", + ], + "components/components.manifest": [ + "binary-component foo.so", + ], + "app/omni.foo": { + p: RESULT_FLAT["app/" + p] + for p in chain( + ( + "chrome.manifest", + "chrome/chrome.manifest", + "chrome/foo/foo", + "components/components.manifest", + "components/foo.js", + ), + ( + mozpath.relpath(p, "app") + for p in six.iterkeys(RESULT_FLAT) + if p.startswith("app/chrome/addons/addon2/") + ), + ) + }, + } +) + +RESULT_OMNIJAR["omni.foo"].update( + { + p: RESULT_FLAT[p] + for p in ( + "chrome.manifest", + "chrome/chrome.manifest", + "chrome/f/f.manifest", + "chrome/f/oo/bar/baz", + "chrome/f/oo/baz", + "chrome/f/oo/qux", + "components/foo.xpt", + "components/bar.xpt", + ) + } +) + +RESULT_OMNIJAR_WITH_SUBPATH = { + k.replace("omni.foo", "bar/omni.foo"): v for k, v in RESULT_OMNIJAR.items() +} + +CONTENTS_WITH_BASE = { + "bases": { + mozpath.join("base/root", b) if b else "base/root": a + for b, a in six.iteritems(CONTENTS["bases"]) + }, + "manifests": [ + m.move(mozpath.join("base/root", m.base)) for m in CONTENTS["manifests"] + ], + "files": { + mozpath.join("base/root", p): f for p, f in six.iteritems(CONTENTS["files"]) + }, +} + +EXTRA_CONTENTS = { + "extra/file": GeneratedFile(b"extra file"), +} + +CONTENTS_WITH_BASE["files"].update(EXTRA_CONTENTS) + + +def result_with_base(results): + result = {mozpath.join("base/root", p): v for p, v in six.iteritems(results)} + result.update(EXTRA_CONTENTS) + return result + + +RESULT_FLAT_WITH_BASE = result_with_base(RESULT_FLAT) +RESULT_JAR_WITH_BASE = result_with_base(RESULT_JAR) +RESULT_OMNIJAR_WITH_BASE = result_with_base(RESULT_OMNIJAR) + + +def fill_formatter(formatter, contents): + for base, is_addon in sorted(contents["bases"].items()): + formatter.add_base(base, is_addon) + + for manifest in contents["manifests"]: + formatter.add_manifest(manifest) + + for k, v in sorted(six.iteritems(contents["files"])): + if k.endswith(".xpt"): + formatter.add_interfaces(k, v) + else: + formatter.add(k, v) + + +def get_contents(registry, read_all=False, mode="rt"): + result = {} + for k, v in registry: 
+ if isinstance(v, FileRegistry): + result[k] = get_contents(v) + elif isinstance(v, ManifestFile) or read_all: + if "b" in mode: + result[k] = v.open().read() + else: + result[k] = six.ensure_text(v.open().read()).splitlines() + else: + result[k] = v + return result + + +class TestFormatters(TestErrors, unittest.TestCase): + maxDiff = None + + def test_bases(self): + formatter = FlatFormatter(FileRegistry()) + formatter.add_base("") + formatter.add_base("addon0", addon=True) + formatter.add_base("browser") + self.assertEqual(formatter._get_base("platform.ini"), ("", "platform.ini")) + self.assertEqual( + formatter._get_base("browser/application.ini"), + ("browser", "application.ini"), + ) + self.assertEqual( + formatter._get_base("addon0/install.rdf"), ("addon0", "install.rdf") + ) + + def do_test_contents(self, formatter, contents): + for f in contents["files"]: + # .xpt files are merged, so skip them. + if not f.endswith(".xpt"): + self.assertTrue(formatter.contains(f)) + + def test_flat_formatter(self): + registry = FileRegistry() + formatter = FlatFormatter(registry) + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_FLAT) + self.do_test_contents(formatter, CONTENTS) + + def test_jar_formatter(self): + registry = FileRegistry() + formatter = JarFormatter(registry) + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_JAR) + self.do_test_contents(formatter, CONTENTS) + + def test_omnijar_formatter(self): + registry = FileRegistry() + formatter = OmniJarFormatter(registry, "omni.foo") + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_OMNIJAR) + self.do_test_contents(formatter, CONTENTS) + + def test_flat_formatter_with_base(self): + registry = FileRegistry() + formatter = FlatFormatter(registry) + + fill_formatter(formatter, CONTENTS_WITH_BASE) + self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE) + self.do_test_contents(formatter, CONTENTS_WITH_BASE) + + def test_jar_formatter_with_base(self): + registry = FileRegistry() + formatter = JarFormatter(registry) + + fill_formatter(formatter, CONTENTS_WITH_BASE) + self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE) + self.do_test_contents(formatter, CONTENTS_WITH_BASE) + + def test_omnijar_formatter_with_base(self): + registry = FileRegistry() + formatter = OmniJarFormatter(registry, "omni.foo") + + fill_formatter(formatter, CONTENTS_WITH_BASE) + self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE) + self.do_test_contents(formatter, CONTENTS_WITH_BASE) + + def test_omnijar_formatter_with_subpath(self): + registry = FileRegistry() + formatter = OmniJarFormatter(registry, "bar/omni.foo") + + fill_formatter(formatter, CONTENTS) + self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_SUBPATH) + self.do_test_contents(formatter, CONTENTS) + + def test_omnijar_is_resource(self): + def is_resource(base, path): + registry = FileRegistry() + f = OmniJarFormatter( + registry, + "omni.foo", + non_resources=[ + "defaults/messenger/mailViews.dat", + "defaults/foo/*", + "*/dummy", + ], + ) + f.add_base("") + f.add_base("app") + f.add(mozpath.join(base, path), GeneratedFile(b"")) + if f.copier.contains(mozpath.join(base, path)): + return False + self.assertTrue(f.copier.contains(mozpath.join(base, "omni.foo"))) + self.assertTrue(f.copier[mozpath.join(base, "omni.foo")].contains(path)) + return True + + for base in ["", "app/"]: + self.assertTrue(is_resource(base, "chrome")) + 
self.assertTrue(is_resource(base, "chrome/foo/bar/baz.properties")) + self.assertFalse(is_resource(base, "chrome/icons/foo.png")) + self.assertTrue(is_resource(base, "components/foo.js")) + self.assertFalse(is_resource(base, "components/foo.so")) + self.assertTrue(is_resource(base, "res/foo.css")) + self.assertFalse(is_resource(base, "res/cursors/foo.png")) + self.assertFalse(is_resource(base, "res/MainMenu.nib/foo")) + self.assertTrue(is_resource(base, "defaults/pref/foo.js")) + self.assertFalse(is_resource(base, "defaults/pref/channel-prefs.js")) + self.assertTrue(is_resource(base, "defaults/preferences/foo.js")) + self.assertFalse(is_resource(base, "defaults/preferences/channel-prefs.js")) + self.assertTrue(is_resource(base, "modules/foo.jsm")) + self.assertTrue(is_resource(base, "greprefs.js")) + self.assertTrue(is_resource(base, "hyphenation/foo")) + self.assertTrue(is_resource(base, "update.locale")) + self.assertFalse(is_resource(base, "foo")) + self.assertFalse(is_resource(base, "foo/bar/greprefs.js")) + self.assertTrue(is_resource(base, "defaults/messenger/foo.dat")) + self.assertFalse(is_resource(base, "defaults/messenger/mailViews.dat")) + self.assertTrue(is_resource(base, "defaults/pref/foo.js")) + self.assertFalse(is_resource(base, "defaults/foo/bar.dat")) + self.assertFalse(is_resource(base, "defaults/foo/bar/baz.dat")) + self.assertTrue(is_resource(base, "chrome/foo/bar/baz/dummy_")) + self.assertFalse(is_resource(base, "chrome/foo/bar/baz/dummy")) + self.assertTrue(is_resource(base, "chrome/foo/bar/dummy_")) + self.assertFalse(is_resource(base, "chrome/foo/bar/dummy")) + + def test_chrome_override(self): + registry = FileRegistry() + f = FlatFormatter(registry) + f.add_base("") + f.add_manifest(ManifestContent("chrome", "foo", "foo/unix")) + # A more specific entry for a given chrome name can override a more + # generic one. + f.add_manifest(ManifestContent("chrome", "foo", "foo/win", "os=WINNT")) + f.add_manifest(ManifestContent("chrome", "foo", "foo/osx", "os=Darwin")) + + # Chrome with the same name overrides the previous registration. + with self.assertRaises(ErrorMessage) as e: + f.add_manifest(ManifestContent("chrome", "foo", "foo/")) + + self.assertEqual( + str(e.exception), + 'error: "content foo foo/" overrides ' '"content foo foo/unix"', + ) + + # Chrome with the same name and same flags overrides the previous + # registration. + with self.assertRaises(ErrorMessage) as e: + f.add_manifest(ManifestContent("chrome", "foo", "foo/", "os=WINNT")) + + self.assertEqual( + str(e.exception), + 'error: "content foo foo/ os=WINNT" overrides ' + '"content foo foo/win os=WINNT"', + ) + + # We may start with the more specific entry first + f.add_manifest(ManifestContent("chrome", "bar", "bar/win", "os=WINNT")) + # Then adding a more generic one overrides it. + with self.assertRaises(ErrorMessage) as e: + f.add_manifest(ManifestContent("chrome", "bar", "bar/unix")) + + self.assertEqual( + str(e.exception), + 'error: "content bar bar/unix" overrides ' '"content bar bar/win os=WINNT"', + ) + + # Adding something more specific still works. + f.add_manifest( + ManifestContent("chrome", "bar", "bar/win", "os=WINNT osversion>=7.0") + ) + + # Variations of skin/locales are allowed. 
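+        # A different skin version or locale for the same chrome name is a
+        # distinct registration, not an override.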
+ f.add_manifest( + ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/") + ) + f.add_manifest(ManifestSkin("chrome", "foo", "modern/1.0", "foo/skin/modern/")) + + f.add_manifest(ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/")) + f.add_manifest(ManifestLocale("chrome", "foo", "ja-JP", "foo/locale/ja-JP/")) + + # But same-skin/locale still error out. + with self.assertRaises(ErrorMessage) as e: + f.add_manifest( + ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/foo") + ) + + self.assertEqual( + str(e.exception), + 'error: "skin foo classic/1.0 foo/skin/classic/foo" overrides ' + '"skin foo classic/1.0 foo/skin/classic/"', + ) + + with self.assertRaises(ErrorMessage) as e: + f.add_manifest( + ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/foo") + ) + + self.assertEqual( + str(e.exception), + 'error: "locale foo en-US foo/locale/en-US/foo" overrides ' + '"locale foo en-US foo/locale/en-US/"', + ) + + # Duplicating existing manifest entries is not an error. + f.add_manifest(ManifestContent("chrome", "foo", "foo/unix")) + + self.assertEqual( + self.get_output(), + [ + 'warning: "content foo foo/unix" is duplicated. Skipping.', + ], + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py new file mode 100644 index 0000000000..0714ae3252 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_packager_l10n.py @@ -0,0 +1,153 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import unittest + +import mozunit +import six + +from mozpack.chrome.manifest import Manifest, ManifestContent, ManifestLocale +from mozpack.copier import FileRegistry +from mozpack.files import GeneratedFile, ManifestFile +from mozpack.packager import l10n +from test_packager import MockFinder + + +class TestL10NRepack(unittest.TestCase): + def test_l10n_repack(self): + foo = GeneratedFile(b"foo") + foobar = GeneratedFile(b"foobar") + qux = GeneratedFile(b"qux") + bar = GeneratedFile(b"bar") + baz = GeneratedFile(b"baz") + dict_aa = GeneratedFile(b"dict_aa") + dict_bb = GeneratedFile(b"dict_bb") + dict_cc = GeneratedFile(b"dict_cc") + barbaz = GeneratedFile(b"barbaz") + lst = GeneratedFile(b"foo\nbar") + app_finder = MockFinder( + { + "bar/foo": foo, + "chrome/foo/foobar": foobar, + "chrome/qux/qux.properties": qux, + "chrome/qux/baz/baz.properties": baz, + "chrome/chrome.manifest": ManifestFile( + "chrome", + [ + ManifestContent("chrome", "foo", "foo/"), + ManifestLocale("chrome", "qux", "en-US", "qux/"), + ], + ), + "chrome.manifest": ManifestFile( + "", [Manifest("", "chrome/chrome.manifest")] + ), + "dict/aa": dict_aa, + "app/chrome/bar/barbaz.dtd": barbaz, + "app/chrome/chrome.manifest": ManifestFile( + "app/chrome", [ManifestLocale("app/chrome", "bar", "en-US", "bar/")] + ), + "app/chrome.manifest": ManifestFile( + "app", [Manifest("app", "chrome/chrome.manifest")] + ), + "app/dict/bb": dict_bb, + "app/dict/cc": dict_cc, + "app/chrome/bar/search/foo.xml": foo, + "app/chrome/bar/search/bar.xml": bar, + "app/chrome/bar/search/lst.txt": lst, + "META-INF/foo": foo, # Stripped. + "inner/META-INF/foo": foo, # Not stripped. + "app/META-INF/foo": foo, # Stripped. + "app/inner/META-INF/foo": foo, # Not stripped. 
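+                # Only a base's top-level META-INF is signing residue; nested
+                # META-INF directories are ordinary content.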
+ } + ) + app_finder.jarlogs = {} + app_finder.base = "app" + foo_l10n = GeneratedFile(b"foo_l10n") + qux_l10n = GeneratedFile(b"qux_l10n") + baz_l10n = GeneratedFile(b"baz_l10n") + barbaz_l10n = GeneratedFile(b"barbaz_l10n") + lst_l10n = GeneratedFile(b"foo\nqux") + l10n_finder = MockFinder( + { + "chrome/qux-l10n/qux.properties": qux_l10n, + "chrome/qux-l10n/baz/baz.properties": baz_l10n, + "chrome/chrome.manifest": ManifestFile( + "chrome", + [ + ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"), + ], + ), + "chrome.manifest": ManifestFile( + "", [Manifest("", "chrome/chrome.manifest")] + ), + "dict/bb": dict_bb, + "dict/cc": dict_cc, + "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n, + "app/chrome/chrome.manifest": ManifestFile( + "app/chrome", + [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")], + ), + "app/chrome.manifest": ManifestFile( + "app", [Manifest("app", "chrome/chrome.manifest")] + ), + "app/dict/aa": dict_aa, + "app/chrome/bar-l10n/search/foo.xml": foo_l10n, + "app/chrome/bar-l10n/search/qux.xml": qux_l10n, + "app/chrome/bar-l10n/search/lst.txt": lst_l10n, + } + ) + l10n_finder.base = "l10n" + copier = FileRegistry() + formatter = l10n.FlatFormatter(copier) + + l10n._repack( + app_finder, + l10n_finder, + copier, + formatter, + ["dict", "chrome/**/search/*.xml"], + ) + self.maxDiff = None + + repacked = { + "bar/foo": foo, + "chrome/foo/foobar": foobar, + "chrome/qux-l10n/qux.properties": qux_l10n, + "chrome/qux-l10n/baz/baz.properties": baz_l10n, + "chrome/chrome.manifest": ManifestFile( + "chrome", + [ + ManifestContent("chrome", "foo", "foo/"), + ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"), + ], + ), + "chrome.manifest": ManifestFile( + "", [Manifest("", "chrome/chrome.manifest")] + ), + "dict/bb": dict_bb, + "dict/cc": dict_cc, + "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n, + "app/chrome/chrome.manifest": ManifestFile( + "app/chrome", + [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")], + ), + "app/chrome.manifest": ManifestFile( + "app", [Manifest("app", "chrome/chrome.manifest")] + ), + "app/dict/aa": dict_aa, + "app/chrome/bar-l10n/search/foo.xml": foo_l10n, + "app/chrome/bar-l10n/search/qux.xml": qux_l10n, + "app/chrome/bar-l10n/search/lst.txt": lst_l10n, + "inner/META-INF/foo": foo, + "app/inner/META-INF/foo": foo, + } + + self.assertEqual( + dict((p, f.open().read()) for p, f in copier), + dict((p, f.open().read()) for p, f in six.iteritems(repacked)), + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py new file mode 100644 index 0000000000..57a2d71eda --- /dev/null +++ b/python/mozbuild/mozpack/test/test_packager_unpack.py @@ -0,0 +1,67 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
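+
+# These tests package CONTENTS with each formatter, unpack the result with
+# unpack_to_registry, and expect to recover the flat-formatted layout.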
+ +import mozunit + +from mozpack.copier import FileCopier, FileRegistry +from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter +from mozpack.packager.unpack import unpack_to_registry +from mozpack.test.test_files import TestWithTmpDir +from mozpack.test.test_packager_formats import CONTENTS, fill_formatter, get_contents + + +class TestUnpack(TestWithTmpDir): + maxDiff = None + + @staticmethod + def _get_copier(cls): + copier = FileCopier() + formatter = cls(copier) + fill_formatter(formatter, CONTENTS) + return copier + + @classmethod + def setUpClass(cls): + cls.contents = get_contents( + cls._get_copier(FlatFormatter), read_all=True, mode="rb" + ) + + def _unpack_test(self, cls): + # Format a package with the given formatter class + copier = self._get_copier(cls) + copier.copy(self.tmpdir) + + # Unpack that package. Its content is expected to match that of a Flat + # formatted package. + registry = FileRegistry() + unpack_to_registry(self.tmpdir, registry, getattr(cls, "OMNIJAR_NAME", None)) + self.assertEqual( + get_contents(registry, read_all=True, mode="rb"), self.contents + ) + + def test_flat_unpack(self): + self._unpack_test(FlatFormatter) + + def test_jar_unpack(self): + self._unpack_test(JarFormatter) + + @staticmethod + def _omni_foo_formatter(name): + class OmniFooFormatter(OmniJarFormatter): + OMNIJAR_NAME = name + + def __init__(self, registry): + super(OmniFooFormatter, self).__init__(registry, name) + + return OmniFooFormatter + + def test_omnijar_unpack(self): + self._unpack_test(self._omni_foo_formatter("omni.foo")) + + def test_omnijar_subpath_unpack(self): + self._unpack_test(self._omni_foo_formatter("bar/omni.foo")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py new file mode 100644 index 0000000000..6c7aeb5400 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_path.py @@ -0,0 +1,152 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
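+
+# mozpack.path helpers normalize to forward-slash paths; the tests below
+# feed in os.sep-joined inputs and expect "/"-separated results.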
+ +import os +import unittest + +import mozunit + +from mozpack.path import ( + basedir, + basename, + commonprefix, + dirname, + join, + match, + normpath, + rebase, + relpath, + split, + splitext, +) + + +class TestPath(unittest.TestCase): + SEP = os.sep + + def test_relpath(self): + self.assertEqual(relpath("foo", "foo"), "") + self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar"), "") + self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo"), "bar") + self.assertEqual( + relpath(self.SEP.join(("foo", "bar", "baz")), "foo"), "bar/baz" + ) + self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar/baz"), "..") + self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/baz"), "../bar") + self.assertEqual(relpath("foo/", "foo"), "") + self.assertEqual(relpath("foo/bar/", "foo"), "bar") + + def test_join(self): + self.assertEqual(join("foo", "bar", "baz"), "foo/bar/baz") + self.assertEqual(join("foo", "", "bar"), "foo/bar") + self.assertEqual(join("", "foo", "bar"), "foo/bar") + self.assertEqual(join("", "foo", "/bar"), "/bar") + + def test_normpath(self): + self.assertEqual( + normpath(self.SEP.join(("foo", "bar", "baz", "..", "qux"))), "foo/bar/qux" + ) + + def test_dirname(self): + self.assertEqual(dirname("foo/bar/baz"), "foo/bar") + self.assertEqual(dirname("foo/bar"), "foo") + self.assertEqual(dirname("foo"), "") + self.assertEqual(dirname("foo/bar/"), "foo/bar") + + def test_commonprefix(self): + self.assertEqual( + commonprefix( + [self.SEP.join(("foo", "bar", "baz")), "foo/qux", "foo/baz/qux"] + ), + "foo/", + ) + self.assertEqual( + commonprefix([self.SEP.join(("foo", "bar", "baz")), "foo/qux", "baz/qux"]), + "", + ) + + def test_basename(self): + self.assertEqual(basename("foo/bar/baz"), "baz") + self.assertEqual(basename("foo/bar"), "bar") + self.assertEqual(basename("foo"), "foo") + self.assertEqual(basename("foo/bar/"), "") + + def test_split(self): + self.assertEqual( + split(self.SEP.join(("foo", "bar", "baz"))), ["foo", "bar", "baz"] + ) + + def test_splitext(self): + self.assertEqual( + splitext(self.SEP.join(("foo", "bar", "baz.qux"))), ("foo/bar/baz", ".qux") + ) + + def test_basedir(self): + foobarbaz = self.SEP.join(("foo", "bar", "baz")) + self.assertEqual(basedir(foobarbaz, ["foo", "bar", "baz"]), "foo") + self.assertEqual(basedir(foobarbaz, ["foo", "foo/bar", "baz"]), "foo/bar") + self.assertEqual(basedir(foobarbaz, ["foo/bar", "foo", "baz"]), "foo/bar") + self.assertEqual(basedir(foobarbaz, ["foo", "bar", ""]), "foo") + self.assertEqual(basedir(foobarbaz, ["bar", "baz", ""]), "") + + def test_match(self): + self.assertTrue(match("foo", "")) + self.assertTrue(match("foo/bar/baz.qux", "foo/bar")) + self.assertTrue(match("foo/bar/baz.qux", "foo")) + self.assertTrue(match("foo", "*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/*/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "*/bar/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "*/*/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "*/*/*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/*/*")) + self.assertTrue(match("foo/bar/baz.qux", "foo/*/*.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/b*/*z.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/b*r/ba*z.qux")) + self.assertFalse(match("foo/bar/baz.qux", "foo/b*z/ba*r.qux")) + 
self.assertTrue(match("foo/bar/baz.qux", "**")) + self.assertTrue(match("foo/bar/baz.qux", "**/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "**/bar/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/**/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux")) + self.assertTrue(match("foo/bar/baz.qux", "**/foo/bar/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/baz.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/*.qux")) + self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux")) + self.assertTrue(match("foo/bar/baz.qux", "**/*.qux")) + self.assertFalse(match("foo/bar/baz.qux", "**.qux")) + self.assertFalse(match("foo/bar", "foo/*/bar")) + self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/**")) + self.assertFalse(match("foo/nobar/baz.qux", "foo/**/bar/**")) + self.assertTrue(match("foo/bar", "foo/**/bar/**")) + + def test_rebase(self): + self.assertEqual(rebase("foo", "foo/bar", "bar/baz"), "baz") + self.assertEqual(rebase("foo", "foo", "bar/baz"), "bar/baz") + self.assertEqual(rebase("foo/bar", "foo", "baz"), "bar/baz") + + +if os.altsep: + + class TestAltPath(TestPath): + SEP = os.altsep + + class TestReverseAltPath(TestPath): + def setUp(self): + sep = os.sep + os.sep = os.altsep + os.altsep = sep + + def tearDown(self): + self.setUp() + + class TestAltReverseAltPath(TestReverseAltPath): + SEP = os.altsep + + +if __name__ == "__main__": + mozunit.main() diff --git a/python/mozbuild/mozpack/test/test_pkg.py b/python/mozbuild/mozpack/test/test_pkg.py new file mode 100644 index 0000000000..f1febbbae0 --- /dev/null +++ b/python/mozbuild/mozpack/test/test_pkg.py @@ -0,0 +1,138 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
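
The pkg tests below exercise the macOS .pkg helpers without any real Apple tooling by patching subprocess inside mozpack.pkg. A minimal sketch of that stubbing pattern; FakeCompletedProcess is an illustrative stand-in, mirroring the MockSubprocessRun class the tests define:

    from unittest.mock import patch

    import mozpack.pkg

    class FakeCompletedProcess:
        returncode = 0
        stdout = ""
        stderr = ""

    with patch.object(
        mozpack.pkg.subprocess, "run", lambda *a, **kw: FakeCompletedProcess()
    ):
        pass  # any mozpack.pkg helper called here sees the stub, not a process
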
+ +from pathlib import Path +from string import Template +from unittest.mock import patch + +import mozunit + +import mozpack.pkg +from mozpack.pkg import ( + create_bom, + create_payload, + create_pkg, + get_app_info_plist, + get_apple_template, + get_relative_glob_list, + save_text_file, + xar_package_folder, +) +from mozpack.test.test_files import TestWithTmpDir + + +class TestPkg(TestWithTmpDir): + maxDiff = None + + class MockSubprocessRun: + stderr = "" + stdout = "" + returncode = 0 + + def __init__(self, returncode=0): + self.returncode = returncode + + def _mk_test_file(self, name, mode=0o777): + tool = Path(self.tmpdir) / f"{name}" + tool.touch() + tool.chmod(mode) + return tool + + def test_get_apple_template(self): + tmpl = get_apple_template("Distribution.template") + assert type(tmpl) == Template + + def test_get_apple_template_not_file(self): + with self.assertRaises(Exception): + get_apple_template("tmpl-should-not-exist") + + def test_save_text_file(self): + content = "Hello" + destination = Path(self.tmpdir) / "test_save_text_file" + save_text_file(content, destination) + with destination.open("r") as file: + assert content == file.read() + + def test_get_app_info_plist(self): + app_path = Path(self.tmpdir) / "app" + (app_path / "Contents").mkdir(parents=True) + (app_path / "Contents/Info.plist").touch() + data = {"foo": "bar"} + with patch.object(mozpack.pkg.plistlib, "load", lambda x: data): + assert data == get_app_info_plist(app_path) + + def test_get_app_info_plist_not_file(self): + app_path = Path(self.tmpdir) / "app-does-not-exist" + with self.assertRaises(Exception): + get_app_info_plist(app_path) + + def _mock_payload(self, returncode): + def _mock_run(*args, **kwargs): + return self.MockSubprocessRun(returncode) + + return _mock_run + + def test_create_payload(self): + destination = Path(self.tmpdir) / "mockPayload" + with patch.object(mozpack.pkg.subprocess, "run", self._mock_payload(0)): + create_payload(destination, Path(self.tmpdir), "cpio") + + def test_create_bom(self): + bom_path = Path(self.tmpdir) / "Bom" + bom_path.touch() + root_path = Path(self.tmpdir) + tool_path = Path(self.tmpdir) / "not-really-used-during-test" + with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x: None): + create_bom(bom_path, root_path, tool_path) + + def get_relative_glob_list(self): + source = Path(self.tmpdir) + (source / "testfile").touch() + glob = "*" + assert len(get_relative_glob_list(source, glob)) == 1 + + def test_xar_package_folder(self): + source = Path(self.tmpdir) + dest = source / "fakedestination" + dest.touch() + tool = source / "faketool" + with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x, **y: None): + xar_package_folder(source, dest, tool) + + def test_xar_package_folder_not_absolute(self): + source = Path("./some/relative/path") + dest = Path("./some/other/relative/path") + tool = source / "faketool" + with patch.object(mozpack.pkg.subprocess, "check_call", lambda: None): + with self.assertRaises(Exception): + xar_package_folder(source, dest, tool) + + def test_create_pkg(self): + def noop(*x, **y): + pass + + def mock_get_app_info_plist(*args): + return {"CFBundleShortVersionString": "1.0.0"} + + def mock_get_apple_template(*args): + return Template("fake template") + + source = Path(self.tmpdir) / "FakeApp.app" + source.mkdir() + output = Path(self.tmpdir) / "output.pkg" + fake_tool = Path(self.tmpdir) / "faketool" + with patch.multiple( + mozpack.pkg, + get_app_info_plist=mock_get_app_info_plist, + 
get_apple_template=mock_get_apple_template,
+            save_text_file=noop,
+            create_payload=noop,
+            create_bom=noop,
+            xar_package_folder=noop,
+        ):
+            create_pkg(source, output, fake_tool, fake_tool, fake_tool)
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_unify.py b/python/mozbuild/mozpack/test/test_unify.py
new file mode 100644
index 0000000000..15de50dccc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_unify.py
@@ -0,0 +1,250 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from io import StringIO
+
+import mozunit
+
+from mozbuild.util import ensureParentDir
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest, TestWithTmpDir
+from mozpack.unify import UnifiedBuildFinder, UnifiedFinder
+
+
+class TestUnified(TestWithTmpDir):
+    def create_one(self, which, path, content):
+        file = self.tmppath(os.path.join(which, path))
+        ensureParentDir(file)
+        if isinstance(content, str):
+            content = content.encode("utf-8")
+        open(file, "wb").write(content)
+
+    def create_both(self, path, content):
+        for p in ["a", "b"]:
+            self.create_one(p, path, content)
+
+
+class TestUnifiedFinder(TestUnified):
+    def test_unified_finder(self):
+        self.create_both("foo/bar", "foobar")
+        self.create_both("foo/baz", "foobaz")
+        self.create_one("a", "bar", "bar")
+        self.create_one("b", "baz", "baz")
+        self.create_one("a", "qux", "foobar")
+        self.create_one("b", "qux", "baz")
+        self.create_one("a", "test/foo", "a\nb\nc\n")
+        self.create_one("b", "test/foo", "b\nc\na\n")
+        self.create_both("test/bar", "a\nb\nc\n")
+
+        finder = UnifiedFinder(
+            FileFinder(self.tmppath("a")),
+            FileFinder(self.tmppath("b")),
+            sorted=["test"],
+        )
+        self.assertEqual(
+            sorted(
+                [(f, c.open().read().decode("utf-8")) for f, c in finder.find("foo")]
+            ),
+            [("foo/bar", "foobar"), ("foo/baz", "foobaz")],
+        )
+        self.assertRaises(ErrorMessage, any, finder.find("bar"))
+        self.assertRaises(ErrorMessage, any, finder.find("baz"))
+        self.assertRaises(ErrorMessage, any, finder.find("qux"))
+        self.assertEqual(
+            sorted(
+                [(f, c.open().read().decode("utf-8")) for f, c in finder.find("test")]
+            ),
+            [("test/bar", "a\nb\nc\n"), ("test/foo", "a\nb\nc\n")],
+        )
+
+
+class TestUnifiedBuildFinder(TestUnified):
+    def test_unified_build_finder(self):
+        finder = UnifiedBuildFinder(
+            FileFinder(self.tmppath("a")), FileFinder(self.tmppath("b"))
+        )
+
+        # Test chrome.manifest unification
+        self.create_both("chrome.manifest", "a\nb\nc\n")
+        self.create_one("a", "chrome/chrome.manifest", "a\nb\nc\n")
+        self.create_one("b", "chrome/chrome.manifest", "b\nc\na\n")
+        self.assertEqual(
+            sorted(
+                [
+                    (f, c.open().read().decode("utf-8"))
+                    for f, c in finder.find("**/chrome.manifest")
+                ]
+            ),
+            [("chrome.manifest", "a\nb\nc\n"), ("chrome/chrome.manifest", "a\nb\nc\n")],
+        )
+
+        # Test buildconfig.html unification
+        self.create_one(
+            "a",
+            "chrome/browser/foo/buildconfig.html",
+            "\n".join(
+                [
+                    "<html>",
+                    "<body>",
+                    "<div>",
+                    "<h1>Build Configuration</h1>",
+                    "<div>foo</div>",
+                    "</div>",
+                    "</body>",
+                    "</html>",
+                ]
+            ),
+        )
+        self.create_one(
+            "b",
+            "chrome/browser/foo/buildconfig.html",
+            "\n".join(
+                [
+                    "<html>",
+                    "<body>",
+                    "<div>",
+                    "<h1>Build Configuration</h1>",
+                    "<div>bar</div>",
+                    "</div>",
+                    "</body>",
+                    "</html>",
+                ]
+            ),
+        )
+        self.assertEqual(
+            sorted(
+                [
+                    (f, c.open().read().decode("utf-8"))
+                    for f, c in finder.find("**/buildconfig.html")
+                ]
+            ),
+            [
+                (
+                    "chrome/browser/foo/buildconfig.html",
+                    "\n".join(
+                        [
+                            "<html>",
+                            "<body>",
+                            "<div>",
+                            "<h1>Build Configuration</h1>",
+                            "<div>foo</div>",
+                            "<hr> </hr>",
+                            "<div>bar</div>",
+                            "</div>",
+                            "</body>",
+                            "</html>",
+                        ]
+                    ),
+                )
+            ],
+        )
+
+        # Test xpi file unification
+        xpi = MockDest()
+        with JarWriter(fileobj=xpi, compress=True) as jar:
+            jar.add("foo", "foo")
+            jar.add("bar", "bar")
+        foo_xpi = xpi.read()
+        self.create_both("foo.xpi", foo_xpi)
+
+        with JarWriter(fileobj=xpi, compress=True) as jar:
+            jar.add("foo", "bar")
+        self.create_one("a", "bar.xpi", foo_xpi)
+        self.create_one("b", "bar.xpi", xpi.read())
+
+        errors.out = StringIO()
+        with self.assertRaises(AccumulatedErrors), errors.accumulate():
+            self.assertEqual(
+                [(f, c.open().read()) for f, c in finder.find("*.xpi")],
+                [("foo.xpi", foo_xpi)],
+            )
+        errors.out = sys.stderr
+
+        # Test install.rdf unification
+        x86_64 = "Darwin_x86_64-gcc3"
+        x86 = "Darwin_x86-gcc3"
+        target_tag = "<{em}targetPlatform>{platform}</{em}targetPlatform>"
+        target_attr = '{em}targetPlatform="{platform}" '
+
+        rdf_tag = "".join(
+            [
+                '<{RDF}Description {em}bar="bar" {em}qux="qux">',
+                "<{em}foo>foo</{em}foo>",
+                "{targets}",
+                "<{em}baz>baz</{em}baz>",
+                "</{RDF}Description>",
+            ]
+        )
+        rdf_attr = "".join(
+            [
+                '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
+                "{targets}",
+                "<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>",
+                "</{RDF}Description>",
+            ]
+        )
+
+        for descr_ns, target_ns in (("RDF:", ""), ("", "em:"), ("RDF:", "em:")):
+            # First we need to infuse the above strings with our namespaces and
+            # platform values.
+            ns = {"RDF": descr_ns, "em": target_ns}
+            target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
+            target_tag_x86 = target_tag.format(platform=x86, **ns)
+            target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
+            target_attr_x86 = target_attr.format(platform=x86, **ns)
+
+            tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
+            tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
+            tag_merged = rdf_tag.format(
+                targets=target_tag_x86_64 + target_tag_x86, **ns
+            )
+            tag_empty = rdf_tag.format(targets="", **ns)
+
+            attr_x86_64 = rdf_attr.format(attr=target_attr_x86_64, targets="", **ns)
+            attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
+            attr_merged = rdf_attr.format(
+                attr="", targets=target_tag_x86_64 + target_tag_x86, **ns
+            )
+
+            # This table defines the test cases, columns "a" and "b" being the
+            # contents of the install.rdf of the respective platform and
+            # "result" the expected merged content after unification.
+            testcases = (
+                # _____a_____  _____b_____  ___result___#
+                (tag_x86_64, tag_x86, tag_merged),
+                (tag_x86_64, tag_empty, tag_empty),
+                (tag_empty, tag_x86, tag_empty),
+                (tag_empty, tag_empty, tag_empty),
+                (attr_x86_64, attr_x86, attr_merged),
+                (tag_x86_64, attr_x86, tag_merged),
+                (attr_x86_64, tag_x86, attr_merged),
+                (attr_x86_64, tag_empty, tag_empty),
+                (tag_empty, attr_x86, tag_empty),
+            )
+
+            # Now create the files from the above table and compare
+            results = []
+            for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
+                filename = "ext/id{0}/install.rdf".format(emid)
+                self.create_one("a", filename, rdf_a)
+                self.create_one("b", filename, rdf_b)
+                results.append((filename, result))
+
+            self.assertEqual(
+                sorted(
+                    [
+                        (f, c.open().read().decode("utf-8"))
+                        for f, c in finder.find("**/install.rdf")
+                    ]
+                ),
+                results,
+            )
+
+
+if __name__ == "__main__":
+    mozunit.main()
diff --git a/python/mozbuild/mozpack/unify.py b/python/mozbuild/mozpack/unify.py
new file mode 100644
index 0000000000..ca4d0017a9
--- /dev/null
+++ b/python/mozbuild/mozpack/unify.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import struct
+import subprocess
+from collections import OrderedDict
+from tempfile import mkstemp
+
+import buildconfig
+
+import mozpack.path as mozpath
+from mozbuild.util import hexdump
+from mozpack.errors import errors
+from mozpack.executables import MACHO_SIGNATURES
+from mozpack.files import BaseFile, BaseFinder, ExecutableFile, GeneratedFile
+
+# Regular expressions for unifying install.rdf
+FIND_TARGET_PLATFORM = re.compile(
+    r"""
+    <(?P<ns>[-._0-9A-Za-z]+:)?targetPlatform>  # The targetPlatform tag, with any namespace
+    (?P<platform>[^<]*)                        # The actual platform value
+    </(?P=ns)?targetPlatform>                  # The closing tag
+    """,
+    re.X,
+)
+FIND_TARGET_PLATFORM_ATTR = re.compile(
+    r"""
+    (?P<tag><(?:[-._0-9A-Za-z]+:)?Description)  # The opening part of the <Description> tag
+    (?P<attrs>[^>]*?)\s+                        # The initial attributes
+    (?P<ns>[-._0-9A-Za-z]+:)?targetPlatform=    # The targetPlatform attribute, with any namespace
+    [\'"](?P<platform>[^\'"]+)[\'"]             # The actual platform value
+    (?P<otherattrs>[^>]*?>)                     # The remaining attributes and closing angle bracket
+    """,
+    re.X,
+)
+
+
+def may_unify_binary(file):
+    """
+    Return whether the given BaseFile instance is an ExecutableFile that
+    may be unified. Only non-fat Mach-O binaries are to be unified.
+    """
+    if isinstance(file, ExecutableFile):
+        signature = file.open().read(4)
+        if len(signature) < 4:
+            return False
+        signature = struct.unpack(">L", signature)[0]
+        if signature in MACHO_SIGNATURES:
+            return True
+    return False
+
+
+class UnifiedExecutableFile(BaseFile):
+    """
+    File class for executable and library files that are to be unified with
+    'lipo'.
+    """
+
+    def __init__(self, executable1, executable2):
+        """
+        Initialize a UnifiedExecutableFile with a pair of ExecutableFiles to
+        be unified. They are expected to be non-fat Mach-O executables.
+        """
+        assert isinstance(executable1, ExecutableFile)
+        assert isinstance(executable2, ExecutableFile)
+        self._executables = (executable1, executable2)
+
+    def copy(self, dest, skip_if_older=True):
+        """
+        Create a fat executable from the two Mach-O executables given when
+        creating the instance.
+        skip_if_older is ignored.
+        """
+        assert isinstance(dest, str)
+        tmpfiles = []
+        try:
+            for e in self._executables:
+                fd, f = mkstemp()
+                os.close(fd)
+                tmpfiles.append(f)
+                e.copy(f, skip_if_older=False)
+            lipo = buildconfig.substs.get("LIPO") or "lipo"
+            subprocess.check_call([lipo, "-create"] + tmpfiles + ["-output", dest])
+        except Exception as e:
+            errors.error(
+                "Failed to unify %s and %s: %s"
+                % (self._executables[0].path, self._executables[1].path, str(e))
+            )
+        finally:
+            for f in tmpfiles:
+                os.unlink(f)
+
+
+class UnifiedFinder(BaseFinder):
+    """
+    Helper to get unified BaseFile instances from two distinct trees on the
+    file system.
+    """
+
+    def __init__(self, finder1, finder2, sorted=[], **kargs):
+        """
+        Initialize a UnifiedFinder. finder1 and finder2 are BaseFinder
+        instances from which files are picked. UnifiedFinder.find() will act as
+        FileFinder.find() but will error out when matches can only be found in
+        one of the two trees and not the other. It will also error out if
+        matches can be found on both ends but their contents are not identical.
+
+        The sorted argument gives a list of mozpath.match patterns. File
+        paths matching one of these patterns will have their contents compared
+        with their lines sorted.
+        """
+        assert isinstance(finder1, BaseFinder)
+        assert isinstance(finder2, BaseFinder)
+        self._finder1 = finder1
+        self._finder2 = finder2
+        self._sorted = sorted
+        BaseFinder.__init__(self, finder1.base, **kargs)
+
+    def _find(self, path):
+        """
+        UnifiedFinder.find() implementation.
+        """
+        # There is no `OrderedSet`. Operator `|` was added only in
+        # Python 3.9, so we merge by hand.
+        all_paths = OrderedDict()
+
+        files1 = OrderedDict()
+        for p, f in self._finder1.find(path):
+            files1[p] = f
+            all_paths[p] = True
+        files2 = OrderedDict()
+        for p, f in self._finder2.find(path):
+            files2[p] = f
+            all_paths[p] = True
+
+        for p in all_paths:
+            err = errors.count
+            unified = self.unify_file(p, files1.get(p), files2.get(p))
+            if unified:
+                yield p, unified
+            elif err == errors.count:  # No error has been reported yet.
+                self._report_difference(p, files1.get(p), files2.get(p))
+
+    def _report_difference(self, path, file1, file2):
+        """
+        Report differences between files in both trees.
+        """
+        if not file1:
+            errors.error("File missing in %s: %s" % (self._finder1.base, path))
+            return
+        if not file2:
+            errors.error("File missing in %s: %s" % (self._finder2.base, path))
+            return
+
+        errors.error(
+            "Can't unify %s: file differs between %s and %s"
+            % (path, self._finder1.base, self._finder2.base)
+        )
+        if not isinstance(file1, ExecutableFile) and not isinstance(
+            file2, ExecutableFile
+        ):
+            from difflib import unified_diff
+
+            try:
+                lines1 = [l.decode("utf-8") for l in file1.open().readlines()]
+                lines2 = [l.decode("utf-8") for l in file2.open().readlines()]
+            except UnicodeDecodeError:
+                lines1 = hexdump(file1.open().read())
+                lines2 = hexdump(file2.open().read())
+
+            for line in unified_diff(
+                lines1,
+                lines2,
+                os.path.join(self._finder1.base, path),
+                os.path.join(self._finder2.base, path),
+            ):
+                errors.out.write(line)
+
+    def unify_file(self, path, file1, file2):
+        """
+        Given two BaseFiles and the path they were found at, return a
+        unified version of the files. If the files match, the first BaseFile
+        may be returned.
+        If the files don't match or one of them is `None`, the method returns
+        `None`.
+        Subclasses may decide to unify by using one of the files in that case.
+        """
+        if not file1 or not file2:
+            return None
+
+        if may_unify_binary(file1) and may_unify_binary(file2):
+            return UnifiedExecutableFile(file1, file2)
+
+        content1 = file1.open().readlines()
+        content2 = file2.open().readlines()
+        if content1 == content2:
+            return file1
+        for pattern in self._sorted:
+            if mozpath.match(path, pattern):
+                if sorted(content1) == sorted(content2):
+                    return file1
+                break
+        return None
+
+
+class UnifiedBuildFinder(UnifiedFinder):
+    """
+    Specialized UnifiedFinder for Mozilla applications packaging. It allows
+    ``*.manifest`` files to differ in their order, and unifies
+    ``buildconfig.html`` files by merging their content.
+    """
+
+    def __init__(self, finder1, finder2, **kargs):
+        UnifiedFinder.__init__(
+            self, finder1, finder2, sorted=["**/*.manifest"], **kargs
+        )
+
+    def unify_file(self, path, file1, file2):
+        """
+        Unify files taking Mozilla application special cases into account.
+        Otherwise defer to UnifiedFinder.unify_file.
+        """
+        basename = mozpath.basename(path)
+        if file1 and file2 and basename == "buildconfig.html":
+            content1 = file1.open().readlines()
+            content2 = file2.open().readlines()
+            # Copy everything from the first file up to the end of its <div>,
+            # insert a <hr> between the two files and copy the second file's
+            # content beginning after its leading <h1>.
+            return GeneratedFile(
+                b"".join(
+                    content1[: content1.index(b"</div>\n")]
+                    + [b"<hr> </hr>\n"]
+                    + content2[
+                        content2.index(b"<h1>Build Configuration</h1>\n") + 1 :
+                    ]
+                )
+            )
+        elif file1 and file2 and basename == "install.rdf":
+            # install.rdf files often have em:targetPlatform (either as
+            # attribute or as tag) that will differ between platforms. The
+            # unified install.rdf should contain both em:targetPlatforms if
+            # they exist, or strip them if only one file has a target platform.
+            content1, content2 = (
+                FIND_TARGET_PLATFORM_ATTR.sub(
+                    lambda m: m.group("tag")
+                    + m.group("attrs")
+                    + m.group("otherattrs")
+                    + "<%stargetPlatform>%s</%stargetPlatform>"
+                    % (m.group("ns") or "", m.group("platform"), m.group("ns") or ""),
+                    f.open().read().decode("utf-8"),
+                )
+                for f in (file1, file2)
+            )
+
+            platform2 = FIND_TARGET_PLATFORM.search(content2)
+            return GeneratedFile(
+                FIND_TARGET_PLATFORM.sub(
+                    lambda m: m.group(0) + platform2.group(0) if platform2 else "",
+                    content1,
+                )
+            )
+        return UnifiedFinder.unify_file(self, path, file1, file2)
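
A usage sketch for the unify module above, assuming two hypothetical single-architecture trees "x86_64" and "aarch64" and an output directory "unified". *.manifest files may differ in line order, and non-fat Mach-O pairs are merged with lipo when the copier writes them out:

    from mozpack.copier import FileCopier
    from mozpack.files import FileFinder
    from mozpack.unify import UnifiedBuildFinder

    finder = UnifiedBuildFinder(FileFinder("x86_64"), FileFinder("aarch64"))
    copier = FileCopier()
    for path, unified in finder.find("**"):
        # Files that differ irreconcilably are reported through mozpack.errors.
        copier.add(path, unified)
    copier.copy("unified")
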
diff --git a/python/mozbuild/setup.py b/python/mozbuild/setup.py
new file mode 100644
index 0000000000..30785493b0
--- /dev/null
+++ b/python/mozbuild/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import find_packages, setup
+
+VERSION = "0.2"
+
+setup(
+    author="Mozilla Foundation",
+    author_email="dev-builds@lists.mozilla.org",
+    name="mozbuild",
+    description="Mozilla build system functionality.",
+    license="MPL 2.0",
+    packages=find_packages(),
+    version=VERSION,
+    install_requires=[
+        "jsmin",
+        "mozfile",
+    ],
+    classifiers=[
+        "Development Status :: 3 - Alpha",
+        "Topic :: Software Development :: Build Tools",
+        "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+        "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: Implementation :: CPython",
+    ],
+    keywords="mozilla build",
+)
-- 
cgit v1.2.3
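
As a footnote to UnifiedExecutableFile.copy above: the lipo invocation it performs, reduced to a standalone call with hypothetical input and output paths:

    import subprocess

    subprocess.check_call(
        ["lipo", "-create", "x86_64/MyApp", "aarch64/MyApp", "-output", "unified/MyApp"]
    )
    # `lipo -info unified/MyApp` then reports both architectures.
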